# Dataset schema (one row per file): hexsha | size | ext | lang | max_stars/max_issues/max_forks repo path, name, head hexsha, licenses, counts, event datetimes | content | avg_line_length | max_line_length | alphanum_fraction
# ===== gazoo_device/log_parser.py | dedsec-9/gazoo-device @ 5ed2867 | Apache-2.0 | 18,545 bytes =====
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for log parser."""
import codecs
import os
import time
from gazoo_device import errors
from gazoo_device import gdm_logger
from gazoo_device.switchboard import data_framer
from gazoo_device.switchboard import log_process
logger = gdm_logger.get_logger("log_parser")
DISPLAY_REFRESH = 3.0 # secs
class LogParser(object):
"""Provides ability to search for specific events in log file."""
def __init__(self, parser_obj, log_path, display_refresh=DISPLAY_REFRESH):
"""Initialize LogParser class using provided information.
Args:
parser_obj (Parser): Instance of class Parser
log_path (str): Path to log filename containing raw, log event data
display_refresh (float): Number of seconds to wait prior to refresh
of display
Raises:
ParserError: if log_path does not exist, if event_filename
already exists, or if parser_obj is None.
Note:
Since the provided log_path is immediately parsed, initializing
LogParser using log files exceeding 100 MB can cause test applications
to appear delayed. The smaller the log file, the faster
initialization will be.
"""
if parser_obj is None:
raise errors.ParserError("Log parser parameter check failed. "
"Bad parser_obj.")
if not os.path.isfile(log_path):
raise errors.ParserError(
"LogParser parameter check failed. "
"log file name: {} does not exist.".format(log_path))
self.event_filename = log_process.get_event_filename(log_path)
parser_obj.event_file_path = self.event_filename
if os.path.isfile(self.event_filename):
raise errors.ParserError("LogParser parameter check failed. "
"event_filename: {} already exists.".format(
self.event_filename))
if display_refresh < 0:
raise errors.ParserError(
"LogParser parameter check failed. "
"Expected display refresh >=0 instead got: {}".format(
display_refresh))
self._parser_obj = parser_obj
self._parse_events(log_path, display_refresh)
def get_last_event(self, event_labels=None, timeout=1.0):
r"""Returns the most recent matching event for each item in the list of event labels.
Args:
event_labels (list): list of event labels to lookup (example:
["power.state", "power.lost"]). None (default) returns the most
recent event regardless of label.
timeout (float): Timeout value in seconds. Example: 1.5.
Raises:
ParserError: on error parsing event file.
Returns:
ParserResult: An object containing the most recent event match for each
event label in the event_labels list, or the most recent event if
event_labels is None. The event data is stored in the .results_list
attribute, and the count of results is in the .count attribute. If no
matching events are found, the .results_list attribute will be set to
the empty list.
If the .timedout attribute is True, the operation timed out before all
events were searched or the event file was not found before the timeout
expired.
Notes:
The event_label is the desired filter filename without extension, a ".",
and the filter name (e.g. power.state). The following examples use a
hypothetical power.json filter file with the following filters:
.. code-block:: none
{
"name": "state",
"regex_match": "power:(\\w+)"
},
{
"name": "lost",
"regex_match": "power lost"
},
{
"name": "restored",
"regex_match": "power restored"
}
The original log file being read looks like this:
.. code-block:: none
<2018-02-02 10:32:07.167234> [APPL] Spoke: power restored,
powering on
<2018-02-02 12:00:57.154328> [APPL] Spoke: power lost, powering
down
<2018-02-02 12:00:57.154328> power:OFF
<2018-02-02 12:01:22.122824> [APPL] Spoke: power restored,
powering on
<2018-02-02 12:01:22.122824> power:ON
<2018-02-02 12:02:18.121948> [APPL] Spoke: power lost, powering
down
<2018-02-02 12:02:18.121948> power:OFF
Example output with event_labels=["power.state"], showing group
matches:
.. code-block:: none
result.timedout = False
result.results_list =
[{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 2,
18, 121948),
'raw_log_line': 'power:OFF',
'power.state': ['OFF']}]
result.count = 1
Example output with event_labels=["power.lost"], showing matches
with no groups:
.. code-block:: none
result.timedout = False
result.results_list =
[{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 2,
18, 121948),
'raw_log_line': '[APPL] Spoke: power lost',
'power.lost': []}]
result.count = 1
Example output with event_labels=["power.state", "power.lost",
"power.restored"]:
.. code-block:: none
result.timedout = False
result.results_list =
[{'system_timestamp': datetime(2018, 2, 2, 12, 2, 18,
121948),
'raw_log_line': 'power:OFF',
'power.state': ['OFF']},
{'system_timestamp': datetime(2018, 2, 2, 12, 2, 18,
121948),
'raw_log_line': '[APPL] Spoke: power lost',
'power.lost': []},
{'system_timestamp': datetime(2018, 2, 2, 12, 1, 22,
122824),
'raw_log_line': '[APPL] Spoke: power restored, powering
on',
'power.restored': []}]
result.count = 3
"""
if event_labels:
if not isinstance(event_labels, list):
raise errors.ParserError("Expecting event_labels to be a list "
"or None but received {} instead.".format(
type(event_labels)))
cur_event_labels = self.get_event_labels()
for event_label in event_labels:
if event_label not in cur_event_labels:
raise errors.ParserError(
"Event label {} doesn't exist.".format(event_label))
return self._parser_obj.get_last_event(
self.event_filename, event_labels, timeout=timeout)
def get_event_history(self, event_labels=None, count=None, timeout=10.0):
r"""Returns up to count elements of event data matching given event labels.
Args:
event_labels (list): list of event labels to lookup (example:
["power.state", "power.lost"]). If None (default), return all
events.
count (int): of event data elements to return (Default: None for all
events).
timeout (float): Timeout value in seconds. Example: 1.5.
Raises:
ParserError: event_labels is not type list or
label in event_labels doesn't exist or
on error parsing event file
Returns:
ParserResult: An object containing events matching any of the event
labels provided in event_labels, or all available events if event_labels
is None. The event data is stored in the .results_list attribute and the
count of results in the .count attribute. If no matching events are
found, the .results_list attribute will be set to the empty list.
If the .timedout attribute is True, the operation timed out before all
events were searched, or the event file was not found before the timeout
expired.
Notes:
Event info is listed in most recent to least recent order.
The following examples use a hypothetical power.json filter file with
the following filters:
.. code-block:: none
{
"name": "state",
"regex_match": "power:(\\w+)"
},
{
"name": "lost",
"regex_match": "power lost"
},
{
"name": "restored",
"regex_match": "power restored"
}
The original log file being read looks like this:
.. code-block:: none
<2018-02-02 10:32:07.167234> [APPL] Spoke: power restored,
powering on
<2018-02-02 12:00:57.154328> [APPL] Spoke: power lost, powering
down
<2018-02-02 12:00:57.154328> power:OFF
<2018-02-02 12:01:22.122824> [APPL] Spoke: power restored,
powering on
<2018-02-02 12:01:22.122824> power:ON
<2018-02-02 12:02:18.121948> [APPL] Spoke: power lost, powering
down
<2018-02-02 12:02:18.121948> power:OFF
Example output with event_labels ["power.state"], showing group
matches:
.. code-block:: none
result.timedout = False
result.results_list =
[{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 2,
18, 121948),
'raw_log_line': 'power:OFF',
'power.state': ['OFF']},
{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 1,
22, 122824),
'raw_log_line': 'power:ON',
'power.state': ['ON']},
{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 0,
57, 154328),
'raw_log_line': 'power:OFF',
'power.state': ['OFF']}]
result.count = 3
Example output with event_labels ["power.lost"], showing matches
with no groups:
.. code-block:: none
result.timedout = False
result.results_list =
[{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 2,
18, 121948),
'raw_log_line': '[APPL] Spoke: power lost, powering down',
'power.lost': []},
{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 0,
57, 154328),
'raw_log_line': '[APPL] Spoke: power lost, powering down',
'power.lost': []}]
result.count = 2
Example output with event_labels ["power.restored"], showing matches
with no groups and count=1:
.. code-block:: none
result.timedout = False
result.results_list =
[{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 1,
22, 122824),
'raw_log_line': '[APPL] Spoke: power restored, powering
on',
'power.restored': []}]
result.count = 1
Example output with event_labels ["power.lost", "power.restored"]:
.. code-block:: none
result.timedout = False
result.results_list =
[{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 2,
18, 121948),
'raw_log_line': '[APPL] Spoke: power lost, powering down',
'power.lost': []},
{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 1,
22, 122824),
'raw_log_line': '[APPL] Spoke: power restored, powering
on',
'power.restored': []},
{'system_timestamp': datetime.datetime(2018, 2, 2, 12, 0,
57, 154328),
'raw_log_line': '[APPL] Spoke: power lost, powering down',
'power.lost': []},
{'system_timestamp': datetime.datetime(2018, 2, 2, 10, 32,
7, 167234),
'raw_log_line': '[APPL] Spoke: power restored, powering
on',
'power.restored': []}]
result.count = 4
"""
if event_labels:
if not isinstance(event_labels, list):
raise errors.ParserError("Expecting event_labels to be a list "
"or None but received {} instead.".format(
type(event_labels)))
cur_event_labels = self.get_event_labels()
for event_label in event_labels:
if event_label not in cur_event_labels:
raise errors.ParserError(
"Event label {} doesn't exist.".format(event_label))
return self._parser_obj.get_event_history(
event_labels, count=count, timeout=timeout)
def get_event_history_count(self, event_label, timeout=10.0):
"""Returns count of event history matching event label specified.
Args:
event_label (str): event label to lookup (e.g. power.state)
timeout (float): Timeout value in seconds. Example: 1.5.
Raises:
ParserError: on error parsing event file or event_label doesn't exist
Returns:
ParserResult: An object containing the count of results in the .count
attribute. The .results_list field is set to the empty list.
If the .timedout attribute is True, the operation timed out before all
events were searched, or the event file was not found before the timeout
expired.
"""
if event_label not in self.get_event_labels():
raise errors.ParserError(
"Event label {} doesn't exist.".format(event_label))
return self._parser_obj.get_event_history_count(
self.event_filename, event_label, timeout=timeout)
def get_event_label_dict(self, pattern=None):
"""Takes an event label pattern, and returns a dictionary of matching event labels.
Args:
pattern (str): An event label pattern regular expression
Returns:
dict: containing keys (i.e. event labels) and values (i.e. regular
expressions)
Raises:
ParserError: if pattern provided is an invalid regular expression
Note:
The pattern regular expression provided will be matched strictly
against the beginning of each event_label.
If no pattern is provided, the _filters_dict is returned.
"""
return self._parser_obj.get_event_label_dict(pattern=pattern)
def get_event_labels(self, pattern=None):
"""Takes an event label pattern, and returns a list of matching event labels.
Args:
pattern (str): An event label pattern regular expression
Returns:
list: of matching event label strings
Raises:
ParserError: if pattern provided is an invalid regular expression
Note:
The pattern regular expression provided will be matched strictly
against the beginning of each event_label.
If no pattern is provided, all filter event labels are returned.
"""
return self._parser_obj.get_event_labels(pattern=pattern)
def _parse_events(self, log_path, display_refresh):
"""Parses log file searching for events depositing results into an event file.
Args:
log_path (str): Path to log filename containing raw, log event data
display_refresh (float): Number of seconds to wait prior to refresh
of display
Raises:
ParserError: if log parser fails.
Note:
With large log files, this process may take a long time.
"""
logger.info("Parsing log file {} into event file {}, please wait", log_path,
self.event_filename)
start_time = time.time()
with codecs.open(self.event_filename, "a", encoding="utf-8") as event_file:
log_filename = os.path.basename(log_path)
with codecs.open(
log_path, "r", encoding="utf-8", errors="replace") as log_file:
log_file.seek(0, os.SEEK_END)
total_bytes = log_file.tell()
log_file.seek(0, os.SEEK_SET)
process_time = start_time
buffered_unicode = u""
framer = data_framer.NewlineFramer()
new_header_length = (
log_process.HOST_TIMESTAMP_LENGTH +
log_process.LOG_LINE_HEADER_LENGTH)
old_header_length = 29
try:
while True:
log_data = log_file.read(size=4096)
if time.time() - process_time > display_refresh:
process_time = time.time()
bytes_processed = log_file.tell()
logger.info("{:.2%} complete - bytes processed: {} of {}",
bytes_processed / total_bytes, bytes_processed,
total_bytes)
if not log_data:
break
log_lines = buffered_unicode + log_data
buffered_len = len(buffered_unicode)
buffered_unicode = u""
for log_line in framer.get_lines(log_lines, begin=buffered_len):
if log_line[-1] == "\n":
if "> GDM-" in log_line:
header_length = new_header_length
else:
header_length = old_header_length
self._parser_obj.process_line(
event_file,
log_line,
header_length=header_length,
log_filename=log_filename)
else:
buffered_unicode += log_line
except IOError as err:
logger.debug("log_parser encountered error: {!r}".format(err))
raise errors.ParserError("Log file processing failed. "
"IOError: {!r}".format(err))
logger.info("Parsing log file {} into event file {} finished in {}s",
log_path, self.event_filename,
time.time() - start_time)
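# A minimal usage sketch (hypothetical paths; assumes `parser_obj` is an
# already-configured gazoo_device Parser whose filters include the
# power.json examples above, and that the matching event file does not
# exist yet):
#
#   parser = LogParser(parser_obj, "/tmp/device.log")
#   result = parser.get_last_event(["power.state"])
#   if not result.timedout and result.count:
#       print(result.results_list[0]["power.state"])  # e.g. ['OFF']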
# ===== python/tarjan.py | zetraxus/GiS @ 1c3085c | MIT | 1,374 bytes =====
class TarjanAlgorithm(object):
""" A Tarjan's Algorithm for bridge finding
Initialize with graph as adjacency list. After
`run`, founded bridges are stored in `bridges` property.
"""
NOT_ANALYZED = 0
NO_PARENT = -1
def __init__(self, adjacency_list):
self._graph = adjacency_list
self._length = len(adjacency_list)
self.clean()
def clean(self):
self._bridges = []
self._dfs_numbers = [self.NOT_ANALYZED] * self._length
def run(self):
for i in range(self._length):
if self._dfs_numbers[i] == self.NOT_ANALYZED:
self._counter = 1
self._dfs_tarjan(i, self.NO_PARENT)
def _dfs_tarjan(self, v, parent):
self._dfs_numbers[v] = self._counter
low = self._counter
self._counter += 1
for u in self._graph[v]:
if u != parent:
if self._dfs_numbers[u] == self.NOT_ANALYZED:
tmp = self._dfs_tarjan(u, v)
if tmp < low:
low = tmp
elif self._dfs_numbers[u] < low:
low = self._dfs_numbers[u]
if parent > self.NO_PARENT and low == self._dfs_numbers[v]:
self._bridges.append((parent, v))
return low
@property
def bridges(self):
return self._bridges
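# A quick self-contained check of the class above. The assumed graph is a
# triangle (0-1-2) with a pendant vertex 3 attached to vertex 2, so the
# edge (2, 3) is the only bridge.
if __name__ == "__main__":
    graph = [[1, 2], [0, 2], [0, 1, 3], [2]]  # adjacency list, vertices 0..3
    tarjan = TarjanAlgorithm(graph)
    tarjan.run()
    print(tarjan.bridges)  # expected: [(2, 3)]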
# ===== src/sage/repl/ipython_kernel/install.py | fchapoton/sage @ 765c5cb | BSL-1.0 | 9,304 bytes =====
"""
Installing the SageMath Jupyter Kernel and Extensions
Kernels have to register themselves with Jupyter so that they appear
in the Jupyter notebook's kernel drop-down. This is done by
:class:`SageKernelSpec`.
.. NOTE::
The doctests in this module run in a temporary directory as the involved
directories might be different during runs of the tests and actual
installation and because we might be lacking write permission to places
such as ``/usr/share``.
"""
import os
import errno
from sage.env import (
SAGE_DOC, SAGE_LOCAL, SAGE_EXTCODE,
SAGE_VERSION,
MATHJAX_DIR, JSMOL_DIR, THREEJS_DIR,
)
class SageKernelSpec(object):
def __init__(self, prefix=None):
"""
Utility to manage SageMath kernels and extensions
INPUT:
- ``prefix`` -- (optional, default: ``sys.prefix``)
directory for the installation prefix
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: prefix = tmp_dir()
sage: spec = SageKernelSpec(prefix=prefix)
sage: spec._display_name # random output
'SageMath 6.9'
sage: spec.kernel_dir == SageKernelSpec(prefix=prefix).kernel_dir
True
"""
self._display_name = 'SageMath {0}'.format(SAGE_VERSION)
if prefix is None:
from sys import prefix
jupyter_dir = os.path.join(prefix, "share", "jupyter")
self.nbextensions_dir = os.path.join(jupyter_dir, "nbextensions")
self.kernel_dir = os.path.join(jupyter_dir, "kernels", self.identifier())
self._mkdirs()
def _mkdirs(self):
"""
Create necessary parent directories
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: spec._mkdirs()
sage: os.path.isdir(spec.nbextensions_dir)
True
"""
def mkdir_p(path):
try:
os.makedirs(path)
except OSError:
if not os.path.isdir(path):
raise
mkdir_p(self.nbextensions_dir)
mkdir_p(self.kernel_dir)
@classmethod
def identifier(cls):
"""
Internal identifier for the SageMath kernel
OUTPUT: the string ``"sagemath"``.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: SageKernelSpec.identifier()
'sagemath'
"""
return 'sagemath'
def symlink(self, src, dst):
"""
Symlink ``src`` to ``dst``
This is not an atomic operation.
Already-existing symlinks will be deleted, already existing
non-empty directories will be kept.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: path = tmp_dir()
sage: spec.symlink(os.path.join(path, 'a'), os.path.join(path, 'b'))
sage: os.listdir(path)
['b']
"""
try:
os.remove(dst)
except OSError as err:
if err.errno == errno.EEXIST:
return
os.symlink(src, dst)
def use_local_mathjax(self):
"""
Symlink SageMath's Mathjax install to the Jupyter notebook.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: spec.use_local_mathjax()
sage: mathjax = os.path.join(spec.nbextensions_dir, 'mathjax')
sage: os.path.isdir(mathjax)
True
"""
src = MATHJAX_DIR
dst = os.path.join(self.nbextensions_dir, 'mathjax')
self.symlink(src, dst)
def use_local_jsmol(self):
"""
Symlink jsmol to the Jupyter notebook.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: spec.use_local_jsmol()
sage: jsmol = os.path.join(spec.nbextensions_dir, 'jsmol')
sage: os.path.isdir(jsmol)
True
sage: os.path.isfile(os.path.join(jsmol, "JSmol.min.js"))
True
"""
src = os.path.join(JSMOL_DIR)
dst = os.path.join(self.nbextensions_dir, 'jsmol')
self.symlink(src, dst)
def use_local_threejs(self):
"""
Symlink threejs to the Jupyter notebook.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: spec.use_local_threejs()
sage: threejs = os.path.join(spec.nbextensions_dir, 'threejs')
sage: os.path.isdir(threejs)
True
"""
src = THREEJS_DIR
dst = os.path.join(self.nbextensions_dir, 'threejs')
self.symlink(src, dst)
def _kernel_cmd(self):
"""
Helper to construct the SageMath kernel command.
OUTPUT:
List of strings. The command to start a new SageMath kernel.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: spec._kernel_cmd()
['/.../sage',
'--python',
'-m',
'sage.repl.ipython_kernel',
'-f',
'{connection_file}']
"""
return [
os.path.join(SAGE_LOCAL, 'bin', 'sage'),
'--python',
'-m', 'sage.repl.ipython_kernel',
'-f', '{connection_file}',
]
def kernel_spec(self):
"""
Return the kernel spec as Python dictionary
OUTPUT:
A dictionary. See the Jupyter documentation for details.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: spec.kernel_spec()
{'argv': ..., 'display_name': 'SageMath ...', 'language': 'sage'}
"""
return dict(
argv=self._kernel_cmd(),
display_name=self._display_name,
language='sage',
)
def _install_spec(self):
"""
Install the SageMath Jupyter kernel
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: spec._install_spec()
"""
jsonfile = os.path.join(self.kernel_dir, "kernel.json")
import json
with open(jsonfile, 'w') as f:
json.dump(self.kernel_spec(), f)
def _symlink_resources(self):
"""
Symlink miscellaneous resources
This method symlinks additional resources (like the SageMath
documentation) into the SageMath kernel directory. This is
necessary to make the help links in the notebook work.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: spec = SageKernelSpec(prefix=tmp_dir())
sage: spec._install_spec()
sage: spec._symlink_resources()
"""
path = os.path.join(SAGE_EXTCODE, 'notebook-ipython')
for filename in os.listdir(path):
self.symlink(
os.path.join(path, filename),
os.path.join(self.kernel_dir, filename)
)
self.symlink(
os.path.join(SAGE_DOC, 'html', 'en'),
os.path.join(self.kernel_dir, 'doc')
)
@classmethod
def update(cls, *args, **kwds):
"""
Configure the Jupyter notebook for the SageMath kernel
This method does everything necessary to use the SageMath kernel,
you should never need to call any of the other methods
directly.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import SageKernelSpec
sage: SageKernelSpec.update(prefix=tmp_dir())
"""
instance = cls(*args, **kwds)
instance.use_local_mathjax()
instance.use_local_jsmol()
instance.use_local_threejs()
instance._install_spec()
instance._symlink_resources()
def have_prerequisites(debug=True):
"""
Check that we have all prerequisites to run the Jupyter notebook.
In particular, the Jupyter notebook requires OpenSSL whether or
not you are using https. See :trac:`17318`.
INPUT:
``debug`` -- boolean (default: ``True``). Whether to print debug
information when prerequisites are missing.
OUTPUT:
Boolean.
EXAMPLES::
sage: from sage.repl.ipython_kernel.install import have_prerequisites
sage: have_prerequisites(debug=False) in [True, False]
True
"""
try:
from notebook.notebookapp import NotebookApp  # import check only
return True
except ImportError:
if debug:
import traceback
traceback.print_exc()
return False
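# For reference, a sketch of the kernel.json that _install_spec() writes;
# the sage binary path and version shown are illustrative, not fixed:
#
#   {
#     "argv": ["/usr/local/sage/bin/sage", "--python", "-m",
#              "sage.repl.ipython_kernel", "-f", "{connection_file}"],
#     "display_name": "SageMath 9.0",
#     "language": "sage"
#   }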
# ===== google/ads/googleads/v7/enums/types/budget_type.py | wxxlouisa/google-ads-python @ f241379 | Apache-2.0 | 1,100 bytes =====
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v7.enums",
marshal="google.ads.googleads.v7",
manifest={"BudgetTypeEnum",},
)
class BudgetTypeEnum(proto.Message):
r"""Describes Budget types. """
class BudgetType(proto.Enum):
r"""Possible Budget types."""
UNSPECIFIED = 0
UNKNOWN = 1
STANDARD = 2
HOTEL_ADS_COMMISSION = 3
FIXED_CPA = 4
__all__ = tuple(sorted(__protobuf__.manifest))
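# A minimal usage sketch: proto-plus enums subclass enum.IntEnum, so members
# can be looked up by name or value and compared like integers.
if __name__ == "__main__":
    budget_type = BudgetTypeEnum.BudgetType.STANDARD
    print(budget_type.name, budget_type.value)  # STANDARD 2
    print(BudgetTypeEnum.BudgetType(3).name)    # HOTEL_ADS_COMMISSION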
# ===== tests/test_multiprocess.py | takeda/client_python @ 09fb459 | Apache-2.0 | 14,745 bytes =====
from __future__ import unicode_literals
import glob
import os
import shutil
import sys
import tempfile
import warnings
from prometheus_client import mmap_dict, values
from prometheus_client.core import (
CollectorRegistry, Counter, Gauge, Histogram, Sample, Summary,
)
from prometheus_client.multiprocess import (
mark_process_dead, MultiProcessCollector,
)
from prometheus_client.values import (
get_value_class, MultiProcessValue, MutexValue,
)
if sys.version_info < (2, 7):
# We need the skip decorators from unittest2 on Python 2.6.
import unittest2 as unittest
else:
import unittest
class TestMultiProcessDeprecation(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
def tearDown(self):
os.environ.pop('prometheus_multiproc_dir', None)
os.environ.pop('PROMETHEUS_MULTIPROC_DIR', None)
values.ValueClass = MutexValue
shutil.rmtree(self.tempdir)
def test_deprecation_warning(self):
os.environ['prometheus_multiproc_dir'] = self.tempdir
with warnings.catch_warnings(record=True) as w:
values.ValueClass = get_value_class()
registry = CollectorRegistry()
collector = MultiProcessCollector(registry)
Counter('c', 'help', registry=None)
assert os.environ['PROMETHEUS_MULTIPROC_DIR'] == self.tempdir
assert len(w) == 1
assert issubclass(w[-1].category, DeprecationWarning)
assert "PROMETHEUS_MULTIPROC_DIR" in str(w[-1].message)
def test_mark_process_dead_respects_lowercase(self):
os.environ['prometheus_multiproc_dir'] = self.tempdir
# Just test that this does not raise with a lowercase env var. The
# logic is tested elsewhere.
mark_process_dead(123)
class TestMultiProcess(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
os.environ['PROMETHEUS_MULTIPROC_DIR'] = self.tempdir
values.ValueClass = MultiProcessValue(lambda: 123)
self.registry = CollectorRegistry()
self.collector = MultiProcessCollector(self.registry)
@property
def _value_class(self):
return
def tearDown(self):
del os.environ['PROMETHEUS_MULTIPROC_DIR']
shutil.rmtree(self.tempdir)
values.ValueClass = MutexValue
def test_counter_adds(self):
c1 = Counter('c', 'help', registry=None)
values.ValueClass = MultiProcessValue(lambda: 456)
c2 = Counter('c', 'help', registry=None)
self.assertEqual(0, self.registry.get_sample_value('c_total'))
c1.inc(1)
c2.inc(2)
self.assertEqual(3, self.registry.get_sample_value('c_total'))
def test_summary_adds(self):
s1 = Summary('s', 'help', registry=None)
values.ValueClass = MultiProcessValue(lambda: 456)
s2 = Summary('s', 'help', registry=None)
self.assertEqual(0, self.registry.get_sample_value('s_count'))
self.assertEqual(0, self.registry.get_sample_value('s_sum'))
s1.observe(1)
s2.observe(2)
self.assertEqual(2, self.registry.get_sample_value('s_count'))
self.assertEqual(3, self.registry.get_sample_value('s_sum'))
def test_histogram_adds(self):
h1 = Histogram('h', 'help', registry=None)
values.ValueClass = MultiProcessValue(lambda: 456)
h2 = Histogram('h', 'help', registry=None)
self.assertEqual(0, self.registry.get_sample_value('h_count'))
self.assertEqual(0, self.registry.get_sample_value('h_sum'))
self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '5.0'}))
h1.observe(1)
h2.observe(2)
self.assertEqual(2, self.registry.get_sample_value('h_count'))
self.assertEqual(3, self.registry.get_sample_value('h_sum'))
self.assertEqual(2, self.registry.get_sample_value('h_bucket', {'le': '5.0'}))
def test_gauge_all(self):
g1 = Gauge('g', 'help', registry=None)
values.ValueClass = MultiProcessValue(lambda: 456)
g2 = Gauge('g', 'help', registry=None)
self.assertEqual(0, self.registry.get_sample_value('g', {'pid': '123'}))
self.assertEqual(0, self.registry.get_sample_value('g', {'pid': '456'}))
g1.set(1)
g2.set(2)
mark_process_dead(123)
self.assertEqual(1, self.registry.get_sample_value('g', {'pid': '123'}))
self.assertEqual(2, self.registry.get_sample_value('g', {'pid': '456'}))
def test_gauge_liveall(self):
g1 = Gauge('g', 'help', registry=None, multiprocess_mode='liveall')
values.ValueClass = MultiProcessValue(lambda: 456)
g2 = Gauge('g', 'help', registry=None, multiprocess_mode='liveall')
self.assertEqual(0, self.registry.get_sample_value('g', {'pid': '123'}))
self.assertEqual(0, self.registry.get_sample_value('g', {'pid': '456'}))
g1.set(1)
g2.set(2)
self.assertEqual(1, self.registry.get_sample_value('g', {'pid': '123'}))
self.assertEqual(2, self.registry.get_sample_value('g', {'pid': '456'}))
mark_process_dead(123, os.environ['PROMETHEUS_MULTIPROC_DIR'])
self.assertEqual(None, self.registry.get_sample_value('g', {'pid': '123'}))
self.assertEqual(2, self.registry.get_sample_value('g', {'pid': '456'}))
def test_gauge_min(self):
g1 = Gauge('g', 'help', registry=None, multiprocess_mode='min')
values.ValueClass = MultiProcessValue(lambda: 456)
g2 = Gauge('g', 'help', registry=None, multiprocess_mode='min')
self.assertEqual(0, self.registry.get_sample_value('g'))
g1.set(1)
g2.set(2)
self.assertEqual(1, self.registry.get_sample_value('g'))
def test_gauge_max(self):
g1 = Gauge('g', 'help', registry=None, multiprocess_mode='max')
values.ValueClass = MultiProcessValue(lambda: 456)
g2 = Gauge('g', 'help', registry=None, multiprocess_mode='max')
self.assertEqual(0, self.registry.get_sample_value('g'))
g1.set(1)
g2.set(2)
self.assertEqual(2, self.registry.get_sample_value('g'))
def test_gauge_livesum(self):
g1 = Gauge('g', 'help', registry=None, multiprocess_mode='livesum')
values.ValueClass = MultiProcessValue(lambda: 456)
g2 = Gauge('g', 'help', registry=None, multiprocess_mode='livesum')
self.assertEqual(0, self.registry.get_sample_value('g'))
g1.set(1)
g2.set(2)
self.assertEqual(3, self.registry.get_sample_value('g'))
mark_process_dead(123, os.environ['PROMETHEUS_MULTIPROC_DIR'])
self.assertEqual(2, self.registry.get_sample_value('g'))
def test_namespace_subsystem(self):
c1 = Counter('c', 'help', registry=None, namespace='ns', subsystem='ss')
c1.inc(1)
self.assertEqual(1, self.registry.get_sample_value('ns_ss_c_total'))
def test_counter_across_forks(self):
pid = 0
values.ValueClass = MultiProcessValue(lambda: pid)
c1 = Counter('c', 'help', registry=None)
self.assertEqual(0, self.registry.get_sample_value('c_total'))
c1.inc(1)
c1.inc(1)
pid = 1
c1.inc(1)
self.assertEqual(3, self.registry.get_sample_value('c_total'))
self.assertEqual(1, c1._value.get())
def test_initialization_detects_pid_change(self):
pid = 0
values.ValueClass = MultiProcessValue(lambda: pid)
# cannot inspect the files cache directly, as it's a closure, so we
# check for the actual files themselves
def files():
fs = os.listdir(os.environ['PROMETHEUS_MULTIPROC_DIR'])
fs.sort()
return fs
c1 = Counter('c1', 'c1', registry=None)
self.assertEqual(files(), ['counter_0.db'])
c2 = Counter('c2', 'c2', registry=None)
self.assertEqual(files(), ['counter_0.db'])
pid = 1
c3 = Counter('c3', 'c3', registry=None)
self.assertEqual(files(), ['counter_0.db', 'counter_1.db'])
@unittest.skipIf(sys.version_info < (2, 7), "Test requires Python 2.7+.")
def test_collect(self):
pid = 0
values.ValueClass = MultiProcessValue(lambda: pid)
labels = dict((i, i) for i in 'abcd')
def add_label(key, value):
l = labels.copy()
l[key] = value
return l
c = Counter('c', 'help', labelnames=labels.keys(), registry=None)
g = Gauge('g', 'help', labelnames=labels.keys(), registry=None)
h = Histogram('h', 'help', labelnames=labels.keys(), registry=None)
c.labels(**labels).inc(1)
g.labels(**labels).set(1)
h.labels(**labels).observe(1)
pid = 1
c.labels(**labels).inc(1)
g.labels(**labels).set(1)
h.labels(**labels).observe(5)
metrics = dict((m.name, m) for m in self.collector.collect())
self.assertEqual(
metrics['c'].samples, [Sample('c_total', labels, 2.0)]
)
metrics['g'].samples.sort(key=lambda x: x[1]['pid'])
self.assertEqual(metrics['g'].samples, [
Sample('g', add_label('pid', '0'), 1.0),
Sample('g', add_label('pid', '1'), 1.0),
])
metrics['h'].samples.sort(
key=lambda x: (x[0], float(x[1].get('le', 0)))
)
expected_histogram = [
Sample('h_bucket', add_label('le', '0.005'), 0.0),
Sample('h_bucket', add_label('le', '0.01'), 0.0),
Sample('h_bucket', add_label('le', '0.025'), 0.0),
Sample('h_bucket', add_label('le', '0.05'), 0.0),
Sample('h_bucket', add_label('le', '0.075'), 0.0),
Sample('h_bucket', add_label('le', '0.1'), 0.0),
Sample('h_bucket', add_label('le', '0.25'), 0.0),
Sample('h_bucket', add_label('le', '0.5'), 0.0),
Sample('h_bucket', add_label('le', '0.75'), 0.0),
Sample('h_bucket', add_label('le', '1.0'), 1.0),
Sample('h_bucket', add_label('le', '2.5'), 1.0),
Sample('h_bucket', add_label('le', '5.0'), 2.0),
Sample('h_bucket', add_label('le', '7.5'), 2.0),
Sample('h_bucket', add_label('le', '10.0'), 2.0),
Sample('h_bucket', add_label('le', '+Inf'), 2.0),
Sample('h_count', labels, 2.0),
Sample('h_sum', labels, 6.0),
]
self.assertEqual(metrics['h'].samples, expected_histogram)
@unittest.skipIf(sys.version_info < (2, 7), "Test requires Python 2.7+.")
def test_merge_no_accumulate(self):
pid = 0
values.ValueClass = MultiProcessValue(lambda: pid)
labels = dict((i, i) for i in 'abcd')
def add_label(key, value):
l = labels.copy()
l[key] = value
return l
h = Histogram('h', 'help', labelnames=labels.keys(), registry=None)
h.labels(**labels).observe(1)
pid = 1
h.labels(**labels).observe(5)
path = os.path.join(os.environ['PROMETHEUS_MULTIPROC_DIR'], '*.db')
files = glob.glob(path)
metrics = dict(
(m.name, m) for m in self.collector.merge(files, accumulate=False)
)
metrics['h'].samples.sort(
key=lambda x: (x[0], float(x[1].get('le', 0)))
)
expected_histogram = [
Sample('h_bucket', add_label('le', '0.005'), 0.0),
Sample('h_bucket', add_label('le', '0.01'), 0.0),
Sample('h_bucket', add_label('le', '0.025'), 0.0),
Sample('h_bucket', add_label('le', '0.05'), 0.0),
Sample('h_bucket', add_label('le', '0.075'), 0.0),
Sample('h_bucket', add_label('le', '0.1'), 0.0),
Sample('h_bucket', add_label('le', '0.25'), 0.0),
Sample('h_bucket', add_label('le', '0.5'), 0.0),
Sample('h_bucket', add_label('le', '0.75'), 0.0),
Sample('h_bucket', add_label('le', '1.0'), 1.0),
Sample('h_bucket', add_label('le', '2.5'), 0.0),
Sample('h_bucket', add_label('le', '5.0'), 1.0),
Sample('h_bucket', add_label('le', '7.5'), 0.0),
Sample('h_bucket', add_label('le', '10.0'), 0.0),
Sample('h_bucket', add_label('le', '+Inf'), 0.0),
Sample('h_sum', labels, 6.0),
]
self.assertEqual(metrics['h'].samples, expected_histogram)
def test_missing_gauge_file_during_merge(self):
# These files don't exist, just like if mark_process_dead(9999999) had been
# called during self.collector.collect(), after the glob found it
# but before the merge actually happened.
# This should not raise, and should return no metrics.
self.assertFalse(self.collector.merge([
os.path.join(self.tempdir, 'gauge_liveall_9999999.db'),
os.path.join(self.tempdir, 'gauge_livesum_9999999.db'),
]))
class TestMmapedDict(unittest.TestCase):
def setUp(self):
fd, self.tempfile = tempfile.mkstemp()
os.close(fd)
self.d = mmap_dict.MmapedDict(self.tempfile)
def test_process_restart(self):
self.d.write_value('abc', 123.0)
self.d.close()
self.d = mmap_dict.MmapedDict(self.tempfile)
self.assertEqual(123, self.d.read_value('abc'))
self.assertEqual([('abc', 123.0)], list(self.d.read_all_values()))
def test_expansion(self):
key = 'a' * mmap_dict._INITIAL_MMAP_SIZE
self.d.write_value(key, 123.0)
self.assertEqual([(key, 123.0)], list(self.d.read_all_values()))
def test_multi_expansion(self):
key = 'a' * mmap_dict._INITIAL_MMAP_SIZE * 4
self.d.write_value('abc', 42.0)
self.d.write_value(key, 123.0)
self.d.write_value('def', 17.0)
self.assertEqual(
[('abc', 42.0), (key, 123.0), ('def', 17.0)],
list(self.d.read_all_values()))
def test_corruption_detected(self):
self.d.write_value('abc', 42.0)
# corrupt the written data
self.d._m[8:16] = b'somejunk'
with self.assertRaises(RuntimeError):
list(self.d.read_all_values())
def tearDown(self):
os.unlink(self.tempfile)
class TestUnsetEnv(unittest.TestCase):
def setUp(self):
self.registry = CollectorRegistry()
fp, self.tmpfl = tempfile.mkstemp()
os.close(fp)
def test_unset_syncdir_env(self):
self.assertRaises(
ValueError, MultiProcessCollector, self.registry)
def test_file_syncpath(self):
registry = CollectorRegistry()
self.assertRaises(
ValueError, MultiProcessCollector, registry, self.tmpfl)
def tearDown(self):
os.remove(self.tmpfl)
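# A minimal sketch of the application-side pattern these tests exercise.
# The env var must point at an empty, writable directory *before*
# prometheus_client is imported, because the value class is chosen at
# import time:
#
#   import os, tempfile
#   os.environ["PROMETHEUS_MULTIPROC_DIR"] = tempfile.mkdtemp()
#
#   from prometheus_client import CollectorRegistry, Counter, multiprocess
#   registry = CollectorRegistry()
#   multiprocess.MultiProcessCollector(registry)
#   Counter("c", "help", registry=None).inc()
#   print(registry.get_sample_value("c_total"))  # 1.0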
# ===== sdk/recoveryservices/azure-mgmt-recoveryservices/azure/mgmt/recoveryservices/models/resource_certificate_and_aad_details.py | tzhanl/azure-sdk-for-python @ 18cd03f | MIT | 3,971 bytes =====
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource_certificate_details import ResourceCertificateDetails
class ResourceCertificateAndAadDetails(ResourceCertificateDetails):
"""Certificate details representing the Vault credentials for AAD.
All required parameters must be populated in order to send to Azure.
:param certificate: The base64 encoded certificate raw data string.
:type certificate: bytearray
:param friendly_name: Certificate friendly name.
:type friendly_name: str
:param issuer: Certificate issuer.
:type issuer: str
:param resource_id: Resource ID of the vault.
:type resource_id: long
:param subject: Certificate Subject Name.
:type subject: str
:param thumbprint: Certificate thumbprint.
:type thumbprint: str
:param valid_from: Certificate Validity start Date time.
:type valid_from: datetime
:param valid_to: Certificate Validity End Date time.
:type valid_to: datetime
:param auth_type: Required. Constant filled by server.
:type auth_type: str
:param aad_authority: Required. AAD tenant authority.
:type aad_authority: str
:param aad_tenant_id: Required. AAD tenant Id.
:type aad_tenant_id: str
:param service_principal_client_id: Required. AAD service principal
clientId.
:type service_principal_client_id: str
:param service_principal_object_id: Required. AAD service principal
ObjectId.
:type service_principal_object_id: str
:param azure_management_endpoint_audience: Required. Azure Management
Endpoint Audience.
:type azure_management_endpoint_audience: str
"""
_validation = {
'auth_type': {'required': True},
'aad_authority': {'required': True},
'aad_tenant_id': {'required': True},
'service_principal_client_id': {'required': True},
'service_principal_object_id': {'required': True},
'azure_management_endpoint_audience': {'required': True},
}
_attribute_map = {
'certificate': {'key': 'certificate', 'type': 'bytearray'},
'friendly_name': {'key': 'friendlyName', 'type': 'str'},
'issuer': {'key': 'issuer', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'long'},
'subject': {'key': 'subject', 'type': 'str'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
'valid_from': {'key': 'validFrom', 'type': 'iso-8601'},
'valid_to': {'key': 'validTo', 'type': 'iso-8601'},
'auth_type': {'key': 'authType', 'type': 'str'},
'aad_authority': {'key': 'aadAuthority', 'type': 'str'},
'aad_tenant_id': {'key': 'aadTenantId', 'type': 'str'},
'service_principal_client_id': {'key': 'servicePrincipalClientId', 'type': 'str'},
'service_principal_object_id': {'key': 'servicePrincipalObjectId', 'type': 'str'},
'azure_management_endpoint_audience': {'key': 'azureManagementEndpointAudience', 'type': 'str'},
}
def __init__(self, **kwargs):
super(ResourceCertificateAndAadDetails, self).__init__(**kwargs)
self.aad_authority = kwargs.get('aad_authority', None)
self.aad_tenant_id = kwargs.get('aad_tenant_id', None)
self.service_principal_client_id = kwargs.get('service_principal_client_id', None)
self.service_principal_object_id = kwargs.get('service_principal_object_id', None)
self.azure_management_endpoint_audience = kwargs.get('azure_management_endpoint_audience', None)
self.auth_type = 'AzureActiveDirectory'
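# A minimal construction sketch (all field values below are placeholders):
#
#   details = ResourceCertificateAndAadDetails(
#       aad_authority="https://login.microsoftonline.com",
#       aad_tenant_id="00000000-0000-0000-0000-000000000000",
#       service_principal_client_id="<client-id>",
#       service_principal_object_id="<object-id>",
#       azure_management_endpoint_audience="https://management.azure.com",
#   )
#   assert details.auth_type == "AzureActiveDirectory"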
# ===== lib/surface/compute/interconnects/attachments/describe.py | google-cloud-sdk-unofficial/google-cloud-sdk @ 2a48a04 | Apache-2.0 | 1,985 bytes =====
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for describing interconnects attachments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute.interconnects.attachments import client
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute import flags as compute_flags
from googlecloudsdk.command_lib.compute.interconnects.attachments import flags
class Describe(base.DescribeCommand):
"""Describe a Compute Engine interconnect attachment.
*{command}* displays all data associated with a Compute Engine
interconnect attachment in a project.
"""
INTERCONNECT_ATTACHMENT_ARG = None
@classmethod
def Args(cls, parser):
cls.INTERCONNECT_ATTACHMENT_ARG = flags.InterconnectAttachmentArgument()
cls.INTERCONNECT_ATTACHMENT_ARG.AddArgument(
parser, operation_type='describe')
def Run(self, args):
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
ref = self.INTERCONNECT_ATTACHMENT_ARG.ResolveAsResource(
args,
holder.resources,
scope_lister=compute_flags.GetDefaultScopeLister(holder.client))
interconnect_attachment = client.InterconnectAttachment(
ref, compute_client=holder.client)
return interconnect_attachment.Describe()
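# Invocation sketch (the attachment name and region are placeholders):
#
#   $ gcloud compute interconnects attachments describe my-attachment \
#       --region us-central1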
# ===== rlkit/core/batch_rl_algorithm.py | hammer-wang/rlkit-pybullet @ d4b47f9 | MIT | 3,614 bytes =====
import abc
import gtimer as gt
from rlkit.core.rl_algorithm import BaseRLAlgorithm
from rlkit.data_management.replay_buffer import ReplayBuffer
from rlkit.samplers.data_collector import PathCollector
import pickle as pkl
class BatchRLAlgorithm(BaseRLAlgorithm, metaclass=abc.ABCMeta):
def __init__(
self,
trainer,
exploration_env,
evaluation_env,
exploration_data_collector: PathCollector,
evaluation_data_collector: PathCollector,
replay_buffer: ReplayBuffer,
batch_size,
max_path_length,
num_epochs,
num_eval_steps_per_epoch,
num_expl_steps_per_train_loop,
num_trains_per_train_loop,
num_train_loops_per_epoch=1,
min_num_steps_before_training=0,
):
super().__init__(
trainer,
exploration_env,
evaluation_env,
exploration_data_collector,
evaluation_data_collector,
replay_buffer,
)
self.batch_size = batch_size
self.max_path_length = max_path_length
self.num_epochs = num_epochs
self.num_eval_steps_per_epoch = num_eval_steps_per_epoch
self.num_trains_per_train_loop = num_trains_per_train_loop
self.num_train_loops_per_epoch = num_train_loops_per_epoch
self.num_expl_steps_per_train_loop = num_expl_steps_per_train_loop
self.min_num_steps_before_training = min_num_steps_before_training
def _train(self):
if self.min_num_steps_before_training > 0:
init_expl_paths = self.expl_data_collector.collect_new_paths(
self.max_path_length,
self.min_num_steps_before_training,
discard_incomplete_paths=False,
)
self.replay_buffer.add_paths(init_expl_paths)
self.expl_data_collector.end_epoch(-1)
for epoch in gt.timed_for(
range(self._start_epoch, self.num_epochs),
save_itrs=True,
):
self.eval_data_collector.collect_new_paths(
self.max_path_length,
self.num_eval_steps_per_epoch,
discard_incomplete_paths=True,
)
gt.stamp('evaluation sampling')
for _ in range(self.num_train_loops_per_epoch):
new_expl_paths = self.expl_data_collector.collect_new_paths(
self.max_path_length,
self.num_expl_steps_per_train_loop,
discard_incomplete_paths=False,
)
gt.stamp('exploration sampling', unique=False)
self.replay_buffer.add_paths(new_expl_paths)
gt.stamp('data storing', unique=False)
self.training_mode(True)
for _ in range(self.num_trains_per_train_loop):
train_data = self.replay_buffer.random_batch(
self.batch_size)
self.trainer.train(train_data)
gt.stamp('training', unique=False)
self.training_mode(False)
self._end_epoch(epoch)
def _eval(self, save_path):
'''
Generate rollouts from the model and pickle them to save_path.
'''
self.eval_data_collector.end_epoch(-1)
paths = self.eval_data_collector.collect_new_paths(
self.max_path_length,
self.num_eval_steps_per_epoch,
discard_incomplete_paths=False,
)
pkl.dump(paths, open(save_path, 'wb'))
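# Shape of one epoch in _train() above, for reference:
#   1. collect num_eval_steps_per_epoch steps with the evaluation collector
#   2. repeat num_train_loops_per_epoch times:
#        a. collect num_expl_steps_per_train_loop exploration steps
#        b. add the new paths to the replay buffer
#        c. run num_trains_per_train_loop training calls, each on a random
#           batch of batch_size transitions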
# ===== lambda_function.py | Greg-s-Tutorials/vanity-numbers @ 9f31f16 | MIT | 1,528 bytes =====
import boto3
from vanitynumbers import (
validate_phone_number,
save_vanity_numbers,
get_response_message,
find_words
)
from vanitynumbers.data import lambda_test_events
def lambda_handler(event, context):
# Get the service resource.
ddb = boto3.resource('dynamodb')
# Instantiate vanity_numbers table resource
table = ddb.Table('vanity_numbers')
# Get customer phone_number from AWS Connect contact flow event
phone_number = event["Details"]["ContactData"]["CustomerEndpoint"]["Address"]
number_validation = validate_phone_number(phone_number)
if number_validation["valid"] is False:
return { "ResponseMessage": "Sorry, we can process your request at this time." }
validated_phone_number = number_validation["full_number"]
# Check to see if the vanity numbers for the phone
# number have already been saved.
response = table.get_item(
Key={
"phone_number": validated_phone_number
}
)
print("GET RESPONSE: ", response)
if "Item" in response:
item = response["Item"]
return { "ResponseMessage": get_response_message(item["vanity_numbers"]) }
vanity_numbers = find_words(validated_phone_number)
response = save_vanity_numbers(validated_phone_number, vanity_numbers)
if response["HTTPStatusCode"] < 200 or response["HTTPStatusCode"] > 299:
return { "ResponseMessage": "Sorry, we can process your request at this time." }
return { "ResponseMessage": get_response_message(vanity_numbers) }
if __name__ == "__main__":
    # Local smoke test only; AWS Lambda invokes lambda_handler directly.
    lambda_handler(lambda_test_events.test_1, None)
# ===== kubernetes/client/apis/authorization_v1beta1_api.py | iamneha/python @ 5b208a1 | Apache-2.0 | 27,022 bytes =====
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class AuthorizationV1beta1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_namespaced_local_subject_access_review(self, namespace, body, **kwargs):
"""
create a LocalSubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_local_subject_access_review(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1LocalSubjectAccessReview body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param bool include_uninitialized: If IncludeUninitialized is specified, the object may be returned without completing initialization.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1LocalSubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_namespaced_local_subject_access_review_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_local_subject_access_review_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_local_subject_access_review_with_http_info(self, namespace, body, **kwargs):
"""
create a LocalSubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_local_subject_access_review_with_http_info(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1LocalSubjectAccessReview body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param bool include_uninitialized: If IncludeUninitialized is specified, the object may be returned without completing initialization.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1LocalSubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'dry_run', 'include_uninitialized', 'pretty']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_local_subject_access_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_local_subject_access_review`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_local_subject_access_review`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/authorization.k8s.io/v1beta1/namespaces/{namespace}/localsubjectaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1LocalSubjectAccessReview',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_self_subject_access_review(self, body, **kwargs):
"""
create a SelfSubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_self_subject_access_review(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1beta1SelfSubjectAccessReview body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param bool include_uninitialized: If IncludeUninitialized is specified, the object may be returned without completing initialization.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1SelfSubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_self_subject_access_review_with_http_info(body, **kwargs)
else:
(data) = self.create_self_subject_access_review_with_http_info(body, **kwargs)
return data
def create_self_subject_access_review_with_http_info(self, body, **kwargs):
"""
create a SelfSubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_self_subject_access_review_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1beta1SelfSubjectAccessReview body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param bool include_uninitialized: If IncludeUninitialized is specified, the object may be returned without completing initialization.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1SelfSubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'dry_run', 'include_uninitialized', 'pretty']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_self_subject_access_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_self_subject_access_review`")
collection_formats = {}
path_params = {}
query_params = []
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/authorization.k8s.io/v1beta1/selfsubjectaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1SelfSubjectAccessReview',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_self_subject_rules_review(self, body, **kwargs):
"""
create a SelfSubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_self_subject_rules_review(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1beta1SelfSubjectRulesReview body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param bool include_uninitialized: If IncludeUninitialized is specified, the object may be returned without completing initialization.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1SelfSubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_self_subject_rules_review_with_http_info(body, **kwargs)
else:
(data) = self.create_self_subject_rules_review_with_http_info(body, **kwargs)
return data
def create_self_subject_rules_review_with_http_info(self, body, **kwargs):
"""
create a SelfSubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_self_subject_rules_review_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1beta1SelfSubjectRulesReview body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param bool include_uninitialized: If IncludeUninitialized is specified, the object may be returned without completing initialization.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1SelfSubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'dry_run', 'include_uninitialized', 'pretty']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_self_subject_rules_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_self_subject_rules_review`")
collection_formats = {}
path_params = {}
query_params = []
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/authorization.k8s.io/v1beta1/selfsubjectrulesreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1SelfSubjectRulesReview',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_subject_access_review(self, body, **kwargs):
"""
create a SubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_subject_access_review(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1beta1SubjectAccessReview body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param bool include_uninitialized: If IncludeUninitialized is specified, the object may be returned without completing initialization.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1SubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_subject_access_review_with_http_info(body, **kwargs)
else:
(data) = self.create_subject_access_review_with_http_info(body, **kwargs)
return data
def create_subject_access_review_with_http_info(self, body, **kwargs):
"""
create a SubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_subject_access_review_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1beta1SubjectAccessReview body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param bool include_uninitialized: If IncludeUninitialized is specified, the object may be returned without completing initialization.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1SubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'dry_run', 'include_uninitialized', 'pretty']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_subject_access_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_subject_access_review`")
collection_formats = {}
path_params = {}
query_params = []
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/authorization.k8s.io/v1beta1/subjectaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1SubjectAccessReview',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_api_resources_with_http_info(**kwargs)
else:
(data) = self.get_api_resources_with_http_info(**kwargs)
return data
def get_api_resources_with_http_info(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/authorization.k8s.io/v1beta1/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
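# Hedged usage sketch (not part of the generated client): assuming this class is the
# Kubernetes Python client's AuthorizationV1beta1Api and a kubeconfig is available,
# a synchronous and an asynchronous call might look like:
#
#   from kubernetes import client, config
#   config.load_kube_config()
#   api = client.AuthorizationV1beta1Api()
#   body = client.V1beta1SelfSubjectAccessReview(
#       spec=client.V1beta1SelfSubjectAccessReviewSpec(
#           resource_attributes=client.V1beta1ResourceAttributes(
#               namespace="default", verb="get", resource="pods")))
#   print(api.create_self_subject_access_review(body).status.allowed)   # sync
#   thread = api.create_self_subject_access_review(body, async_req=True)
#   print(thread.get().status.allowed)                                  # async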
| 47.911348
| 276
| 0.610281
|
190f76f35bd3cf40c76bb744f9d3205abccfc1e0
| 11,272
|
py
|
Python
|
tools/ECPB/create_tfrecords.py
|
Ernstsen/Pedestron
|
0c5aa35881561bcd0acf5de8939472efd6409256
|
[
"Apache-2.0"
] | 594
|
2020-03-20T11:52:59.000Z
|
2022-03-30T11:58:55.000Z
|
tools/ECPB/create_tfrecords.py
|
Ernstsen/Pedestron
|
0c5aa35881561bcd0acf5de8939472efd6409256
|
[
"Apache-2.0"
] | 131
|
2020-03-25T09:48:04.000Z
|
2022-03-30T17:54:38.000Z
|
tools/ECPB/create_tfrecords.py
|
Ernstsen/Pedestron
|
0c5aa35881561bcd0acf5de8939472efd6409256
|
[
"Apache-2.0"
] | 128
|
2020-03-20T14:22:11.000Z
|
2022-03-22T09:41:39.000Z
|
import glob
import hashlib
import json
import logging
import os
import pickle
import time
from concurrent.futures import ThreadPoolExecutor
import numpy as np
import tensorflow as tf
logging.basicConfig(level=logging.INFO,
format='%(asctime)s, %(levelname)-8s %(message)s',
datefmt='%a, %d %b %Y %H:%M:%S',
)
tf.app.flags.DEFINE_string('out_dir', './data/ecp/tfrecords', 'Output directory for the created tfrecord files.')
tf.app.flags.DEFINE_string('dataset_name', 'ecp-day',
'Name of the dataset, used to create the tfrecord files.')
tf.app.flags.DEFINE_string('anno_path', './data/day/labels',
'Base directory which contains the ecp annotations.')
tf.app.flags.DEFINE_string('img_path', './data/day/img',
'Base directory which contains the ecp images.')
tf.app.flags.DEFINE_integer('train_shards', 20, 'Number of training shards.')
tf.app.flags.DEFINE_integer('val_shards', 4, 'Number of validation shards.')
tf.app.flags.DEFINE_integer('shuffle', 1, 'Shuffle the data before writing it to tfrecord files.')
FLAGS = tf.app.flags.FLAGS
def int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def int64_list_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
def bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def bytes_list_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
def float_list_feature(value):
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
class ExampleCreator:
def __init__(self, create_sha256_key=False):
self.create_sha256_key = create_sha256_key
# Create a single Session to run all image coding calls.
self._sess = tf.Session(config=tf.ConfigProto(device_count={'GPU': 1}, gpu_options={'allow_growth': True}))
# Initializes function that decodes RGB PNG data.
self._decode_data = tf.placeholder(dtype=tf.string)
self._decoded = tf.image.decode_png(self._decode_data, channels=3)
self._encode_data = tf.placeholder(dtype=tf.uint8)
self._encoded = tf.image.encode_png(self._encode_data)
self.identity_to_label = {
'pedestrian': 0,
'rider': 1,
}
def decode_png(self, img_data):
img = self._sess.run(self._decoded, feed_dict={self._decode_data: img_data})
assert len(img.shape) == 3
assert img.shape[2] == 3
return img
def encode_png(self, img):
assert len(img.shape) == 3
assert img.shape[2] == 3
return self._sess.run(self._encoded, feed_dict={self._encode_data: img})
def load_img(self, path):
ext = os.path.splitext(path)[1]
if path.endswith('.pgm'):
raise NotImplementedError('pgm not supported')
if path.endswith('.png'):
with tf.gfile.FastGFile(path, 'rb') as f:
img_data = f.read()
            # decoding and re-encoding validates the PNG data and normalizes its encoding
return self.decode_png(img_data), ext[1:]
else:
raise NotImplementedError('unknown file format: {}'.format(ext))
def create_example(self, img_path, anno_path):
assert os.path.splitext(os.path.basename(img_path))[0] == os.path.splitext(os.path.basename(anno_path))[0]
        img, img_format = self.load_img(img_path)
with open(anno_path, 'r') as f:
annotations = json.load(f)
img_height, img_width = img.shape[:2]
assert img_height == 1024
assert img_width == 1920
encoded = self.encode_png(img)
if self.create_sha256_key:
key = hashlib.sha256(encoded).hexdigest()
else:
key = '__no_key_generated__'
ymin, xmin, ymax, xmax, label, text = [], [], [], [], [], []
img_tags = [tag.encode('utf8') for tag in annotations['tags']]
skipped_annotations = 0
box_cnt = 0
box_sizes = []
for anno in annotations['children']:
if anno['identity'] not in self.identity_to_label.keys():
skipped_annotations += 1
continue
# TODO add loading of ignore regions if you want to use them
box_cnt += 1
if anno['identity'] == 'rider':
pass
# TODO consider bounding box of ridden vehicle that is stored in anno['children']
cls_label = self.identity_to_label[anno['identity']]
ymin.append(float(anno['y0']) / img_height)
xmin.append(float(anno['x0']) / img_width)
ymax.append(float(anno['y1']) / img_height)
xmax.append(float(anno['x1']) / img_width)
            if xmax[-1] > 1:
                logging.warning('Box extends past the right image border in %s', img_path)
label.append(cls_label)
text.append(anno['identity'].encode('utf8'))
h = ymax[-1] - ymin[-1]
w = xmax[-1] - xmin[-1]
box_sizes.append((h, w))
if skipped_annotations > 0:
            logging.debug(
                'Skipped {}/{} annotations for img {}'.format(skipped_annotations, len(annotations['children']), img_path))
feature_dict = {
'image/height': int64_feature(img_height),
'image/width': int64_feature(img_width),
'img/tags': bytes_list_feature(img_tags),
'image/filename': bytes_feature(img_path.encode('utf8')),
'image/source_id': bytes_feature(img_path.encode('utf8')),
'image/key/sha256': bytes_feature(key.encode('utf8')),
'image/encoded': bytes_feature(encoded),
            'image/format': bytes_feature(img_format.encode('utf8')),
'image/object/bbox/xmin': float_list_feature(xmin),
'image/object/bbox/xmax': float_list_feature(xmax),
'image/object/bbox/ymin': float_list_feature(ymin),
'image/object/bbox/ymax': float_list_feature(ymax),
'image/object/class/text': bytes_list_feature(text),
'image/object/class/label': int64_list_feature(label),
'image/object/cnt': int64_feature(box_cnt),
}
example = tf.train.Example(features=tf.train.Features(feature=feature_dict))
return example, skipped_annotations, box_sizes, (img_height, img_width)
def write_shard(args):
shard, num_shards, type, data, example_creator = args
out_fn = '{}-{}-{:05d}-of-{:05d}'.format(FLAGS.dataset_name, type, shard, num_shards)
out_file = os.path.join(FLAGS.out_dir, out_fn)
writer = tf.python_io.TFRecordWriter(out_file)
logging.info('Creating shard {}-{}/{}'.format(type, shard, num_shards))
skipped_annotations = 0
box_sizes = []
img_sizes = set()
cnt = 0
for cnt, datum in enumerate(data, start=1):
img_path, anno_path = datum
example, skipped, sizes, img_size = example_creator.create_example(img_path, anno_path)
skipped_annotations += skipped
box_sizes.extend(sizes)
img_sizes.add(img_size)
writer.write(example.SerializeToString())
if cnt % 10 == 0:
logging.info('Written {} examples for shard {}-{}/{}'.format(cnt, type, shard, num_shards))
    if skipped_annotations > 0:
        logging.info('Skipped {} annotations in shard {}-{}/{}'.format(skipped_annotations, type, shard, num_shards))
logging.info(
'Finished shard {}-{}/{}: {} examples written and {} annotations skipped'.format(type, shard, num_shards, cnt,
skipped_annotations))
return box_sizes, type, img_sizes
def create_jobs(type, data, num_shards, example_creator):
if FLAGS.shuffle:
np.random.shuffle(data)
# split into roughly even sized pieces
k, m = divmod(len(data), num_shards)
shards = [data[i * k + min(i, m):(i + 1) * k + min(i + 1, m)] for i in range(num_shards)]
    # sanity-check that the split preserved every item
total_length = 0
for shard in shards:
total_length += len(shard)
assert total_length == len(data)
# create and run jobs
    jobs = [(shard_id + 1, num_shards, type, shard_data, example_creator) for shard_id, shard_data in enumerate(shards)]
return jobs
def get_files(path, ext):
files = glob.glob(os.path.join(path, '*', '*.{}'.format(ext)))
files = sorted(files)
return files
def process_dataset():
create_dirs([FLAGS.out_dir])
if FLAGS.shuffle:
with open(os.path.join(FLAGS.out_dir, FLAGS.dataset_name + '-np_random_state'), 'wb') as f:
pickle.dump(np.random.get_state(), f)
# prepare train and val splits
train_img_path = os.path.join(FLAGS.img_path, 'train')
val_img_path = os.path.join(FLAGS.img_path, 'val')
train_imgs = get_files(train_img_path, 'png')
val_imgs = get_files(val_img_path, 'png')
train_anno_path = os.path.join(FLAGS.anno_path, 'train')
val_anno_path = os.path.join(FLAGS.anno_path, 'val')
train_annos = get_files(train_anno_path, 'json')
val_annos = get_files(val_anno_path, 'json')
train_data = list(zip(train_imgs, train_annos))
val_data = list(zip(val_imgs, val_annos))
# object which does all the hard work
example_creator = ExampleCreator()
# Process each split in a different thread
train_jobs = create_jobs('train', train_data, FLAGS.train_shards, example_creator)
val_jobs = create_jobs('val', val_data, FLAGS.val_shards, example_creator)
jobs = train_jobs + val_jobs
with ThreadPoolExecutor() as executor:
result = executor.map(write_shard, jobs,
chunksize=1) # chunksize=1 is important, since our jobs are long running
box_sizes = []
img_sizes = set()
for sizes, type, img_sizes_ in result:
img_sizes.update(img_sizes_)
if type == 'train':
box_sizes.extend(sizes)
if len(img_sizes) > 1:
logging.error('Different image sizes detected: {}'.format(img_sizes))
box_sizes = np.array(box_sizes, np.float64)
np.save(os.path.join(FLAGS.out_dir, FLAGS.dataset_name + '-box_sizes'), box_sizes)
np.save(os.path.join(FLAGS.out_dir, FLAGS.dataset_name + '-img_size_height_width'), list(img_sizes)[0])
def create_dirs(dirs):
for dir in dirs:
try:
os.makedirs(dir)
except OSError:
assert os.path.isdir(dir), '{} exists but is not a directory'.format(dir)
def main(args):
assert FLAGS.out_dir
assert FLAGS.dataset_name
assert FLAGS.img_path
assert FLAGS.anno_path
assert FLAGS.train_shards
assert FLAGS.val_shards
logging.info('Saving results to {}'.format(FLAGS.out_dir))
logging.info('----- START -----')
start = time.time()
process_dataset()
end = time.time()
elapsed = int(end - start)
logging.info('----- FINISHED in {:02d}:{:02d}:{:02d} -----'.format(elapsed // 3600,
(elapsed // 60) % 60,
elapsed % 60))
if __name__ == '__main__':
tf.app.run()
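# Hedged usage sketch (values are the flag defaults above; adjust to your data layout):
#   python create_tfrecords.py \
#       --out_dir ./data/ecp/tfrecords --dataset_name ecp-day \
#       --anno_path ./data/day/labels --img_path ./data/day/img \
#       --train_shards 20 --val_shards 4 --shuffle 1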
| 36.478964
| 118
| 0.620564
|
8288c390f0d456fcec03927cf004da68c0ff66fe
| 172
|
py
|
Python
|
pra subir/pythonexercicios/ex27.py
|
daianebandeira88/curso-python
|
763f5f36b6d7329549ad861c63acc3c84aade887
|
[
"MIT"
] | null | null | null |
pra subir/pythonexercicios/ex27.py
|
daianebandeira88/curso-python
|
763f5f36b6d7329549ad861c63acc3c84aade887
|
[
"MIT"
] | null | null | null |
pra subir/pythonexercicios/ex27.py
|
daianebandeira88/curso-python
|
763f5f36b6d7329549ad861c63acc3c84aade887
|
[
"MIT"
] | null | null | null |
n = str(input('Type your full name: ')).strip()
nome = n.split()
print('Your first name is {}'.format(nome[0]))
print('and your last name is {}'.format(nome[-1]))
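# Example session: entering "Ada Byron Lovelace" prints
#   Your first name is Ada
#   and your last name is Lovelace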
| 34.4
| 57
| 0.668605
|
7330afd9515754ce8b2a9f7adca4f41c54b24485
| 1,112
|
py
|
Python
|
gluonnlp/__init__.py
|
bkktimber/gluon-nlp
|
205acce13a83b30eabd7a638e4773e7a4f91059a
|
[
"Apache-2.0"
] | null | null | null |
gluonnlp/__init__.py
|
bkktimber/gluon-nlp
|
205acce13a83b30eabd7a638e4773e7a4f91059a
|
[
"Apache-2.0"
] | null | null | null |
gluonnlp/__init__.py
|
bkktimber/gluon-nlp
|
205acce13a83b30eabd7a638e4773e7a4f91059a
|
[
"Apache-2.0"
] | 1
|
2018-09-18T08:39:00.000Z
|
2018-09-18T08:39:00.000Z
|
# coding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=wildcard-import
"""NLP toolkit."""
from . import loss
from . import data
from . import embedding
from . import model
from . import initializer
from .vocab import *
__version__ = '0.4.1'
__all__ = ['data',
'model',
'embedding',
'Vocab',
'loss',
'initializer']
| 30.054054
| 62
| 0.710432
|
aa286d2118c13d6018202f28ff475a01ef7d8093
| 1,964
|
py
|
Python
|
tests/accelerators/ddp_model.py
|
javierlorenzod/pytorch-lightning
|
6dba26666aa564db414eb238d99a4213006d8220
|
[
"Apache-2.0"
] | 1
|
2021-08-05T01:45:26.000Z
|
2021-08-05T01:45:26.000Z
|
tests/accelerators/ddp_model.py
|
javierlorenzod/pytorch-lightning
|
6dba26666aa564db414eb238d99a4213006d8220
|
[
"Apache-2.0"
] | null | null | null |
tests/accelerators/ddp_model.py
|
javierlorenzod/pytorch-lightning
|
6dba26666aa564db414eb238d99a4213006d8220
|
[
"Apache-2.0"
] | 1
|
2021-02-16T00:47:46.000Z
|
2021-02-16T00:47:46.000Z
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Runs either `.fit()` or `.test()` on a single node across multiple gpus.
"""
import os
from argparse import ArgumentParser
import torch
from pytorch_lightning import seed_everything, Trainer
from tests.base import EvalModelTemplate
def main():
seed_everything(1234)
parser = ArgumentParser(add_help=False)
parser = Trainer.add_argparse_args(parser)
parser.add_argument('--trainer_method', default='fit')
parser.add_argument('--tmpdir')
parser.add_argument('--workdir')
parser.set_defaults(gpus=2)
parser.set_defaults(accelerator="ddp")
args = parser.parse_args()
model = EvalModelTemplate()
trainer = Trainer.from_argparse_args(args)
result = {}
if args.trainer_method == 'fit':
trainer.fit(model)
result = {'status': 'complete', 'method': args.trainer_method, 'result': None}
if args.trainer_method == 'test':
result = trainer.test(model)
result = {'status': 'complete', 'method': args.trainer_method, 'result': result}
if args.trainer_method == 'fit_test':
trainer.fit(model)
result = trainer.test(model)
result = {'status': 'complete', 'method': args.trainer_method, 'result': result}
if len(result) > 0:
file_path = os.path.join(args.tmpdir, 'ddp.result')
torch.save(result, file_path)
if __name__ == '__main__':
main()
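# Hedged usage sketch (the tmpdir path is hypothetical): run on a single node with 2 GPUs:
#   python ddp_model.py --trainer_method fit_test --tmpdir /tmp/ddp --gpus 2 --accelerator ddp
# The result dict is saved to <tmpdir>/ddp.result and can be loaded with torch.load().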
| 32.733333
| 88
| 0.697556
|
495245d32fc4b2dd0f3d070ae40b9e0f731c01a5
| 4,093
|
py
|
Python
|
imagebot/_template_matcher.py
|
jnhyperion/ImageBot
|
d8b8f917bdafc6a4e4feb0e17d99a81853f2605a
|
[
"MIT"
] | null | null | null |
imagebot/_template_matcher.py
|
jnhyperion/ImageBot
|
d8b8f917bdafc6a4e4feb0e17d99a81853f2605a
|
[
"MIT"
] | null | null | null |
imagebot/_template_matcher.py
|
jnhyperion/ImageBot
|
d8b8f917bdafc6a4e4feb0e17d99a81853f2605a
|
[
"MIT"
] | null | null | null |
import cv2
import math
import numpy as np
from typing import List, Union, Tuple
from ._base_matcher import BaseMatcher
from ._convertor import convert_images
from ._results import MatchingResult
class TemplateMatcher(BaseMatcher):
def __init__(
self,
image_path: str,
template_path: str,
convert_2_gray: bool = False,
tolerance: float = 0.8,
template_from_resolution: Union[None, Tuple[int, int]] = None,
):
super().__init__(image_path, template_path, convert_2_gray=convert_2_gray)
self.tolerance = tolerance
self.template_from_resolution = template_from_resolution
self._converted_image = None
self._converted_template = None
def find_all_results(self) -> List[MatchingResult]:
res = self._cv2_match_template()
all_matches = np.where(res >= self.tolerance)
points = zip(*all_matches[::-1])
non_overlapped_points = []
for pt in points:
is_overlapped = False
for non_overlapped_pt in non_overlapped_points:
dist = math.hypot(
non_overlapped_pt[0] - pt[0], non_overlapped_pt[1] - pt[1]
)
if dist < 5:
# points are too close, consider they are overlapped
is_overlapped = True
break
if not is_overlapped:
non_overlapped_points.append(pt)
results: List[MatchingResult] = []
for pt in non_overlapped_points:
rectangle = self._get_rectangle(pt)
center = self._get_rectangle_center(pt)
one_good_match = MatchingResult(
center=center, rect=rectangle, confidence=float(res[pt[1]][pt[0]])
)
results.append(one_good_match)
return results
def find_best_result(self) -> Union[MatchingResult, None]:
res = self._cv2_match_template()
_, confidence, _, pt = cv2.minMaxLoc(res)
rectangle = self._get_rectangle(pt)
center = self._get_rectangle_center(pt)
best_match = MatchingResult(
center=center, rect=rectangle, confidence=float(confidence)
)
return best_match if confidence >= self.tolerance else None
def _cv2_match_template(self):
self._converted_image, self._converted_template = convert_images(
self.image, self.template, self.convert_2_gray
)
if self.template_from_resolution is not None:
try:
_template_resolution = (
int(
self.w_template
* self.w_image
/ self.template_from_resolution[0]
),
int(
self.h_template
* self.h_image
/ self.template_from_resolution[1]
),
)
self._converted_template = cv2.resize(
self._converted_template,
_template_resolution,
interpolation=cv2.INTER_NEAREST,
)
except Exception as e:
print(
f"Fail to resize template based on the given image resolution {self.template_from_resolution}: {e}"
)
return cv2.matchTemplate(
self._converted_image, self._converted_template, cv2.TM_CCOEFF_NORMED
)
def _get_rectangle(self, loc) -> Tuple[Tuple[int, int], Tuple[int, int]]:
x, y = loc
h, w = self._get_converted_wh()
return (int(x), int(y)), (int(x + w), int(y + h))
def _get_converted_wh(self):
if self._converted_template is not None:
_h, _w = self._converted_template.shape[:2]
else:
_h, _w = self.h_template, self.w_template
return _h, _w
def _get_rectangle_center(self, loc) -> Tuple[int, int]:
x, y = loc
h, w = self._get_converted_wh()
return int(x + w / 2), int(y + h / 2)
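# Hedged usage sketch (file names are hypothetical):
#   matcher = TemplateMatcher("screenshot.png", "button.png", tolerance=0.9)
#   best = matcher.find_best_result()
#   if best is not None:
#       print(best.center, best.confidence)
#   for match in matcher.find_all_results():
#       print(match.rect)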
| 37.550459
| 119
| 0.571708
|
540ef532f525f94d29e608fcf9ec7ebab3d351e3
| 338
|
py
|
Python
|
sympy/plotting/pygletplot/plot_object.py
|
sn6uv/sympy
|
5b149c2f72847e4785c65358b09d99b29f101dd5
|
[
"BSD-3-Clause"
] | 7
|
2015-01-14T06:55:33.000Z
|
2018-08-11T14:43:52.000Z
|
sympy/plotting/plot_object.py
|
goodok/sympy
|
de84ed2139125a755ea7b6ba91d945d9fbbe5ed9
|
[
"BSD-3-Clause"
] | 1
|
2018-02-19T04:56:04.000Z
|
2018-02-19T04:56:04.000Z
|
sympy/plotting/plot_object.py
|
goodok/sympy
|
de84ed2139125a755ea7b6ba91d945d9fbbe5ed9
|
[
"BSD-3-Clause"
] | 1
|
2016-04-24T14:39:22.000Z
|
2016-04-24T14:39:22.000Z
|
class PlotObject(object):
"""
Base class for objects which can be displayed in
a Plot.
"""
visible = True
def _draw(self):
if self.visible:
self.draw()
def draw(self):
"""
OpenGL rendering code for the plot object.
        Override in subclasses.
"""
pass
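# Hedged usage sketch: a minimal subclass overriding draw() (the rendering calls
# are placeholders, not part of this module):
#   class Axes(PlotObject):
#       def draw(self):
#           pass  # issue the OpenGL calls for the axes here
#
#   axes = Axes()
#   axes._draw()  # renders only while axes.visible is True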
| 18.777778
| 52
| 0.529586
|
d7e41d5e4fc1536d6646f01f4f8b25294e041d30
| 848
|
py
|
Python
|
src/neuralnetwork.py
|
hirotatsuya/deep-learning-python
|
83b9da2b616f6b6b545b354b3c0c931b59add6f0
|
[
"MIT"
] | null | null | null |
src/neuralnetwork.py
|
hirotatsuya/deep-learning-python
|
83b9da2b616f6b6b545b354b3c0c931b59add6f0
|
[
"MIT"
] | null | null | null |
src/neuralnetwork.py
|
hirotatsuya/deep-learning-python
|
83b9da2b616f6b6b545b354b3c0c931b59add6f0
|
[
"MIT"
] | null | null | null |
import numpy as np
def sigmoid(x):
return 1 / (1 + np.exp(-x))
def identity_function(x):
return x
def init_network():
network = {}
network['W1'] = np.array([[0.1, 0.3, 0.5], [0.2, 0.4, 0.6]])
network['b1'] = np.array([0.1, 0.2, 0.3])
network['W2'] = np.array([[0.1, 0.4], [0.2, 0.5], [0.3, 0.6]])
network['b2'] = np.array([0.1, 0.2])
network['W3'] = np.array([[0.1, 0.3], [0.2, 0.4]])
network['b3'] = np.array([0.1, 0.2])
return network
def forward(network, x):
W1, W2, W3 = network['W1'], network['W2'], network['W3']
b1, b2, b3 = network['b1'], network['b2'], network['b3']
a1 = np.dot(x, W1) + b1
z1 = sigmoid(a1)
a2 = np.dot(z1, W2) + b2
z2 = sigmoid(a2)
a3 = np.dot(z2, W3) + b3
y = identity_function(a3)
return y
network = init_network()
x = np.array([1.0, 0.5])
y = forward(network, x)
print(y)
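# Expected output of the forward pass above (approximately):
#   [0.31682708 0.69627909]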
| 22.918919
| 64
| 0.553066
|
8252dac0ee3bb51d313ce6d8339506ed41e3bde7
| 1,359
|
py
|
Python
|
racecar_behaviors/scripts/path_following.py
|
matlabbe/racecar-velodyne
|
94ef4c0a4b908ca12f9236458c492a1583a8889c
|
[
"MIT"
] | 2
|
2021-01-20T02:36:32.000Z
|
2021-11-09T02:46:25.000Z
|
racecar_behaviors/scripts/path_following.py
|
matlabbe/racecar-velodyne
|
94ef4c0a4b908ca12f9236458c492a1583a8889c
|
[
"MIT"
] | null | null | null |
racecar_behaviors/scripts/path_following.py
|
matlabbe/racecar-velodyne
|
94ef4c0a4b908ca12f9236458c492a1583a8889c
|
[
"MIT"
] | 12
|
2020-07-07T16:23:45.000Z
|
2021-09-07T20:33:40.000Z
|
#!/usr/bin/env python
import rospy
import math
import numpy as np
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
from nav_msgs.msg import Odometry
class PathFollowing:
def __init__(self):
self.max_speed = rospy.get_param('~max_speed', 1)
self.max_steering = rospy.get_param('~max_steering', 0.37)
self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
self.scan_sub = rospy.Subscriber('scan', LaserScan, self.scan_callback, queue_size=1)
self.odom_sub = rospy.Subscriber('odom', Odometry, self.odom_callback, queue_size=1)
def scan_callback(self, msg):
# Because the lidar is oriented backward on the racecar,
# if we want the middle value of the ranges to be forward:
        # l2 = len(msg.ranges) // 2
        # ranges = msg.ranges[l2:len(msg.ranges)] + msg.ranges[0:l2]
twist = Twist()
twist.linear.x = self.max_speed
twist.angular.z = 0
        self.cmd_vel_pub.publish(twist)
def odom_callback(self, msg):
rospy.loginfo("Current speed = %f m/s", msg.twist.twist.linear.x)
def main():
rospy.init_node('path_following')
pathFollowing = PathFollowing()
rospy.spin()
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
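# Hedged usage sketch (assumes the racecar_behaviors package is built and sourced):
#   rosrun racecar_behaviors path_following.py _max_speed:=0.5 _max_steering:=0.3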
| 30.886364
| 93
| 0.655629
|
601ad1ac15a29a330f03f33758d30c4c2007c163
| 162
|
py
|
Python
|
my_project/__init__.py
|
sotch-pr35mac/clinc-business-logic-server-template-python
|
6178edb7b3bb368376720fe2baf0babe65a37329
|
[
"BSD-3-Clause"
] | 1
|
2019-11-11T15:40:35.000Z
|
2019-11-11T15:40:35.000Z
|
my_project/__init__.py
|
sotch-pr35mac/clinc-business-logic-server-template-python
|
6178edb7b3bb368376720fe2baf0babe65a37329
|
[
"BSD-3-Clause"
] | 3
|
2018-08-06T20:43:38.000Z
|
2021-06-10T20:43:52.000Z
|
my_project/__init__.py
|
sotch-pr35mac/clinc-business-logic-server-template-python
|
6178edb7b3bb368376720fe2baf0babe65a37329
|
[
"BSD-3-Clause"
] | 2
|
2018-08-06T18:37:39.000Z
|
2019-05-23T13:27:56.000Z
|
"""
Module imports for templates.python.business_logic.my_project
This file is automatically generated by ./scripts/empty_pyinit.sh
DO NOT EDIT IT MANUALLY
"""
| 20.25
| 65
| 0.796296
|
fbcf1482defc9d99f1d63dab75fd0d87eea701a2
| 4,529
|
py
|
Python
|
plaster/gen/sigproc_v2_generator.py
|
erisyon/plaster
|
20af32aed2365c6351fe3c26293308960099152b
|
[
"MIT"
] | null | null | null |
plaster/gen/sigproc_v2_generator.py
|
erisyon/plaster
|
20af32aed2365c6351fe3c26293308960099152b
|
[
"MIT"
] | 22
|
2020-06-22T19:27:50.000Z
|
2021-09-30T20:02:31.000Z
|
plaster/gen/sigproc_v2_generator.py
|
erisyon/plaster
|
20af32aed2365c6351fe3c26293308960099152b
|
[
"MIT"
] | 2
|
2020-06-16T17:38:46.000Z
|
2021-08-06T09:37:22.000Z
|
from munch import Munch
from plaster.gen.base_generator import BaseGenerator
from plaster.run.rad_filter.rad_filter_params import RadFilterParams
from plaster.gen import task_templates
from plaster.tools.schema.schema import Schema as s
from plaster.gen.report_builder import ReportBuilder
class SigprocV2Generator(BaseGenerator):
"""
Examine sigproc_v2 and study their results.
Note that this requires a calibration file produced by running the run
generated by the sigproc_v2_calibration generator.
"""
schema = s(
s.is_kws_r(
**BaseGenerator.sigproc_source_schema.schema(),
**BaseGenerator.sigproc_v2_schema.schema(),
**BaseGenerator.lnfit_schema.schema(),
**BaseGenerator.error_model_schema.schema(),
**RadFilterParams.schema.schema(),
classify_dyetracks=s.is_bool(help="If true then compare to dyetracks"),
dyetrack_n_cycles=s.is_int(
noneable=True, help="Number of cycles of simulated dyetracks"
),
dyetrack_n_counts=s.is_int(noneable=True, help="Number of dyes max."),
is_timelapse=s.is_bool(help="Is a timelapse experiment"),
)
)
defaults = Munch(
classify_dyetracks=False,
movie=False,
is_timelapse=False,
start_cycle=0,
**RadFilterParams.defaults,
)
def generate(self):
runs = []
lnfit_tasks = self.lnfits(sigproc_version="v2")
assert isinstance(self.sigproc_source, str)
sigproc_tasks = self.tasks_for_sigproc_v2()
rad_filter_task = task_templates.rad_filter(
field_quality_thresh=self.field_quality_thresh,
dark_thresh_in_stds=self.dark_thresh_in_stds,
noi_thresh_in_stds=self.noi_thresh_in_stds,
)
nn_n2_task = {}
if self.classify_dyetracks:
# TODO: This is a bit of a hacked up mess, this mode is
# used for calibration purposes and might not be a long-term
# feature and so is using hard-coded n_channels for example
self.label_set = [""]
self.scheme = []
n_schemes = 0
for protease, label_set, err_set in self.run_parameter_permutator():
nn_n2_task = task_templates.nn_v2(
"../sigproc_v2",
err_set,
prep_folder=None,
sim_v2_folder=None,
rad_filter_folder=f"../rad_filter",
run_against_all_dyetracks=True,
run_row_k_fit=True,
include_sigproc=True,
dyetrack_n_cycles=self.dyetrack_n_cycles,
dyetrack_n_counts=self.dyetrack_n_counts,
)
n_schemes += 1
assert n_schemes == 1
run = Munch(
run_name=f"sigproc_v2",
**sigproc_tasks,
**rad_filter_task,
**lnfit_tasks,
**nn_n2_task,
)
if self.force_run_name is not None:
run.run_name = self.force_run_name
# self.report_section_run_object(run)
# template = "sigproc_v2_analyze_template.ipynb"
# self.report_section_from_template(template)
#
# if lnfit_tasks:
# self.report_section_from_template("lnfit_template.ipynb")
runs += [run]
n_runs = len(runs)
# self.report_preamble(
# utils.smart_wrap(
# f"""
# # Sigproc V2 Analyze
# ## {n_runs} run(s) processed.
# This file generated by {current_file_and_line_str()}.
# """,
# width=None,
# )
# )
if self.classify_dyetracks:
rb = ReportBuilder()
rb.report_section_run_object(run)
template = "sigproc_v2_classify_dyetracks_template.ipynb"
rb.report_section_from_template(template)
self.add_report("sigproc_v2_classify_dyetracks", rb)
# if self.is_timelapse and self.dyetrack_n_counts == 1:
# rb = ReportBuilder()
# rb.report_section_run_object(run)
# template = "sigproc_v2_timelapse_template.ipynb"
# rb.report_section_from_template(template)
# self.add_report("sigproc_v2_timelapse", rb)
self.static_reports += ["ims_import", "sigproc_primary", "sigproc_secondary"]
return runs
| 34.838462
| 85
| 0.597483
|
49a41dee7afb65abc07818d79e6cc004a1675776
| 543
|
py
|
Python
|
app.py
|
garritfra/express-python
|
681fdad93e34e31057a716c68b40492776346944
|
[
"Unlicense"
] | 1
|
2021-09-28T04:07:32.000Z
|
2021-09-28T04:07:32.000Z
|
app.py
|
garritfra/express-python
|
681fdad93e34e31057a716c68b40492776346944
|
[
"Unlicense"
] | null | null | null |
app.py
|
garritfra/express-python
|
681fdad93e34e31057a716c68b40492776346944
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python3
from http.server import HTTPServer, SimpleHTTPRequestHandler, HTTPStatus
class Handler(SimpleHTTPRequestHandler):
def do_GET(self):
self.send_response(HTTPStatus.OK)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(b'Hello, World!')
return
def run(port, server_class=HTTPServer, handler_class=Handler):
print("Server running at " + str(port))
httpd = HTTPServer(("localhost", port), Handler)
httpd.serve_forever()
run(8080)
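# Hedged usage sketch: with the server running, a request such as
#   curl http://localhost:8080/
# should return "Hello, World!".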
| 24.681818
| 72
| 0.694291
|
a2a8ee8e1274c7a420f0eda0f96fb584ca6d0584
| 920
|
py
|
Python
|
medium/739-daily-temperatures.py
|
wanglongjiang/leetcode
|
c61d2e719e81575cfb5bde9d64e15cee7cf01ef3
|
[
"MIT"
] | 2
|
2021-03-14T11:38:26.000Z
|
2021-03-14T11:38:30.000Z
|
medium/739-daily-temperatures.py
|
wanglongjiang/leetcode
|
c61d2e719e81575cfb5bde9d64e15cee7cf01ef3
|
[
"MIT"
] | null | null | null |
medium/739-daily-temperatures.py
|
wanglongjiang/leetcode
|
c61d2e719e81575cfb5bde9d64e15cee7cf01ef3
|
[
"MIT"
] | 1
|
2022-01-17T19:33:23.000Z
|
2022-01-17T19:33:23.000Z
|
'''
Daily Temperatures
Given a list of daily temperatures, produce a list that, for each day, tells how many
days you must wait for a warmer temperature. If no warmer day follows, put 0 instead.
For example, given temperatures = [73, 74, 75, 71, 69, 72, 76, 73], the output should
be [1, 1, 4, 2, 1, 1, 0, 0].
Note: the list length is in the range [1, 30000]; each temperature is an integer in
Fahrenheit within [30, 100].
'''
from typing import List
'''
Approach: monotonic stack
Push index 0, then iterate over the temperatures T[1..n-1]:
  - if the current temperature T[i] <= the temperature at the stack top, push i
  - if T[i] > the temperature at the stack top, pop index j; j's answer is i - j
  - keep popping until the stack top is >= the current temperature, then push i
Time complexity: O(n); each index is pushed at most once
Space complexity: O(n); each index is pushed at most once
'''
class Solution:
def dailyTemperatures(self, T: List[int]) -> List[int]:
ans = [0] * len(T)
stack = []
for i in range(len(T)):
while stack and T[i] > T[stack[-1]]:
prev = stack.pop()
ans[prev] = i - prev
stack.append(i)
        # indices remaining on the stack never see a warmer day; ans is already 0 for them
return ans
s = Solution()
print(s.dailyTemperatures([73, 74, 75, 71, 69, 72, 76, 73]))
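# Expected output: [1, 1, 4, 2, 1, 1, 0, 0]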
| 23.589744
| 91
| 0.579348
|
11fdebb1b93fd1c8f987cf108e42884fe90c950d
| 5,007
|
py
|
Python
|
library/avi_geodb.py
|
avinetworks/ansible-role-avisdk
|
b20b1ca33b80e43c2598dc1823439055658600f7
|
[
"Apache-2.0"
] | 13
|
2016-10-11T16:43:01.000Z
|
2021-10-08T20:19:57.000Z
|
library/avi_geodb.py
|
avinetworks/ansible-role-avisdk
|
b20b1ca33b80e43c2598dc1823439055658600f7
|
[
"Apache-2.0"
] | 27
|
2017-08-30T13:51:59.000Z
|
2022-03-11T05:52:38.000Z
|
library/avi_geodb.py
|
avinetworks/ansible-role-avisdk
|
b20b1ca33b80e43c2598dc1823439055658600f7
|
[
"Apache-2.0"
] | 11
|
2016-10-11T19:47:10.000Z
|
2021-05-21T07:21:18.000Z
|
#!/usr/bin/python
# module_check: supported
# Copyright 2021 VMware, Inc. All rights reserved. VMware Confidential
# SPDX-License-Identifier: Apache License 2.0
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_geodb
author: Gaurav Rastogi (@grastogi23) <grastogi@avinetworks.com>
short_description: Module for setup of GeoDB Avi RESTful Object
description:
- This module is used to configure GeoDB object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.7"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
type: str
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
type: str
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete", "remove"]
type: str
avi_patch_path:
description:
- Patch path to use when using avi_api_update_method as patch.
type: str
avi_patch_value:
description:
- Patch value to use when using avi_api_update_method as patch.
type: str
description:
description:
- Description.
- Field introduced in 21.1.1.
type: str
files:
description:
- Geo database files.
- Field introduced in 21.1.1.
required: true
type: list
is_federated:
description:
- This field indicates that this object is replicated across gslb federation.
- Field introduced in 21.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
mappings:
description:
- Custom mappings of geo values.
- All mappings which start with the prefix 'system-' (any case) are reserved for system default objects and may be overwritten.
- Field introduced in 21.1.1.
type: list
name:
description:
- Geo database name.
- Field introduced in 21.1.1.
required: true
type: str
tenant_ref:
description:
- Tenant that this object belongs to.
- It is a reference to an object of type tenant.
- Field introduced in 21.1.1.
type: str
url:
description:
- Avi controller URL of the object.
type: str
uuid:
description:
- Uuid of this object.
- Field introduced in 21.1.1.
type: str
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- hosts: all
vars:
avi_credentials:
username: "admin"
password: "something"
controller: "192.168.15.18"
api_version: "21.1.1"
- name: Example to create GeoDB object
avi_geodb:
avi_credentials: "{{ avi_credentials }}"
state: present
name: sample_geodb
"""
RETURN = '''
obj:
description: GeoDB (api/geodb) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
    from avi.sdk.utils.ansible_utils import (
        avi_ansible_api, avi_common_argument_spec)
HAS_AVI = True
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete', 'remove']),
avi_patch_path=dict(type='str',),
avi_patch_value=dict(type='str',),
description=dict(type='str',),
files=dict(type='list', required=True),
is_federated=dict(type='bool',),
mappings=dict(type='list',),
name=dict(type='str', required=True),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) or requests is not installed. '
'For more details visit https://github.com/vmware/alb-sdk.'))
return avi_ansible_api(module, 'geodb',
set())
if __name__ == '__main__':
main()
| 30.345455
| 139
| 0.604354
|
a48bcaa6f477b8900d01deb73e23c2d25670d477
| 24,506
|
py
|
Python
|
superset/dashboards/api.py
|
AmritaTech/superset
|
c685c9ea8fa70ba6646617d0a272c11e1130081c
|
[
"Apache-2.0"
] | 4
|
2021-10-05T00:55:28.000Z
|
2021-12-21T10:56:14.000Z
|
superset/dashboards/api.py
|
AmritaTech/superset
|
c685c9ea8fa70ba6646617d0a272c11e1130081c
|
[
"Apache-2.0"
] | 23
|
2020-12-28T14:25:14.000Z
|
2022-03-11T05:29:41.000Z
|
superset/dashboards/api.py
|
AmritaTech/superset
|
c685c9ea8fa70ba6646617d0a272c11e1130081c
|
[
"Apache-2.0"
] | 3
|
2021-01-05T10:54:08.000Z
|
2021-01-05T12:36:19.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
from datetime import datetime
from io import BytesIO
from typing import Any, Dict
from zipfile import ZipFile
from flask import g, make_response, redirect, request, Response, send_file, url_for
from flask_appbuilder.api import expose, protect, rison, safe
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import ngettext
from marshmallow import ValidationError
from werkzeug.wrappers import Response as WerkzeugResponse
from werkzeug.wsgi import FileWrapper
from superset import is_feature_enabled, thumbnail_cache
from superset.commands.exceptions import CommandInvalidError
from superset.commands.importers.v1.utils import remove_root
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.dashboards.commands.bulk_delete import BulkDeleteDashboardCommand
from superset.dashboards.commands.create import CreateDashboardCommand
from superset.dashboards.commands.delete import DeleteDashboardCommand
from superset.dashboards.commands.exceptions import (
DashboardBulkDeleteFailedError,
DashboardCreateFailedError,
DashboardDeleteFailedError,
DashboardForbiddenError,
DashboardImportError,
DashboardInvalidError,
DashboardNotFoundError,
DashboardUpdateFailedError,
)
from superset.dashboards.commands.export import ExportDashboardsCommand
from superset.dashboards.commands.importers.dispatcher import ImportDashboardsCommand
from superset.dashboards.commands.update import UpdateDashboardCommand
from superset.dashboards.dao import DashboardDAO
from superset.dashboards.filters import (
DashboardFavoriteFilter,
DashboardFilter,
DashboardTitleOrSlugFilter,
)
from superset.dashboards.schemas import (
DashboardPostSchema,
DashboardPutSchema,
get_delete_ids_schema,
get_export_ids_schema,
get_fav_star_ids_schema,
GetFavStarIdsSchema,
openapi_spec_methods_override,
thumbnail_query_schema,
)
from superset.extensions import event_logger
from superset.models.dashboard import Dashboard
from superset.tasks.thumbnails import cache_dashboard_thumbnail
from superset.utils.screenshots import DashboardScreenshot
from superset.utils.urls import get_url_path
from superset.views.base import generate_download_headers
from superset.views.base_api import (
BaseSupersetModelRestApi,
RelatedFieldFilter,
statsd_metrics,
)
from superset.views.filters import FilterRelatedOwners
logger = logging.getLogger(__name__)
class DashboardRestApi(BaseSupersetModelRestApi):
datamodel = SQLAInterface(Dashboard)
include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
RouteMethod.EXPORT,
RouteMethod.IMPORT,
RouteMethod.RELATED,
"bulk_delete", # not using RouteMethod since locally defined
"favorite_status",
}
resource_name = "dashboard"
allow_browser_login = True
class_permission_name = "Dashboard"
method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
show_columns = [
"id",
"charts",
"css",
"dashboard_title",
"json_metadata",
"owners.id",
"owners.username",
"owners.first_name",
"owners.last_name",
"changed_by_name",
"changed_by_url",
"changed_by.username",
"changed_on",
"position_json",
"published",
"url",
"slug",
"table_names",
"thumbnail_url",
]
list_columns = [
"id",
"published",
"slug",
"url",
"css",
"position_json",
"json_metadata",
"thumbnail_url",
"changed_by.first_name",
"changed_by.last_name",
"changed_by.username",
"changed_by.id",
"changed_by_name",
"changed_by_url",
"changed_on_utc",
"changed_on_delta_humanized",
"created_by.first_name",
"created_by.id",
"created_by.last_name",
"dashboard_title",
"owners.id",
"owners.username",
"owners.first_name",
"owners.last_name",
]
list_select_columns = list_columns + ["changed_on", "changed_by_fk"]
order_columns = [
"changed_by.first_name",
"changed_on_delta_humanized",
"created_by.first_name",
"dashboard_title",
"published",
]
add_columns = [
"dashboard_title",
"slug",
"owners",
"position_json",
"css",
"json_metadata",
"published",
]
edit_columns = add_columns
search_columns = (
"created_by",
"dashboard_title",
"id",
"owners",
"published",
"slug",
"changed_by",
)
search_filters = {
"dashboard_title": [DashboardTitleOrSlugFilter],
"id": [DashboardFavoriteFilter],
}
base_order = ("changed_on", "desc")
add_model_schema = DashboardPostSchema()
edit_model_schema = DashboardPutSchema()
base_filters = [["slice", DashboardFilter, lambda: []]]
order_rel_fields = {
"slices": ("slice_name", "asc"),
"owners": ("first_name", "asc"),
}
related_field_filters = {
"owners": RelatedFieldFilter("first_name", FilterRelatedOwners),
"created_by": RelatedFieldFilter("first_name", FilterRelatedOwners),
}
allowed_rel_fields = {"owners", "created_by"}
openapi_spec_tag = "Dashboards"
""" Override the name set for this collection of endpoints """
openapi_spec_component_schemas = (GetFavStarIdsSchema,)
apispec_parameter_schemas = {
"get_delete_ids_schema": get_delete_ids_schema,
"get_export_ids_schema": get_export_ids_schema,
"thumbnail_query_schema": thumbnail_query_schema,
"get_fav_star_ids_schema": get_fav_star_ids_schema,
}
openapi_spec_methods = openapi_spec_methods_override
""" Overrides GET methods OpenApi descriptions """
def __init__(self) -> None:
if is_feature_enabled("THUMBNAILS"):
self.include_route_methods = self.include_route_methods | {"thumbnail"}
super().__init__()
@expose("/", methods=["POST"])
@protect()
@safe
@statsd_metrics
@event_logger.log_this_with_context(log_to_statsd=False)
def post(self) -> Response:
"""Creates a new Dashboard
---
post:
description: >-
Create a new Dashboard.
requestBody:
description: Dashboard schema
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/{{self.__class__.__name__}}.post'
responses:
201:
description: Dashboard added
content:
application/json:
schema:
type: object
properties:
id:
type: number
result:
$ref: '#/components/schemas/{{self.__class__.__name__}}.post'
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
if not request.is_json:
return self.response_400(message="Request is not JSON")
try:
item = self.add_model_schema.load(request.json)
# This validates custom Schema with custom validations
except ValidationError as error:
return self.response_400(message=error.messages)
try:
new_model = CreateDashboardCommand(g.user, item).run()
return self.response(201, id=new_model.id, result=item)
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardCreateFailedError as ex:
logger.error(
"Error creating model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
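    # Illustrative client call (a sketch, not part of Superset itself): creating
    # a dashboard through this endpoint with the `requests` library. The host,
    # port, and `access_token` below are hypothetical.
    #
    #   import requests
    #   resp = requests.post(
    #       "http://localhost:8088/api/v1/dashboard/",
    #       json={"dashboard_title": "Sales overview", "published": True},
    #       headers={"Authorization": f"Bearer {access_token}"},
    #   )
    #   assert resp.status_code == 201  # body: {"id": ..., "result": {...}}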
@expose("/<pk>", methods=["PUT"])
@protect()
@safe
@statsd_metrics
@event_logger.log_this_with_context(log_to_statsd=False)
def put(self, pk: int) -> Response:
"""Changes a Dashboard
---
put:
description: >-
Changes a Dashboard.
parameters:
- in: path
schema:
type: integer
name: pk
requestBody:
description: Dashboard schema
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/{{self.__class__.__name__}}.put'
responses:
200:
description: Dashboard changed
content:
application/json:
schema:
type: object
properties:
id:
type: number
result:
$ref: '#/components/schemas/{{self.__class__.__name__}}.put'
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
403:
$ref: '#/components/responses/403'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
if not request.is_json:
return self.response_400(message="Request is not JSON")
try:
item = self.edit_model_schema.load(request.json)
# This validates custom Schema with custom validations
except ValidationError as error:
return self.response_400(message=error.messages)
try:
changed_model = UpdateDashboardCommand(g.user, pk, item).run()
response = self.response(200, id=changed_model.id, result=item)
except DashboardNotFoundError:
response = self.response_404()
except DashboardForbiddenError:
response = self.response_403()
except DashboardInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except DashboardUpdateFailedError as ex:
logger.error(
"Error updating model %s: %s", self.__class__.__name__, str(ex)
)
response = self.response_422(message=str(ex))
return response
@expose("/<pk>", methods=["DELETE"])
@protect()
@safe
@statsd_metrics
@event_logger.log_this_with_context(log_to_statsd=False)
def delete(self, pk: int) -> Response:
"""Deletes a Dashboard
---
delete:
description: >-
Deletes a Dashboard.
parameters:
- in: path
schema:
type: integer
name: pk
responses:
200:
description: Dashboard deleted
content:
application/json:
schema:
type: object
properties:
message:
type: string
401:
$ref: '#/components/responses/401'
403:
$ref: '#/components/responses/403'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
try:
DeleteDashboardCommand(g.user, pk).run()
return self.response(200, message="OK")
except DashboardNotFoundError:
return self.response_404()
except DashboardForbiddenError:
return self.response_403()
except DashboardDeleteFailedError as ex:
logger.error(
"Error deleting model %s: %s", self.__class__.__name__, str(ex)
)
return self.response_422(message=str(ex))
@expose("/", methods=["DELETE"])
@protect()
@safe
@statsd_metrics
@rison(get_delete_ids_schema)
@event_logger.log_this_with_context(log_to_statsd=False)
def bulk_delete(self, **kwargs: Any) -> Response:
"""Delete bulk Dashboards
---
delete:
description: >-
Deletes multiple Dashboards in a bulk operation.
parameters:
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/get_delete_ids_schema'
responses:
200:
description: Dashboard bulk delete
content:
application/json:
schema:
type: object
properties:
message:
type: string
401:
$ref: '#/components/responses/401'
403:
$ref: '#/components/responses/403'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
item_ids = kwargs["rison"]
try:
BulkDeleteDashboardCommand(g.user, item_ids).run()
return self.response(
200,
message=ngettext(
"Deleted %(num)d dashboard",
"Deleted %(num)d dashboards",
num=len(item_ids),
),
)
except DashboardNotFoundError:
return self.response_404()
except DashboardForbiddenError:
return self.response_403()
except DashboardBulkDeleteFailedError as ex:
return self.response_422(message=str(ex))
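    # Illustrative client call (a sketch): the ids are passed as a Rison-encoded
    # list in the `q` query argument, per get_delete_ids_schema. Host and token
    # are hypothetical.
    #
    #   import requests
    #   resp = requests.delete(
    #       "http://localhost:8088/api/v1/dashboard/?q=!(1,2,3)",
    #       headers={"Authorization": f"Bearer {access_token}"},
    #   )
    #   assert resp.status_code == 200  # body: {"message": "Deleted 3 dashboards"}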
@expose("/export/", methods=["GET"])
@protect()
@safe
@statsd_metrics
@rison(get_export_ids_schema)
@event_logger.log_this_with_context(log_to_statsd=False)
def export(self, **kwargs: Any) -> Response:
"""Export dashboards
---
get:
description: >-
            Exports multiple Dashboards as a ZIP bundle of YAML files
            (or a legacy JSON document when versioned export is disabled).
parameters:
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/get_export_ids_schema'
responses:
200:
description: Dashboard export
content:
text/plain:
schema:
type: string
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
requested_ids = kwargs["rison"]
if is_feature_enabled("VERSIONED_EXPORT"):
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
root = f"dashboard_export_{timestamp}"
filename = f"{root}.zip"
buf = BytesIO()
with ZipFile(buf, "w") as bundle:
try:
for file_name, file_content in ExportDashboardsCommand(
requested_ids
).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
fp.write(file_content.encode())
except DashboardNotFoundError:
return self.response_404()
buf.seek(0)
return send_file(
buf,
mimetype="application/zip",
as_attachment=True,
attachment_filename=filename,
)
query = self.datamodel.session.query(Dashboard).filter(
Dashboard.id.in_(requested_ids)
)
query = self._base_filters.apply_all(query)
ids = [item.id for item in query.all()]
if not ids:
return self.response_404()
export = Dashboard.export_dashboards(ids)
resp = make_response(export, 200)
resp.headers["Content-Disposition"] = generate_download_headers("json")[
"Content-Disposition"
]
return resp
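    # Illustrative client call (a sketch): with VERSIONED_EXPORT enabled the
    # body is a ZIP bundle; otherwise it is the legacy JSON export. Host and
    # token are hypothetical.
    #
    #   import requests
    #   resp = requests.get(
    #       "http://localhost:8088/api/v1/dashboard/export/?q=!(1,2)",
    #       headers={"Authorization": f"Bearer {access_token}"},
    #   )
    #   with open("dashboards.zip", "wb") as out:
    #       out.write(resp.content)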
@expose("/<pk>/thumbnail/<digest>/", methods=["GET"])
@protect()
@safe
@rison(thumbnail_query_schema)
@event_logger.log_this_with_context(log_to_statsd=False)
def thumbnail(
self, pk: int, digest: str, **kwargs: Dict[str, bool]
) -> WerkzeugResponse:
"""Get Dashboard thumbnail
---
get:
description: >-
            Get a dashboard thumbnail from the cache, or trigger an
            asynchronous computation if it is missing.
parameters:
- in: path
schema:
type: integer
name: pk
- in: path
name: digest
description: A hex digest that makes this dashboard unique
schema:
type: string
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/thumbnail_query_schema'
responses:
200:
description: Dashboard thumbnail image
content:
image/*:
schema:
type: string
format: binary
202:
              description: Thumbnail is not yet in the cache; an async task was fired to compute it
content:
application/json:
schema:
type: object
properties:
message:
type: string
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
dashboard = self.datamodel.get(pk, self._base_filters)
if not dashboard:
return self.response_404()
dashboard_url = get_url_path(
"Superset.dashboard", dashboard_id_or_slug=dashboard.id
)
# If force, request a screenshot from the workers
if kwargs["rison"].get("force", False):
cache_dashboard_thumbnail.delay(dashboard_url, dashboard.digest, force=True)
return self.response(202, message="OK Async")
# fetch the dashboard screenshot using the current user and cache if set
screenshot = DashboardScreenshot(
dashboard_url, dashboard.digest
).get_from_cache(cache=thumbnail_cache)
# If the screenshot does not exist, request one from the workers
if not screenshot:
cache_dashboard_thumbnail.delay(dashboard_url, dashboard.digest, force=True)
return self.response(202, message="OK Async")
        # If digests don't match, redirect to the thumbnail URL for the current digest
if dashboard.digest != digest:
return redirect(
url_for(
f"{self.__class__.__name__}.thumbnail",
pk=pk,
digest=dashboard.digest,
)
)
return Response(
FileWrapper(screenshot), mimetype="image/png", direct_passthrough=True
)
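    # Illustrative polling flow (a sketch): a 202 response means the screenshot
    # is being computed asynchronously, so a client retries until the image is
    # ready. `session` (an authenticated requests.Session) and the URL below
    # are hypothetical.
    #
    #   import time
    #   url = "http://localhost:8088/api/v1/dashboard/1/thumbnail/<digest>/"
    #   resp = session.get(url)
    #   while resp.status_code == 202:
    #       time.sleep(5)  # give the worker time to render the screenshot
    #       resp = session.get(url)
    #   png_bytes = resp.content  # a digest mismatch was already redirected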
@expose("/favorite_status/", methods=["GET"])
@protect()
@safe
@statsd_metrics
@rison(get_fav_star_ids_schema)
@event_logger.log_this_with_context(log_to_statsd=False)
def favorite_status(self, **kwargs: Any) -> Response:
"""Favorite Stars for Dashboards
---
get:
description: >-
Check favorited dashboards for current user
parameters:
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/get_fav_star_ids_schema'
responses:
200:
              description: Favorite stars for dashboards
content:
application/json:
schema:
$ref: "#/components/schemas/GetFavStarIdsSchema"
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
requested_ids = kwargs["rison"]
dashboards = DashboardDAO.find_by_ids(requested_ids)
if not dashboards:
return self.response_404()
favorited_dashboard_ids = DashboardDAO.favorited_ids(dashboards, g.user.id)
res = [
{"id": request_id, "value": request_id in favorited_dashboard_ids}
for request_id in requested_ids
]
return self.response(200, result=res)
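    # Illustrative client call (a sketch): ids go in a Rison-encoded `q`
    # argument and the response pairs each id with a boolean. `session` is a
    # hypothetical authenticated requests.Session.
    #
    #   resp = session.get(
    #       "http://localhost:8088/api/v1/dashboard/favorite_status/?q=!(1,2)"
    #   )
    #   # body: {"result": [{"id": 1, "value": true}, {"id": 2, "value": false}]}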
@expose("/import/", methods=["POST"])
@protect()
@safe
@statsd_metrics
def import_(self) -> Response:
"""Import dashboard(s) with associated charts/datasets/databases
---
post:
requestBody:
required: true
content:
multipart/form-data:
schema:
type: object
properties:
formData:
type: string
format: binary
passwords:
type: string
overwrite:
                  type: boolean
responses:
200:
description: Dashboard import result
content:
application/json:
schema:
type: object
properties:
message:
type: string
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
upload = request.files.get("formData")
if not upload:
return self.response_400()
with ZipFile(upload) as bundle:
contents = {
remove_root(file_name): bundle.read(file_name).decode()
for file_name in bundle.namelist()
}
passwords = (
json.loads(request.form["passwords"])
if "passwords" in request.form
else None
)
overwrite = request.form.get("overwrite") == "true"
command = ImportDashboardsCommand(
contents, passwords=passwords, overwrite=overwrite
)
try:
command.run()
return self.response(200, message="OK")
except CommandInvalidError as exc:
logger.warning("Import dashboard failed")
return self.response_422(message=exc.normalized_messages())
except DashboardImportError as exc:
logger.exception("Import dashboard failed")
return self.response_500(message=str(exc))
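    # Illustrative client call (a sketch): a bundle produced by /export/ is
    # uploaded as multipart form data. File name, host, and token are
    # hypothetical.
    #
    #   import requests
    #   with open("dashboards.zip", "rb") as bundle:
    #       resp = requests.post(
    #           "http://localhost:8088/api/v1/dashboard/import/",
    #           files={"formData": bundle},
    #           data={"overwrite": "true"},
    #           headers={"Authorization": f"Bearer {access_token}"},
    #       )
    #   assert resp.status_code == 200  # body: {"message": "OK"}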
| 33.569863 | 88 | 0.564433 |
28105367567031650103288f9acc79ddc001dd8f | 198 | py | Python | testautoload/models.py | MaxLarue/django-simple-acl | d980d149b663b0742eb2baf52ff596e7827c8941 | ["MIT"] | 1 | 2020-10-13T17:19:42.000Z | 2020-10-13T17:19:42.000Z | testautoload/models.py | MaxLarue/django-simple-acl | d980d149b663b0742eb2baf52ff596e7827c8941 | ["MIT"] | null | null | null | testautoload/models.py | MaxLarue/django-simple-acl | d980d149b663b0742eb2baf52ff596e7827c8941 | ["MIT"] | null | null | null |
from django.db import models


class Order(models.Model):
    pass


class Product(models.Model):
    pass


class Invoice(models.Model):
    pass


class InvoiceProposition(models.Model):
    pass
| 11 | 39 | 0.712121 |
8a49b737bc12ef8272baccf696e7a9840bda1b85 | 371 | py | Python | models/conditional_event_definition.py | THM-MA/XSDATA-waypoint | dd94442f9d6677c525bf3ebb03c15fec52fa1079 | ["MIT"] | null | null | null | models/conditional_event_definition.py | THM-MA/XSDATA-waypoint | dd94442f9d6677c525bf3ebb03c15fec52fa1079 | ["MIT"] | null | null | null | models/conditional_event_definition.py | THM-MA/XSDATA-waypoint | dd94442f9d6677c525bf3ebb03c15fec52fa1079 | ["MIT"] | null | null | null |
from dataclasses import dataclass

from .t_conditional_event_definition import TConditionalEventDefinition

__NAMESPACE__ = "http://www.omg.org/spec/BPMN/20100524/MODEL"


@dataclass
class ConditionalEventDefinition(TConditionalEventDefinition):
    class Meta:
        name = "conditionalEventDefinition"
        namespace = "http://www.omg.org/spec/BPMN/20100524/MODEL"
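# Illustrative usage (a sketch, assuming xsdata is installed and the generated
# bindings are importable): parsing a BPMN fragment into this dataclass.
#
#   from xsdata.formats.dataclass.parsers import XmlParser
#
#   xml = ('<conditionalEventDefinition '
#          'xmlns="http://www.omg.org/spec/BPMN/20100524/MODEL"/>')
#   event = XmlParser().from_string(xml, ConditionalEventDefinition)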
| 30.916667 | 71 | 0.789757 |
e01c23c709f3766f56b36fe37c8129c21e3e53ce | 150 | py | Python | zilean/system/utils/_mysqld_utils.py | A-Hilaly/zilean | 2b2e87969a0d8064e8b92b07c346a4006f93c795 | ["Apache-2.0"] | null | null | null | zilean/system/utils/_mysqld_utils.py | A-Hilaly/zilean | 2b2e87969a0d8064e8b92b07c346a4006f93c795 | ["Apache-2.0"] | null | null | null | zilean/system/utils/_mysqld_utils.py | A-Hilaly/zilean | 2b2e87969a0d8064e8b92b07c346a4006f93c795 | ["Apache-2.0"] | null | null | null |
# Placeholder helpers for managing a local mysqld instance; the bodies are not
# yet implemented.
def __mysql_version():
    pass


def __check_mysql_exists():
    pass


def __execute_mysqld_in():
    pass


def __execute_mysqld_out():
    pass
| 8.823529 | 27 | 0.68 |
3e52d47d92e904d3063f341744eb86f9cc5c80fc | 2,550 | py | Python | default_data.py | kevincloud/sentinel-data-api | c737b62247963f61a453d06fb4c965d4d5dd8ac3 | ["Apache-2.0"] | 1 | 2021-12-06T21:01:37.000Z | 2021-12-06T21:01:37.000Z | default_data.py | kevincloud/sentinel-data-api | c737b62247963f61a453d06fb4c965d4d5dd8ac3 | ["Apache-2.0"] | null | null | null | default_data.py | kevincloud/sentinel-data-api | c737b62247963f61a453d06fb4c965d4d5dd8ac3 | ["Apache-2.0"] | null | null | null |
import configparser
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
config = configparser.ConfigParser()
config.read('app.ini')
identifier = config['App']['Identifier']
account_name = identifier + "-cosmos-db"
account_key = config['App']['AccountKey']
table_endpoint = "https://" + identifier + "-cosmos-db.table.cosmos.azure.com:443/"
connection_string = "DefaultEndpointsProtocol=https;AccountName=" + account_name + ";AccountKey=" + account_key + ";TableEndpoint=" + table_endpoint + ";"
table_service = TableService(endpoint_suffix="table.cosmos.azure.com", connection_string=connection_string)
table_name = identifier + "-cosmos-table"
# Start over
items = table_service.query_entities(table_name)
for itm in items:
table_service.delete_entity(table_name, itm.PartitionKey, itm.RowKey)
# Add new entries
default_entries = [
    ("required-modules", "custom-vnet"),
    ("required-modules", "custom-sg"),
    ("required-modules", "custom-blob"),
    ("approved-instances", "Standard_A1_v2"),
    ("approved-instances", "Standard_A2_v2"),
    ("approved-instances", "Standard_A4_v2"),
    ("approved-instances", "Standard_A8_v2"),
    ("prohibited-resources", "azurerm_resource_group"),
    ("prohibited-resources", "azurerm_virtual_network"),
    ("prohibited-resources", "azurerm_network_security_group"),
    ("prohibited-resources", "azurerm_subnet_network_security_group_association"),
]
for partition_key, row_key in default_entries:
    item = Entity()
    item.PartitionKey = partition_key
    item.RowKey = row_key
    # Insert into the configured table (derived from the identifier above)
    # rather than a hardcoded table name.
    table_service.insert_entity(table_name, item)
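# Optional sanity check (illustrative, not part of the original script): read
# the seeded rows back to confirm the table now holds the default data.
#
#   for entity in table_service.query_entities(table_name):
#       print(entity.PartitionKey, entity.RowKey)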
| 33.552632 | 154 | 0.78549 |
f1c6279be76fa88a569c09c04c3546116c32db52 | 1,709 | py | Python | app/core/migrations/0001_initial.py | AitorPo/recipe-app-api | abb9d1774735c78d5fac0ebc20e727ad1a4851c7 | ["MIT"] | null | null | null | app/core/migrations/0001_initial.py | AitorPo/recipe-app-api | abb9d1774735c78d5fac0ebc20e727ad1a4851c7 | ["MIT"] | null | null | null | app/core/migrations/0001_initial.py | AitorPo/recipe-app-api | abb9d1774735c78d5fac0ebc20e727ad1a4851c7 | ["MIT"] | null | null | null |
# Generated by Django 2.1.15 on 2021-09-28 08:09
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0009_alter_user_last_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
| 50.264706 | 266 | 0.63897 |
d43893e5e464f90b18387e00f26cdfed7675cb91 | 49,214 | py | Python | Python/libraries/recognizers-number-with-unit/recognizers_number_with_unit/resources/portuguese_numeric_with_unit.py | Irrelevances/Recognizers-Text | 630ce12bb47e201f663d72c31c680f6d40171962 | ["MIT"] | null | null | null | Python/libraries/recognizers-number-with-unit/recognizers_number_with_unit/resources/portuguese_numeric_with_unit.py | Irrelevances/Recognizers-Text | 630ce12bb47e201f663d72c31c680f6d40171962 | ["MIT"] | null | null | null | Python/libraries/recognizers-number-with-unit/recognizers_number_with_unit/resources/portuguese_numeric_with_unit.py | Irrelevances/Recognizers-Text | 630ce12bb47e201f663d72c31c680f6d40171962 | ["MIT"] | null | null | null |
# ------------------------------------------------------------------------------
# <auto-generated>
# This code was generated by a tool.
# Changes to this file may cause incorrect behavior and will be lost if
# the code is regenerated.
# </auto-generated>
# ------------------------------------------------------------------------------
from .base_numbers import BaseNumbers
# pylint: disable=line-too-long
class PortugueseNumericWithUnit:
AgeSuffixList = dict([('Ano', 'anos|ano'),
('Mês', 'meses|mes|mês'),
('Semana', 'semanas|semana'),
('Dia', 'dias|dia')])
AreaSuffixList = dict([('Quilômetro quadrado', 'quilômetro quadrado|quilómetro quadrado|quilometro quadrado|quilômetros quadrados|quilómetros quadrados|quilomeros quadrados|km2|km^2|km²'),
('Hectare', 'hectômetro quadrado|hectómetro quadrado|hectômetros quadrados|hectómetros cuadrados|hm2|hm^2|hm²|hectare|hectares'),
('Decâmetro quadrado', 'decâmetro quadrado|decametro quadrado|decâmetros quadrados|decametro quadrado|dam2|dam^2|dam²|are|ares'),
('Metro quadrado', 'metro quadrado|metros quadrados|m2|m^2|m²'),
('Decímetro quadrado', 'decímetro quadrado|decimentro quadrado|decímetros quadrados|decimentros quadrados|dm2|dm^2|dm²'),
('Centímetro quadrado', 'centímetro quadrado|centimetro quadrado|centímetros quadrados|centrimetros quadrados|cm2|cm^2|cm²'),
('Milímetro quadrado', 'milímetro quadrado|milimetro quadrado|milímetros quadrados|militmetros quadrados|mm2|mm^2|mm²'),
('Polegada quadrada', 'polegada quadrada|polegadas quadradas|in2|in^2|in²'),
('Pé quadrado', 'pé quadrado|pe quadrado|pés quadrados|pes quadrados|pé2|pé^2|pé²|sqft|sq ft|ft2|ft^2|ft²'),
('Jarda quadrada', 'jarda quadrada|jardas quadradas|yd2|yd^2|yd²'),
('Milha quadrada', 'milha quadrada|milhas quadradas|mi2|mi^2|mi²'),
('Acre', 'acre|acres')])
CurrencySuffixList = dict([('Dólar', 'dólar|dolar|dólares|dolares'),
('Peso', 'peso|pesos'),
('Coroa', 'coroa|coroas'),
('Rublo', 'rublo|rublos'),
('Libra', 'libra|libras'),
('Florim', 'florim|florins|ƒ'),
('Dinar', 'dinar|dinares'),
('Franco', 'franco|francos'),
('Rupia', 'rúpia|rupia|rúpias|rupias'),
('Escudo', 'escudo|escudos'),
('Xelim', 'xelim|xelins|xelims'),
('Lira', 'lira|liras'),
('Centavo', 'centavo|cêntimo|centimo|centavos|cêntimos|centimo'),
('Centésimo', 'centésimo|centésimos'),
('Pêni', 'pêni|péni|peni|penies|pennies'),
('Manat', 'manat|manate|mánate|man|manats|manates|mánates'),
('Euro', 'euro|euros|€|eur'),
('Centavo de Euro', 'centavo de euro|cêntimo de euro|centimo de euro|centavos de euro|cêntimos de euro|centimos de euro'),
('Dólar do Caribe Oriental', 'dólar do Caribe Oriental|dolar do Caribe Oriental|dólares do Caribe Oriental|dolares do Caribe Oriental|dólar das Caraíbas Orientais|dolar das Caraibas Orientais|dólares das Caraíbas Orientais|dolares das Caraibas Orientais|ec$|xcd'),
('Centavo do Caribe Oriental', 'centavo do Caribe Oriental|centavo das Caraíbas Orientais|cêntimo do Caribe Oriental|cêntimo das Caraíbas Orientais|centavos do Caribe Oriental|centavos das Caraíbas Orientais|cêntimos do Caribe Oriental|cêntimos das Caraíbas Orientais'),
('Franco CFA da África Ocidental', 'franco CFA da África Ocidental|franco CFA da Africa Ocidental|francos CFA da África Occidental|francos CFA da Africa Occidental|franco CFA Ocidental|xof'),
('Centavo de CFA da África Ocidental', 'centavo de CFA da Africa Occidental|centavos de CFA da África Ocidental|cêntimo de CFA da Africa Occidental|cêntimos de CFA da África Ocidental'),
('Franco CFA da África Central', 'franco CFA da África Central|franco CFA da Africa Central|francos CFA da África Central|francos CFA da Africa Central|franco CFA central|xaf'),
('Centavo de CFA da África Central', 'centavo de CFA de África Central|centavos de CFA da África Central|cêntimo de CFA de África Central|cêntimos de CFA da África Central'),
('Apsar abcásio', 'apsar abcásio|apsar abecásio|apsar abcasio|apsar|apsares'),
('Afegani afegão', 'afegani afegão|afegane afegão|؋|afn|afegane|afgane|afegâni|afeganis|afeganes|afganes|afegânis'),
('Pul', 'pul|pules|puls'),
('Lek albanês', 'lek|lekë|lekes|lek albanês|leque|leques|all'),
('Qindarke', 'qindarka|qindarkë|qindarke|qindarkas'),
('Kwanza angolano', 'kwanza angolano|kwanzas angolanos|kwanza|kwanzas|aoa|kz'),
('Cêntimo angolano', 'cêntimo angolano|cêntimo|cêntimos'),
('Florim das Antilhas Holandesas', 'florim das antilhas holandesas|florim das antilhas neerlandesas|ang'),
('Rial saudita', 'rial saudita|riais sauditas|riyal saudita|riyals sauditas|riyal|riyals|sar'),
('Halala saudita', 'halala saudita|halala|hallalah'),
('Dinar argelino', 'dinar argelino|dinares argelinos|dzd'),
('Cêntimo argelino', 'centimo argelino|centimos argelinos|cêntimo argelino|cêntimos argelinos|centavo argelino|centavos argelinos'),
('Peso argentino', 'peso argentino|pesos argentinos|peso|pesos|ar$|ars'),
('Centavo argentino', 'centavo argentino|centavos argentinos|centavo|ctvo.|ctvos.'),
('Dram armênio', 'dram armênio|dram armênios|dram arménio|dram arménios|dram armenio|dram armenios|dram|drame|drames|դր.'),
('Luma armênio', 'luma armênio|lumas armênios|luma arménio|lumas arménios|luma armenio|lumas armenios|luma|lumas'),
('Florim arubano', 'florín arubeño|florines arubeños|ƒ arubeños|aƒ|awg'),
('Dólar australiano', 'dólar australiano|dólares australianos|dolar australiano|dolares australianos|a$|aud'),
('Centavo australiano', 'centavo australiano|centavos australianos'),
('Manat azeri', 'manat azeri|manats azeris|azn|manat azerbaijanês|manat azerbaijano|manats azerbaijaneses|manats azerbaijanos'),
('Qəpik azeri', 'qəpik azeri|qəpik|qəpiks'),
('Dólar bahamense', 'dólar bahamense|dólares bahamense|dolar bahamense|dolares bahamense|dólar baamiano|dólares baamiano|dolar baamiano|dolares baamiano|b$|bsd'),
('Centavo bahamense', 'centavo bahamense|centavos bahamense'),
('Dinar bareinita', 'dinar bareinita|dinar baremita|dinares bareinitas|dinares baremitas|bhd'),
('Fil bareinita', 'fil bareinita|fil baremita|fils bareinitas|fils baremitas'),
('Taka bengali', 'taka bengali|takas bengalis|taca|tacas|taka|takas|bdt'),
('Poisha bengali', 'poisha bengali|poishas bengalis'),
('Dólar de Barbados', 'dólar de barbados|dólares de barbados|dolar de barbados|dolares de barbados|dólar dos barbados|dólares dos barbados|bbd'),
('Centavo de Barbados', 'centavo de barbados|centavos de barbados|centavo dos barbados|centavos dos barbados'),
('Dólar de Belize', 'dólar de belize|dólares de belize|dolar de belize|dolares de belize|dólar do belize|dólares do belize|dolar do belize|dolares do belize|bz$|bzd'),
('Centavo de Belize', 'centavo de belize|centavos de belize|cêntimo do belize|cêntimos do belize'),
('Dólar bermudense', 'dólar bermudense|dólares bermudenses|bd$|bmd'),
('Centavo bermudense', 'centavo bermudense|centavos bermudenses|cêntimo bermudense| cêntimos bermudenses'),
('Rublo bielorrusso', 'rublo bielorrusso|rublos bielorrussos|br|byr'),
('Copeque bielorusso', 'copeque bielorrusso|copeques bielorrussos|kopek bielorrusso|kopeks bielorrussos|kap'),
('Quiate mianmarense', 'quiate mianmarense|quiates mianmarenses|kyat mianmarense|kyates mianmarenses|quiate myanmarense|quiates myanmarenses|kyat myanmarense|kyates myanmarenses|quiate birmanês|quite birmanes|quiates birmaneses|kyat birmanês|kyat birmanes|kyates birmaneses|mmk'),
('Pya mianmarense', 'pya mianmarense|pyas mianmarenses|pya myanmarense|pyas myanmarenses|pya birmanês|pya birmanes|pyas birmaneses'),
('Boliviano', 'boliviano|bolivianos|bob|bs'),
('Centavo Boliviano', 'centavo boliviano|centavos bolivianos'),
('Marco da Bósnia e Herzegovina', 'marco conversível|marco conversivel|marco convertível|marco convertivel|marcos conversíveis|marcos conversiveis|marcos convertíveis|marcos convertivies|bam'),
('Fening da Bósnia e Herzegovina', 'fening conversível|fening conversivel|fening convertível|fening convertivel|fenings conversíveis|fenings conversiveis|fenings convertíveis|fenings convertiveis'),
('Pula', 'pula|pulas|bwp'),
('Thebe', 'thebe|thebes'),
('Real brasileiro', 'real brasileiro|real do brasil|real|reais brasileiros|reais do brasil|reais|r$|brl'),
('Centavo brasileiro', 'centavo de real|centavo brasileiro|centavos de real|centavos brasileiros'),
('Dólar de Brunei', 'dólar de brunei|dolar de brunei|dólar do brunei|dolar do brunei|dólares de brunéi|dolares de brunei|dólares do brunei|dolares do brunei|bnd'),
('Sen de Brunei', 'sen de brunei|sen do brunei|sens de brunei|sens do brunei'),
('Lev búlgaro', 'lev búlgaro|leve búlgaro|leves búlgaros|lev bulgaro|leve bulgaro|leves bulgaros|lv|bgn'),
('Stotinka búlgaro', 'stotinka búlgaro|stotinki búlgaros|stotinka bulgaro|stotinki bulgaros'),
('Franco do Burundi', 'franco do burundi|francos do burundi|fbu|fib'),
('Centavo Burundi', 'centavo burundi|cêntimo burundi|centimo burundi|centavos burundi|cêntimo burundi|centimo burundi'),
('Ngultrum butanês', 'ngultrum butanês|ngultrum butanes|ngúltrume butanês|ngultrume butanes|ngultrum butaneses|ngúltrumes butaneses|ngultrumes butaneses|btn'),
('Chetrum butanês', 'chetrum butanês|chetrum butanes|chetrum butaneses'),
('Escudo cabo-verdiano', 'escudo cabo-verdiano|escudos cabo-verdianos|cve'),
('Riel cambojano', 'riel cambojano|riéis cambojanos|rieis cambojanos|khr'),
('Dólar canadense', 'dólar canadense|dolar canadense|dólares canadenses|dolares canadenses|c$|cad'),
('Centavo canadense', 'centavo canadense|centavos canadenses'),
('Peso chileno', 'peso chileno|pesos chilenos|cpl'),
('Yuan chinês', 'yuan chinês|yuan chines|yuans chineses|yuan|yuans|renminbi|rmb|cny|¥'),
('Peso colombiano', 'peso colombiano|pesos colombianos|cop|col$'),
('Centavo colombiano', 'centavo colombiano|centavos colombianos'),
('Franco comorense', 'franco comorense|francos comorenses|kmf|₣'),
('Franco congolês', 'franco congolês|franco congoles|francos congoleses|cdf'),
('Centavo congolês', 'centavo congolês|centavo congoles|centavos congoleses|cêntimo congolês|centimo congoles|cêntimos congoleses|cêntimos congoleses'),
('Won norte-coreano', 'won norte-coreano|wŏn norte-coreano|won norte-coreanos|wŏn norte-coreanos|kpw'),
('Chon norte-coreano', 'chon norte-coreano|chŏn norte-coreano|chŏn norte-coreanos|chon norte-coreanos'),
('Won sul-coreano', 'wŏn sul-coreano|won sul-coreano|wŏnes sul-coreanos|wones sul-coreanos|krw'),
('Jeon sul-coreano', 'jeons sul-coreano|jeons sul-coreanos'),
('Colón costarriquenho', 'colón costarriquenho|colon costarriquenho|colons costarriquenho|colones costarriquenhos|crc'),
('Kuna croata', 'kuna croata|kunas croatas|hrk'),
('Lipa croata', 'lipa croata|lipas croatas'),
('Peso cubano', 'peso cubano|pesos cubanos|cup'),
('Peso cubano convertível', 'peso cubano conversível|pesos cubanos conversíveis|peso cubano conversivel|pesos cubanos conversiveis|peso cubano convertível|pesos cubanos convertíveis|peso cubano convertivel|pesos cubanos convertiveis|cuc'),
('Coroa dinamarquesa', 'coroa dinamarquesa|coroas dinamarquesas|dkk'),
('Libra egípcia', 'libra egípcia|libra egipcia|libras egípcias|libras egipcias|egp|le'),
('Piastra egípcia', 'piastra egípcia|piastra egipcia|pisastras egípcias|piastras egipcias'),
('Dirham dos Emirados Árabes Unidos', 'dirham|dirhams|dirham dos emirados arabes unidos|aed|dhs'),
('Nakfa', 'nakfa|nfk|ern'),
('Centavo de Nakfa', 'cêntimo de nakfa|cêntimos de nakfa|centavo de nafka|centavos de nafka'),
('Peseta', 'peseta|pesetas|pts.|ptas.|esp'),
('Dólar estadunidense', 'dólar dos estados unidos|dolar dos estados unidos|dólar estadunidense|dólar americano|dólares dos estados unidos|dolares dos estados unidos|dólares estadunidenses|dólares americanos|dolar estadunidense|dolar americano|dolares estadunidenses|dolares americanos|usd|u$d|us$|usd$'),
('Coroa estoniana', 'coroa estoniana|coroas estonianas|eek'),
('Senti estoniano', 'senti estoniano|senti estonianos'),
('Birr etíope', 'birr etíope|birr etiope|birr etíopes|birr etiopes|br|etb'),
('Santim etíope', 'santim etíope|santim etiope|santim etíopes|santim etiopes'),
('Peso filipino', 'peso filipino|pesos filipinos|php'),
('Marco finlandês', 'marco finlandês|marco finlandes|marcos finlandeses'),
('Dólar fijiano', 'dólar fijiano|dolar fijiano|dólares fijianos|dolares fijianos|fj$|fjd'),
('Centavo fijiano', 'centavo fijiano|centavos fijianos'),
('Dalasi gambiano', 'dalasi|gmd'),
('Bututs', 'butut|bututs'),
('Lari georgiano', 'lari georgiano|lari georgianos|gel'),
('Tetri georgiano', 'tetri georgiano|tetri georgianos'),
('Cedi', 'cedi|ghs|gh₵'),
('Pesewa', 'pesewa'),
('Libra de Gibraltar', 'libra de gibraltar|libras de gibraltar|gip'),
('Peni de Gibraltar', 'peni de gibraltar|penies de gibraltar'),
('Quetzal guatemalteco', 'quetzal guatemalteco|quetzales guatemaltecos|quetzal|quetzales|gtq'),
('Centavo guatemalteco', 'centavo guatemalteco|centavos guatemaltecos'),
('Libra de Guernsey', 'libra de Guernsey|libras de Guernsey|ggp'),
('Peni de Guernsey', 'peni de Guernsey|penies de Guernsey'),
('Franco da Guiné', 'franco da guiné|franco da guine| franco guineense|francos da guiné|francos da guine|francos guineense|gnf|fg'),
('Centavo da Guiné', 'cêntimo guineense|centimo guineense|centavo guineense|cêntimos guineenses|centimos guineenses|centavos guineenses'),
('Dólar guianense', 'dólar guianense|dólares guianense|dolar guianense|dolares guianense|gyd|gy'),
('Gurde haitiano', 'gurde haitiano|gourde|gurdes haitianos|htg'),
('Centavo haitiano', 'cêntimo haitiano|cêntimos haitianos|centavo haitiano|centavos haitianos'),
('Lempira hondurenha', 'lempira hondurenha|lempiras hondurenhas|lempira|lempiras|hnl'),
('Centavo hondurenho', 'centavo hondurenho|centavos hondurehos|cêntimo hondurenho|cêntimos hondurenhos'),
('Dólar de Hong Kong', 'dólar de hong kong|dolar de hong kong|dólares de hong kong|dolares de hong kong|hk$|hkd'),
('Florim húngaro', 'florim húngaro|florim hungaro|florins húngaros|florins hungaros|forinte|forintes|huf'),
('Filér húngaro', 'fillér|filér|filler|filer'),
('Rupia indiana', 'rúpia indiana|rupia indiana|rupias indianas|inr'),
('Paisa indiana', 'paisa indiana|paisas indianas'),
('Rupia indonésia', 'rupia indonesia|rupia indonésia|rupias indonesias|rupias indonésias|idr'),
('Sen indonésio', 'send indonésio|sen indonesio|sen indonésios|sen indonesios'),
('Rial iraniano', 'rial iraniano|riais iranianos|irr'),
('Dinar iraquiano', 'dinar iraquiano|dinares iraquianos|iqd'),
('Fil iraquiano', 'fil iraquiano|fils iraquianos|files iraquianos'),
('Libra manesa', 'libra manesa|libras manesas|imp'),
('Peni manês', 'peni manes|peni manês|penies maneses'),
('Coroa islandesa', 'coroa islandesa|coroas islandesas|isk|íkr'),
('Aurar islandês', 'aurar islandês|aurar islandes|aurar islandeses|eyrir'),
('Dólar das Ilhas Cayman', 'dólar das ilhas cayman|dolar das ilhas cayman|dólar das ilhas caimão|dólares das ilhas cayman|dolares das ilhas cayman|dólares das ilhas caimão|ci$|kyd'),
('Dólar das Ilhas Cook', 'dólar das ilhas cook|dolar das ilhas cook|dólares das ilhas cook|dolares das ilhas cook'),
('Coroa feroesa', 'coroa feroesa|coroas feroesas|fkr'),
('Libra das Malvinas', 'libra das malvinas|libras das malvinas|fk£|fkp'),
('Dólar das Ilhas Salomão', 'dólar das ilhas salomão|dolar das ilhas salomao|dólares das ilhas salomão|dolares das ilhas salomao|sbd'),
('Novo shekel israelense', 'novo shekel|novos shekeles|novo shequel|novo siclo|novo xéquel|shekeles novos|novos sheqalim|sheqalim novos|ils'),
('Agora', 'agora|agorot'),
('Dólar jamaicano', 'dólar jamaicano|dolar jamaicano|dólares jamaicanos|dolares jamaicanos|j$|ja$|jmd'),
('Yen', 'yen|iene|yenes|ienes|jpy'),
('Libra de Jersey', 'libra de Jersey|libras de Jersey|jep'),
('Dinar jordaniano', 'dinar jordaniano|dinar jordano|dinares jordanianos|dinares jordanos|jd|jod'),
('Piastra jordaniana', 'piastra jordaniana|piastra jordano|piastras jordanianas|piastra jordaniano|piastras jordanianos|piastras jordanos'),
('Tengue cazaque', 'tenge|tengue|tengué|tengue cazaque|kzt'),
('Tiyin', 'tiyin|tiyins'),
('Xelim queniano', 'xelim queniano|xelins quenianos|ksh|kes'),
('Som quirguiz', 'som quirguiz|som quirguizes|soms quirguizes|kgs'),
('Tyiyn', 'tyiyn|tyiyns'),
('Dólar de Kiribati', 'dólar de kiribati|dolar de kiribati|dólares de kiribati|dolares de kiribati'),
('Dinar kuwaitiano', 'dinar kuwaitiano|dinar cuaitiano|dinares kuwaitiano|dinares cuaitianos|kwd'),
('Quipe laosiano', 'quipe|quipes|kipe|kipes|kip|kip laosiano|kip laociano|kips laosianos|kips laocianos|lak'),
('Att laosiano', 'at|att|att laosiano|att laosianos'),
('Loti do Lesoto', 'loti|lóti|maloti|lotis|lótis|lsl'),
('Sente', 'sente|lisente'),
('Libra libanesa', 'libra libanesa|libras libanesas|lbp'),
('Dólar liberiano', 'dólar liberiano|dolar liberiano|dólares liberianos|dolares liberianos|l$|lrd'),
('Dinar libio', 'dinar libio|dinar líbio|dinares libios|dinares líbios|ld|lyd'),
('Dirham libio', 'dirham libio|dirhams libios|dirham líbio|dirhams líbios'),
('Litas lituana', 'litas lituana|litai lituanas|ltl'),
('Pataca macaense', 'pataca macaense|patacas macaenses|mop$|mop'),
('Avo macaense', 'avo macaense|avos macaenses'),
('Ho macaense', 'ho macaense|ho macaenses'),
('Dinar macedônio', 'denar macedonio|denare macedonios|denar macedônio|denar macedónio|denare macedônio|denare macedónio|dinar macedonio|dinar macedônio|dinar macedónio|dinares macedonios|dinares macedônios|dinares macedónios|den|mkd'),
('Deni macedônio', 'deni macedonio|deni macedônio|deni macedónio|denis macedonios|denis macedônios|denis macedónios'),
('Ariary malgaxe', 'ariai malgaxe|ariary malgaxe|ariary malgaxes|ariaris|mga'),
('Iraimbilanja', 'iraimbilanja|iraimbilanjas'),
('Ringuite malaio', 'ringgit malaio|ringgit malaios|ringgits malaios|ringuite malaio|ringuites malaios|rm|myr'),
('Sen malaio', 'sen malaio|sen malaios|centavo malaio|centavos malaios|cêntimo malaio|cêntimos malaios'),
('Kwacha do Malawi', 'kwacha|cuacha|quacha|mk|mwk'),
('Tambala', 'tambala|tambalas|tambala malawi'),
('Rupia maldiva', 'rupia maldiva|rupias maldivas|rupia das maldivas| rupias das maldivas|mvr'),
('Dirame marroquino', 'dirame marroquino|dirham marroquinho|dirhams marroquinos|dirames marroquinos|mad'),
('Rupia maurícia', 'rupia maurícia|rupia de Maurício|rupia mauricia|rupia de mauricio|rupias de mauricio|rupias de maurício|rupias mauricias|rupias maurícias|mur'),
('Uguia', 'uguia|uguias|oguia|ouguiya|oguias|mro'),
('Kume', 'kumes|kume|khoums'),
('Peso mexicano', 'peso mexicano|pesos mexicanos|mxn'),
('Centavo mexicano', 'centavo mexicano|centavos mexicanos'),
('Leu moldávio', 'leu moldavo|lei moldavos|leu moldávio|leu moldavio|lei moldávios|lei moldavios|leus moldavos|leus moldavios|leus moldávios|mdl'),
('Ban moldávio', 'ban moldavo|bani moldavos'),
('Tugrik mongol', 'tugrik mongol|tugrik|tugriks mongóis|tugriks mongois|tug|mnt'),
('Metical moçambicao', 'metical|metical moçambicano|metical mocambicano|meticais|meticais moçambicanos|meticais mocambicanos|mtn|mzn'),
('Dólar namibiano', 'dólar namibiano|dólares namibianos|dolar namibio|dolares namibios|n$|nad'),
('Centavo namibiano', 'centavo namibiano|centavos namibianos|centavo namibio|centavos namibianos'),
('Rupia nepalesa', 'rupia nepalesa|rupias nepalesas|npr'),
('Paisa nepalesa', 'paisa nepalesa|paisas nepalesas'),
('Córdova nicaraguense', 'córdova nicaraguense|cordova nicaraguense|cordova nicaraguana|córdoba nicaragüense|córdobas nicaragüenses|cordobas nicaraguenses|córdovas nicaraguenses|cordovas nicaraguenses|córdovas nicaraguanasc$|nio'),
('Centavo nicaraguense', 'centavo nicaragüense|centavos nicaraguenses|centavo nicaraguano|centavos nicaraguenses|centavo nicaraguano|centavos nicaraguanos'),
('Naira', 'naira|ngn'),
('Kobo', 'kobo'),
('Coroa norueguesa', 'coroa norueguesa|coroas norueguesas|nok'),
('Franco CFP', 'franco cfp|francos cfp|xpf'),
('Dólar neozelandês', 'dólar neozelandês|dolar neozelandes|dólares neozelandeses|dolares neozelandeses|dólar da nova zelândia|dolar da nova zelandia|dólares da nova zelândia|dolares da nova zelandia|nz$|nzd'),
('Centavo neozelandês', 'centavo neozelandês|centavo neozelandes|centavo da nova zelandia|centavo da nova zelândia|centavos da nova zelandia|centavos neozelandeses|centavos da nova zelândia'),
('Rial omanense', 'rial omani|riais omanis|rial omanense|riais omanenses|omr'),
('Baisa omanense', 'baisa omani|baisas omanis|baisa omanense|baisas omanenses'),
('Florim holandês', 'florim holandês|florim holandes|florins holandeses|nlg'),
('Rupia paquistanesa', 'rupia paquistanesa|rupias paquistanesas|pkr'),
('Paisa paquistanesa', 'paisa paquistanesa|paisas paquistanesasas'),
('Balboa panamenho', 'balboa panamenho|balboas panamenhos|balboa|pab|balboa panamense|balboas panamenses'),
('Centavo panamenho', 'centavo panamenho|cêntimo panamenho|centavos panamenhos|cêntimos panamenhos|cêntimo panamense|cêntimos panamenses'),
('Kina', 'kina|kina papuásia|kinas|kinas papuásias|pkg|pgk'),
('Toea', 'toea'),
('Guarani', 'guarani|guaranis|gs|pyg'),
('Novo Sol', 'novo sol peruano|novos sóis peruanos|sol|soles|sóis|nuevo sol|pen|s#.'),
('Centavo de sol', 'cêntimo de sol|cêntimos de sol|centavo de sol|centavos de sol'),
('Złoty', 'złoty|złotys|zloty|zlotys|zloti|zlotis|zlóti|zlótis|zlote|zł|pln'),
('Groszy', 'groszy|grosz'),
('Rial catariano', 'rial qatari|riais qataris|rial catarense|riais catarenses|rial catariano|riais catarianos|qr|qar'),
('Dirame catariano', 'dirame catariano|dirames catarianos|dirame qatari|dirames qataris|dirame catarense|dirames catarenses|dirham qatari|dirhams qataris|dirham catarense|dirhams catarenses|dirham catariano|dirhams catariano'),
('Libra esterlina', 'libra esterlina|libras esterlinas|gbp'),
('Coroa checa', 'coroa checa|coroas checas|kc|czk'),
('Peso dominicano', 'peso dominicano|pesos dominicanos|rd$|dop'),
('Centavo dominicano', 'centavo dominicano|centavos dominicanos'),
('Franco ruandês', 'franco ruandês|franco ruandes|francos ruandeses|rf|rwf'),
('Céntimo ruandês', 'cêntimo ruandês|centimo ruandes|centavo ruandês|centavo ruandes|cêntimos ruandeses|centimos ruandeses|centavos ruandeses'),
('Leu romeno', 'leu romeno|lei romenos|leus romenos|ron'),
('Ban romeno', 'ban romeno|bani romeno|bans romenos'),
('Rublo russo', 'rublo russo|rublos russos|rub|р.'),
('Copeque ruso', 'copeque russo|copeques russos|kopek ruso|kopeks rusos|copeque|copeques|kopek|kopeks'),
('Tala samoano', 'tala|tālā|talas|tala samonano|talas samoanos|ws$|sat|wst'),
('Sene samoano', 'sene'),
('Libra de Santa Helena', 'libra de santa helena|libras de santa helena|shp'),
('Pêni de Santa Helena', 'peni de santa helena|penies de santa helena'),
('Dobra', 'dobra|dobras|db|std'),
('Dinar sérvio', 'dinar sérvio|dinar servio|dinar serbio|dinares sérvios|dinares servios|dinares serbios|rsd'),
('Para sérvio', 'para sérvio|para servio|para serbio|paras sérvios|paras servios|paras serbios'),
('Rupia seichelense', 'rupia de seicheles|rupias de seicheles|rupia seichelense|rupias seichelenses|scr'),
('Centavo seichelense', 'centavo de seicheles|centavos de seicheles|centavo seichelense|centavos seichelenses'),
('Leone serra-leonino', 'leone|leones|leone serra-leonino|leones serra-leoninos|le|sll'),
('Dólar de Cingapura', 'dólar de singapura|dolar de singapura|dórar de cingapura|dolar de cingapura|dólares de singapura|dolares de singapura|dólares de cingapura|dolares de cingapura|sgb'),
('Centavo de Cingapura', 'centavo de singapura|centavos de singapura|centavo de cingapura|centavos de cingapura'),
('Libra síria', 'libra síria|libra siria|libras sírias|libras sirias|s£|syp'),
('Piastra síria', 'piastra siria|piastras sirias|piastra síria|piastras sírias'),
('Xelim somali', 'xelim somali|xelins somalis|xelim somaliano|xelins somalianos|sos'),
('Centavo somali', 'centavo somapli|centavos somalis|centavo somaliano|centavos somalianos'),
('Xelim da Somalilândia', 'xelim da somalilândia|xelins da somalilândia|xelim da somalilandia|xelins da somalilandia'),
('Centavo da Somalilândia', 'centavo da somalilândia|centavos da somalilândia|centavo da somalilandia|centavos da somalilandia'),
('Rupia do Sri Lanka', 'rupia do sri lanka|rupia do sri lanca|rupias do sri lanka|rupias do sri lanca|rupia cingalesa|rupias cingalesas|lkr'),
('Lilangeni', 'lilangeni|lilangenis|emalangeni|szl'),
('Rand sul-africano', 'rand|rand sul-africano|rands|rands sul-africanos|zar'),
('Libra sudanesa', 'libra sudanesa|libras sudanesas|sdg'),
('Piastra sudanesa', 'piastra sudanesa|piastras sudanesas'),
('Libra sul-sudanesa', 'libra sul-sudanesa|libras sul-sudanesas|ssp'),
('Piastra sul-sudanesa', 'piastra sul-sudanesa|piastras sul-sudanesas'),
('Coroa sueca', 'coroa sueca|coroas suecas|sek'),
('Franco suíço', 'franco suíço|franco suico|francos suíços|francos suicos|sfr|chf'),
('Rappen suíço', 'rappen suíço|rappen suico|rappens suíços|rappens suicos'),
('Dólar surinamês', 'dólar surinamês|dolar surinames|dólar do Suriname|dolar do Suriname|dólares surinameses|dolares surinameses|dólares do Suriname|dolares do Suriname|srd'),
('Centavo surinamês', 'centavo surinamês|centavo surinames|centavos surinameses'),
('Baht tailandês', 'baht tailandês|bath tailandes|baht tailandeses|thb'),
('Satang tailandês', 'satang tailandês|satang tailandes|satang tailandeses'),
('Novo dólar taiwanês', 'novo dólar taiwanês|novo dolar taiwanes|dólar taiwanês|dolar taiwanes|dólares taiwaneses|dolares taiwaneses|twd'),
('Centavo taiwanês', 'centavo taiwanês|centavo taiwanes|centavos taiwaneses'),
('Xelim tanzaniano', 'xelim tanzaniano|xelins tanzanianos|tzs'),
('Centavo tanzaniano', 'centavo tanzaniano|centavos tanzanianos'),
('Somoni tajique', 'somoni tajique|somoni|somonis tajiques|somonis|tjs'),
('Diram tajique', 'diram tajique|dirams tajiques|dirames tajiques'),
('Paʻanga', 'paanga|paangas|paʻanga|pa\'anga|top'),
('Seniti', 'seniti'),
('Rublo transdniestriano', 'rublo transdniestriano|rublos transdniestriano'),
('Copeque transdniestriano', 'copeque transdniestriano|copeques transdniestriano'),
('Dólar de Trinidade e Tobago', 'dólar de trinidade e tobago|dólares trinidade e tobago|dolar de trinidade e tobago|dolares trinidade e tobago|dólar de trinidad e tobago|dólares trinidad e tobago|ttd'),
('Centavo de Trinidade e Tobago', 'centavo de trinidade e tobago|centavos de trinidade e tobago|centavo de trinidad e tobago|centavos de trinidad e tobago'),
('Dinar tunisiano', 'dinar tunisiano|dinares tunisianos|dinar tunisino|dinares tunisinos|tnd'),
('Milim tunisiano', 'milim tunisiano|milim tunesianos|millime tunisianos|millimes tunisianos|milim tunisino|milim tunisinos|millime tunisinos|millimes tunisinos'),
('Lira turca', 'lira turca|liras turcas|try'),
('Kuruş turco', 'kuruş turco|kuruş turcos'),
('Manat turcomeno', 'manat turcomeno|manats turcomenos|tmt'),
('Tennesi turcomeno', 'tennesi turcomeno|tennesis turcomenos|tenge turcomenos|tenges turcomenos'),
('Dólar tuvaluano', 'dólar tuvaluano|dolar tuvaluano|dólares tuvaluanos|dolares tuvaluanos'),
('Centavo tuvaluano', 'centavo tuvaluano|centavos tuvaluanos'),
('Grívnia', 'grívnia|grivnia|grívnias|grivnias|grivna|grivnas|uah'),
('Copeque ucraniano', 'kopiyka|copeque ucraniano|copeques ucranianos'),
('Xelim ugandês', 'xelim ugandês|xelim ugandes|xelins ugandeses|ugx'),
('Centavo ugandês', 'centavo ugandês|centavo ugandes|centavos ugandeses'),
('Peso uruguaio', 'peso uruguaio|pesos uruguayis|uyu'),
('Centésimo uruguayo', 'centésimo uruguaio|centesimo uruguaio|centésimos uruguaios|centesimos uruguaios'),
('Som uzbeque', 'som uzbeque|som uzbeques|soms uzbeques|somes uzbeques|som usbeque|som usbeques|soms usbeques|somes usbeques|uzs'),
('Tiyin uzbeque', 'tiyin uzbeque|tiyin uzbeques|tiyins uzbeques|tiyin usbeque|tiyin usbeques|tiyins usbeques'),
('Vatu', 'vatu|vatus|vuv'),
('Bolívar forte venezuelano', 'bolívar forte|bolivar forte|bolívar|bolivar|bolívares|bolivares|vef'),
('Centavo de bolívar', 'cêntimo de bolívar|cêntimos de bolívar|centavo de bolívar|centavo de bolivar|centavos de bolívar|centavos de bolivar'),
('Dongue vietnamita', 'dongue vietnamita|Đồng vietnamita|dong vietnamita|dongues vietnamitas|dongs vietnamitas|vnd'),
('Hào vietnamita', 'hào vietnamita|hao vietnamita|hào vietnamitas|hàos vietnamitas|haos vietnamitas'),
('Rial iemenita', 'rial iemenita|riais iemenitas|yer'),
('Fils iemenita', 'fils iemenita|fils iemenitas'),
('Franco djibutiano', 'franco djibutiano|francos djibutianos|franco jibutiano|francos jibutianos|djf'),
('Dinar iugoslavo', 'dinar iugoslavo|dinares iugoslavos|dinar jugoslavo|dinares jugoslavos|yud'),
('Kwacha zambiano', 'kwacha zambiano|kwacha zambianos|kwachas zambianos|zmw'),
('Ngwee zambiano', 'ngwee zambiano|ngwee zambianos|ngwees zambianos')])
CompoundUnitConnectorRegex = f'(?<spacer>[^.])'
CurrencyPrefixList = dict([('Dólar', '$'),
('Dólar estadunidense', 'us$|u$d|usd$|usd'),
('Dólar do Caribe Oriental', 'ec$|xcd'),
('Dólar australiano', 'a$|aud'),
('Dólar bahamense', 'b$|bsd'),
('Dólar de Barbados', 'bds$|bbd'),
('Dólar de Belizebe', 'bz$|bzd'),
('Dólar bermudense', 'bd$|bmd'),
('Dólar de Brunebi', 'brunéi $|bnd'),
('Dólar de Cingapura', 's$|sgd'),
('Dólar canadense', 'c$|can$|cad'),
('Dólar das Ilhas Cayman', 'ci$|kyd'),
('Dólar neozelandês', 'nz$|nzd'),
('Dólar fijgiano', 'fj$|fjd'),
('Dólar guianense', 'gy$|gyd'),
('Dólar de Hong Kong', 'hk$|hkd'),
('Dólar jamaicano', 'j$|ja$|jmd'),
('Dólar liberiano', 'l$|lrd'),
('Dólar namibiano', 'n$|nad'),
('Dólar das Ilhas Salomão', 'si$|sbd'),
('Novo dólar taiwanês', 'nt$|twd'),
('Real brasileiro', 'r$|brl'),
('Guarani', '₲|gs.|pyg'),
('Dólar de Trinidade e Tobago', 'tt$|ttd'),
('Yuan chinês', '¥|cny|rmb'),
('Yen', '¥|jpy'),
('Euro', '€|eur'),
('Florim', 'ƒ'),
('Libra', '£|gbp'),
('Colón costarriquenho', '₡'),
('Lira turca', '₺')])
AmbiguousCurrencyUnitList = ['le']
InformationSuffixList = dict([('bit', 'bit|bits'),
('kilobit', 'kilobit|kilobits|kb|kbit'),
('megabit', 'megabit|megabits|Mb|Mbit'),
('gigabit', 'gigabit|gigabits|Gb|Gbit'),
('terabit', 'terabit|terabits|Tb|Tbit'),
('petabit', 'petabit|petabits|Pb|Pbit'),
('kibibit', 'kibibit|kibibits|kib|kibit'),
('mebibit', 'mebibit|mebibits|Mib|Mibit'),
('gibibit', 'gibibit|gibibits|Gib|Gibit'),
('tebibit', 'tebibit|tebibits|Tib|Tibit'),
('pebibit', 'pebibit|pebibits|Pib|Pibit'),
('byte', 'byte|bytes'),
('kilobyte', 'kilobyte|kilobytes|kB|kByte'),
('megabyte', 'megabyte|megabytes|MB|MByte'),
('gigabyte', 'gigabyte|gigabytes|GB|GByte'),
('terabyte', 'terabyte|terabytes|TB|TByte'),
('petabyte', 'petabyte|petabytes|PB|PByte'),
('kibibyte', 'kibibyte|kibibytes|kiB|kiByte'),
('mebibyte', 'mebibyte|mebibytes|MiB|MiByte'),
('gibibyte', 'gibibyte|gibibytes|GiB|GiByte'),
('tebibyte', 'tebibyte|tebibytes|TiB|TiByte'),
('pebibyte', 'pebibyte|pebibytes|PiB|PiByte')])
AmbiguousDimensionUnitList = ['ton', 'tonelada', 'área', 'area', 'áreas', 'areas', 'milha', 'milhas']
BuildPrefix = f'(?<=(\\s|^|\\P{{L}}))'
BuildSuffix = f'(?=(\\s|\\P{{L}}|$))'
ConnectorToken = 'de'
LengthSuffixList = dict([('Quilômetro', 'km|quilometro|quilômetro|quilómetro|quilometros|quilômetros|quilómetros'),
('Hectômetro', 'hm|hectometro|hectômetro|hectómetro|hectometros|hectômetros|hectómetros'),
('Decâmetro', 'decametro|decâmetro|decámetro|decametros|decâmetro|decámetros|dam'),
('Metro', 'm|m.|metro|metros'),
('Decímetro', 'dm|decimetro|decímetro|decimetros|decímetros'),
('Centímetro', 'cm|centimetro|centímetro|centimetros|centimetros'),
('Milímetro', 'mm|milimetro|milímetro|milimetros|milímetros'),
('Micrômetro', 'µm|um|micrometro|micrômetro|micrómetro|micrometros|micrômetros|micrómetros|micron|mícron|microns|mícrons|micra'),
('Nanômetro', 'nm|nanometro|nanômetro|nanómetro|nanometros|nanômetros|nanómetros|milimicron|milimícron|milimicrons|milimícrons'),
('Picômetro', 'pm|picometro|picômetro|picómetro|picometros|picômetros|picómetros'),
('Milha', 'mi|milha|milhas'),
('Jarda', 'yd|jarda|jardas'),
('Polegada', 'polegada|polegadas|\"'),
('Pé', 'pé|pe|pés|pes|ft'),
('Ano luz', 'ano luz|anos luz|al')])
AmbiguousLengthUnitList = ['mi', 'milha', 'milhas']
SpeedSuffixList = dict([('Metro por segundo', 'metro/segundo|m/s|metro por segundo|metros por segundo|metros por segundos'),
('Quilômetro por hora', 'km/h|quilômetro por hora|quilómetro por hora|quilometro por hora|quilômetros por hora|quilómetros por hora|quilometros por hora|quilômetro/hora|quilómetro/hora|quilometro/hora|quilômetros/hora|quilómetros/hora|quilometros/hora'),
('Quilômetro por minuto', 'km/min|quilômetro por minuto|quilómetro por minuto|quilometro por minuto|quilômetros por minuto|quilómetros por minuto|quilometros por minuto|quilômetro/minuto|quilómetro/minuto|quilometro/minuto|quilômetros/minuto|quilómetros/minuto|quilometros/minuto'),
('Quilômetro por segundo', 'km/seg|quilômetro por segundo|quilómetro por segundo|quilometro por segundo|quilômetros por segundo|quilómetros por segundo|quilometros por segundo|quilômetro/segundo|quilómetro/segundo|quilometro/segundo|quilômetros/segundo|quilómetros/segundo|quilometros/segundo'),
('Milha por hora', 'mph|milha por hora|mi/h|milha/hora|milhas/hora|milhas por hora'),
('Nó', 'kt|nó|nós|kn'),
('Pé por segundo', 'ft/s|pé/s|pe/s|ft/seg|pé/seg|pe/seg|pé por segundo|pe por segundo|pés por segundo|pes por segundo'),
('Pé por minuto', 'ft/min|pé/mind|pe/min|pé por minuto|pe por minuto|pés por minuto|pes por minuto'),
('Jarda por minuto', 'jardas por minuto|jardas/minuto|jardas/min'),
('Jarda por segundo', 'jardas por segundo|jardas/segundo|jardas/seg')])
AmbiguousSpeedUnitList = ['nó', 'no', 'nós', 'nos']
TemperatureSuffixList = dict([('Kelvin', 'k|K|kelvin'),
('Grau Rankine', 'r|°r|°ra|grau rankine|graus rankine| rankine'),
('Grau Celsius', '°c|grau c|grau celsius|graus c|graus celsius|celsius|grau centígrado|grau centrigrado|graus centígrados|graus centigrados|centígrado|centígrados|centigrado|centigrados'),
('Grau Fahrenheit', '°f|grau f|graus f|grau fahrenheit|graus fahrenheit|fahrenheit'),
('Grau', '°|graus|grau')])
VolumeSuffixList = dict([('Quilômetro cúbico', 'quilômetro cúbico|quilómetro cúbico|quilometro cubico|quilômetros cúbicos|quilómetros cúbicos|quilometros cubicos|km3|km^3|km³'),
('Hectômetro cúbico', 'hectômetro cúbico|hectómetro cúbico|hectometro cubico|hectômetros cúbicos|hectómetros cúbicos|hectometros cubicos|hm3|hm^3|hm³'),
('Decâmetro cúbico', 'decâmetro cúbico|decámetro cúbico|decametro cubico|decâmetros cúbicos|decámetros cúbicos|decametros cubicosdam3|dam^3|dam³'),
('Metro cúbico', 'metro cúbico|metro cubico|metros cúbicos|metros cubicos|m3|m^3|m³'),
('Decímetro cúbico', 'decímetro cúbico|decimetro cubico|decímetros cúbicos|decimetros cubicos|dm3|dm^3|dm³'),
('Centímetro cúbico', 'centímetro cúbico|centimetro cubico|centímetros cúbicos|centrimetros cubicos|cc|cm3|cm^3|cm³'),
('Milímetro cúbico', 'milímetro cúbico|milimetro cubico|milímetros cúbicos|milimetros cubicos|mm3|mm^3|mm³'),
('Polegada cúbica', 'polegada cúbica|polegada cubica|polegadas cúbicas|polegadas cubicas'),
('Pé cúbico', 'pé cúbico|pe cubico|pés cúbicos|pes cubicos|pé3|pe3|pé^3|pe^3|pé³|pe³|ft3|ft^3|ft³'),
('Jarda cúbica', 'jarda cúbica|jarda cubica|jardas cúbicas|jardas cubicas|yd3|yd^3|yd³'),
('Hectolitro', 'hectolitro|hectolitros|hl'),
('Litro', 'litro|litros|lts|l'),
('Mililitro', 'mililitro|mililitros|ml'),
('Galão', 'galão|galões|galao|galoes'),
('Pint', 'pinta|pintas|pinto|pintos|quartilho|quartilhos|pint|pints'),
('Barril', 'barril|barris|bbl'),
('Onça líquida', 'onça líquida|onca liquida|onças líquidas|oncas liquidas')])
WeightSuffixList = dict([('Tonelada métrica', 'tonelada métrica|tonelada metrica|toneladas métricas|toneladas metricas'),
('Tonelada', 'ton|tonelada|toneladas'),
('Quilograma', 'kg|quilograma|quilogramas|quilo|quilos|kilo|kilos'),
('Hectograma', 'hg|hectograma|hectogramas'),
('Decagrama', 'dag|decagrama|decagramas'),
('Grama', 'g|gr|grama|gramas'),
('Decigrama', 'dg|decigrama|decigramas'),
('Centigrama', 'cg|centigrama|centigramas'),
('Miligrama', 'mg|miligrama|miligramas'),
('Micrograma', 'µg|ug|micrograma|microgramas'),
('Nanograma', 'ng|nanograma|nanogramas'),
('Picograma', 'pg|picograma|picogramas'),
('Libra', 'lb|libra|libras'),
('Onça', 'oz|onça|onca|onças|oncas'),
('Grão', 'grão|grao|grãos|graos|gr'),
('Quilate', 'ct|kt|quilate|quilates')])
# pylint: enable=line-too-long
| 107.454148 | 335 | 0.559739 |
44292370c125f5d4f1916e1028aa8750ee10c13c | 1,734 | py | Python | apps/users/views.py | fylein/fyle-intacct-api | 16e45538ec3a2b7af396742a42302704c33a7bd7 | ["MIT"] | null | null | null | apps/users/views.py | fylein/fyle-intacct-api | 16e45538ec3a2b7af396742a42302704c33a7bd7 | ["MIT"] | 3 | 2020-07-20T10:54:15.000Z | 2022-02-23T17:13:49.000Z | apps/users/views.py | fylein/fyle-intacct-api | 16e45538ec3a2b7af396742a42302704c33a7bd7 | ["MIT"] | 2 | 2020-07-25T14:50:56.000Z | 2020-08-02T13:48:37.000Z |
from rest_framework import generics, status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from fyle_rest_auth.models import AuthToken
from apps.fyle.connector import FyleConnector
from apps.workspaces.models import FyleCredential, Workspace
class UserProfileView(generics.RetrieveAPIView):
permission_classes = [IsAuthenticated]
def get(self, request, *args, **kwargs):
"""
Get User Details
"""
fyle_credentials = AuthToken.objects.get(user__user_id=request.user)
fyle_connector = FyleConnector(fyle_credentials.refresh_token)
employee_profile = fyle_connector.get_employee_profile()
return Response(
data=employee_profile,
status=status.HTTP_200_OK
)
class FyleOrgsView(generics.ListCreateAPIView):
"""
FyleOrgs view
"""
permission_classes = [IsAuthenticated]
def get(self, request, *args, **kwargs):
"""
Get cluster domain from Fyle
"""
try:
fyle_credentials = AuthToken.objects.get(user__user_id=request.user)
fyle_connector = FyleConnector(fyle_credentials.refresh_token)
            cluster_domain = fyle_connector.get_cluster_domain()['cluster_domain']
            fyle_orgs = fyle_connector.get_fyle_orgs(cluster_domain=cluster_domain)
return Response(
data=fyle_orgs,
status=status.HTTP_200_OK
)
except FyleCredential.DoesNotExist:
return Response(
data={
'message': 'Invalid / Expired Token'
},
status=status.HTTP_400_BAD_REQUEST
)
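# --- Illustrative sketch (not from the repository): how these views might be
# exposed in a urls.py. The route paths and names are assumptions.
#
# from django.urls import path
# from .views import UserProfileView, FyleOrgsView
#
# urlpatterns = [
#     path('profile/', UserProfileView.as_view(), name='user-profile'),
#     path('orgs/', FyleOrgsView.as_view(), name='fyle-orgs'),
# ]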
| 29.389831
| 81
| 0.653979
|
73c34b1503e6b7863ac80be15afae10936a2c5ce
| 5,576
|
py
|
Python
|
particleFilter.py
|
andrewwarrington/cost-optimal-particle-filter
|
a6acb60ca90c7f7b984182891d39adeb7e05724f
|
[
"MIT"
] | 2
|
2018-06-20T21:17:47.000Z
|
2018-09-07T01:27:26.000Z
|
particleFilter.py
|
andrewwarrington/cost-optimal-particle-filter
|
a6acb60ca90c7f7b984182891d39adeb7e05724f
|
[
"MIT"
] | null | null | null |
particleFilter.py
|
andrewwarrington/cost-optimal-particle-filter
|
a6acb60ca90c7f7b984182891d39adeb7e05724f
|
[
"MIT"
] | null | null | null |
# MIT License
#
# Copyright (c) 2018, Andrew Warrington.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
particleFilter.py
AW
TL;DR -- This module contains the necessary functions for adding particle-filter
like behaviours to a generic state space model.
This script contains the basic functionality for performing (sequential) importance
sampling. The core function is the `iterate' function. This function takes a
vector of particles, an observation, and the standard deviation of this observation
(under the observation model) and resamples the particles according to their
likelihood. This function, in conjunction with a plant model provided outside of
this script, allows you to write a particle filter.
The key `flaw' in this script is that it assumes the observation is zero mean error
about the true state. If the observation function is more complex, then this will
need to be updated. This assumption was made to make the code easier to use.
A permutation matrix that maps states onto observations must also be provided.
"""
# Import modules.
import numpy as np
import matplotlib.pyplot as plt
import time
import scipy.stats as scis
def iterate(_particles, _observation, _observation_sd):
'''
particleFilter.iterate:
    Function takes in the current particles as an NxM matrix (where N is
    the number of particles and M is the dimensionality of the state) and a
    single observation with dimensionality Hx1 (where H is the dimensionality
    of the observation).
Assumes the observations are normally distributed about the true value.
    :param _particles: NxM matrix of particles.
    :param _observation: single observation.
    :param _observation_sd: positive float containing the standard deviation of the observation.
    :return: Dictionary:
                {
                 'log_weights': the log weight of each of the input
                        N particles.
                 'resampled_indices': the vector of N resampled
                        particle indices.
                }
'''
# Retrieve the number of particles, dimensionality of state and dimensionality
# of the observation.
[N, _] = np.shape(_particles)
# Calculate the log probability of each particle under a Gaussian observation
# model.
_log_weights = norm_log_pdf(_particles, _observation, _observation_sd)
# Make the weights zero-mean to improve the numerical stability.
zeroed_log_weights = _log_weights - np.nanmax(_log_weights)
zeroed_weights = np.exp(zeroed_log_weights)
zeroed_weights_sum = np.nansum(zeroed_weights)
zeroed_weights_normalized = zeroed_weights / zeroed_weights_sum
    # Since we resample the same number of particles, we can use Tuan Anh Le's
    # fast systematic resampling code.
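    # Systematic resampling: draw one uniform offset in [0, 1/N) and take N
    # evenly spaced points in [0, 1); np.digitize then maps each point to the
    # particle whose cumulative-weight interval contains it. This is O(N) and
    # has lower variance than drawing N independent uniforms (multinomial).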
uniforms = np.random.rand() / N + np.arange(N) / float(N)
resampled_indexes = np.digitize(uniforms, bins=np.nancumsum(zeroed_weights_normalized))
return {'log_weights': _log_weights, 'resampled_indices': resampled_indexes}
def norm_log_pdf(x, loc=0, sd=1):
'''
particleFilter.normpdf:
Calculate the probability density for a set of particles, given the
normal distribution.
:param x: Input particles.
:param loc: Mean of normal distribution.
:param sd: Standard deviation of normal distribution.
:return: Vector of log-probabilities.
'''
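    # Dimensions are treated as independent: the per-dimension Gaussian
    # log-densities are summed along axis 1, giving one joint log-likelihood
    # per particle.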
ll = np.sum(scis.norm(loc, sd).logpdf(x), axis=1)
return ll
if __name__ == "__main__":
'''
Define a main function for demonstrative purposes.
'''
print('Particle filter demonstration.')
start = time.time()
steps = 100
observations = np.zeros((steps, 1))
states = np.zeros((steps, 2))
states[0, 0] = np.random.normal(0, 1)
states[0, 1] = np.random.normal(1, 0.1)
for i in range(steps):
if i > 1:
velocity = np.random.normal(states[0, 1], 0.1)
states[i, 0] = states[i-1, 0] + velocity
observations[i] = np.random.normal(states[i, 0], 0.5)
particles = np.random.rand(500, 2)
state_estimate = np.zeros((steps, 2))
for i in range(0, steps):
# Iterate the plant model.
velocities = np.random.normal(particles[:, 1], 0.1)
particles[:, 1] = velocities
particles[:, 0] = particles[:, 0] + velocities
p = 0
# Do the re-sampling step.
it = iterate(np.expand_dims(particles[:, 0], axis=1), observations[i], 0.5)
particles = particles[it['resampled_indices'], :]
log_weights = it['log_weights']
state_estimate[i, :] = np.mean(particles, 0)
end = time.time()
print(end - start)
# Plot some stuff.
plt.plot(state_estimate[:, 0])
plt.plot(observations)
plt.pause(0.001)
print('test complete.')
| 36.207792
| 93
| 0.747669
|
cba299d53e4a87dcb2ad335a5ea6bffb40bf29fc
| 11,203
|
py
|
Python
|
train_binary_imagenetlt_stage1.py
|
caisarl76/MiSLAS
|
64717e7085d211751ef27d2f859a7f99e7f93fd9
|
[
"MIT"
] | null | null | null |
train_binary_imagenetlt_stage1.py
|
caisarl76/MiSLAS
|
64717e7085d211751ef27d2f859a7f99e7f93fd9
|
[
"MIT"
] | null | null | null |
train_binary_imagenetlt_stage1.py
|
caisarl76/MiSLAS
|
64717e7085d211751ef27d2f859a7f99e7f93fd9
|
[
"MIT"
] | null | null | null |
import argparse
import os
import random
import shutil
import time
import warnings
import numpy as np
import pprint
import math
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.optim
import torch.utils.data
import torch.utils.data.distributed
import torch.nn.functional as F
from torch.utils.tensorboard import SummaryWriter
from datasets.imagenet import ImageNet_LT
from utils import config, update_config, create_logger
from utils import AverageMeter, ProgressMeter
from utils import accuracy, calibration
from methods import mixup_data, mixup_criterion
from binary.reactnet_imagenet import reactnet
def parse_args():
parser = argparse.ArgumentParser(description='MiSLAS training (Stage-1)')
parser.add_argument('--cfg',
help='experiment configure file name',
required=True,
type=str)
parser.add_argument('opts',
help="Modify config options using the command-line",
default=None,
nargs=argparse.REMAINDER)
args = parser.parse_args()
update_config(config, args)
return args
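# Illustrative invocation (the config path is an assumption, not taken from
# the repository):
#   python train_binary_imagenetlt_stage1.py --cfg configs/imagenet_lt.yaml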
best_acc1 = 0
its_ece = 100
def main():
args = parse_args()
logger, model_dir, writer = create_logger(config, args.cfg)
logger.info('\n' + pprint.pformat(args))
logger.info('\n' + str(config))
if config.deterministic:
seed = 0
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
random.seed(seed)
np.random.seed(seed)
os.environ['PYTHONHASHSEED'] = str(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
if config.gpu is not None:
warnings.warn('You have chosen a specific GPU. This will completely '
'disable data parallelism.')
ngpus_per_node = torch.cuda.device_count()
# Simply call main_worker function
global best_acc1, its_ece
# start_time = time.strftime("%Y%m%d_%H%M%S", time.localtime())
if config.gpu is not None:
logger.info("Use GPU: {} for training".format(config.gpu))
model = reactnet()
if not torch.cuda.is_available():
logger.info('using CPU, this will be slow')
raise NotImplementedError("Only DistributedDataParallel is supported.")
elif torch.cuda.device_count() > 1:
        print('use %d gpus' % torch.cuda.device_count())
model = torch.nn.DataParallel(model).cuda()
torch.backends.cudnn.benchmark = True
print(' Total params: %.2fM' % (sum(p.numel() for p in model.parameters()) / 1000000.0))
# optionally resume from a checkpoint
cur_epoch = 0
if config.resume:
if os.path.isfile(config.resume):
logger.info("=> loading checkpoint '{}'".format(config.resume))
if config.gpu is None:
checkpoint = torch.load(config.resume)
else:
# Map model to be loaded to specified single gpu.
loc = 'cuda:{}'.format(config.gpu)
checkpoint = torch.load(config.resume, map_location=loc)
config.start_epoch = checkpoint['epoch']
best_acc1 = checkpoint['best_acc1']
if config.gpu is not None:
# best_acc1 may be from a checkpoint from a different GPU
best_acc1 = best_acc1.to(config.gpu)
model.load_state_dict(checkpoint['state_dict_model'])
logger.info("=> loaded checkpoint '{}' (epoch {})"
.format(config.resume, checkpoint['epoch']))
else:
logger.info("=> no checkpoint found at '{}'".format(config.resume))
dataset = ImageNet_LT(config.distributed, root=config.data_path,
batch_size=config.batch_size, num_works=config.workers)
train_loader = dataset.train_instance
val_loader = dataset.eval
# define loss function (criterion) and optimizer
criterion = nn.CrossEntropyLoss().cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=config.lr)
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=50, eta_min=0)
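    # Note: scheduler.step() is invoked once per batch inside train(), so with
    # T_max=50 the cosine annealing cycle completes every 50 optimizer steps
    # rather than every 50 epochs.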
for epoch in range(config.num_epochs):
# adjust_learning_rate(optimizer, scheduler, epoch, config)
# train for one epoch
train(train_loader, model, criterion, optimizer, scheduler, epoch, config, logger, writer)
# evaluate on validation set
acc1, loss, ece = validate(val_loader, model, criterion, config, logger)
# remember best acc@1 and save checkpoint
is_best = acc1 > best_acc1
best_acc1 = max(acc1, best_acc1)
if is_best:
its_ece = ece
logger.info('Best Prec@1: %.3f%% ECE: %.3f%%\n' % (best_acc1, its_ece))
writer.add_scalar('val loss', loss, epoch)
writer.add_scalar('val ece', ece, epoch)
writer.add_scalar('val acc', acc1, epoch)
if epoch % 10 == 0:
save_checkpoint({
'epoch': epoch + 1,
'state_dict_model': model.state_dict(),
'best_acc1': best_acc1,
'its_ece': its_ece,
}, is_best, model_dir)
writer.close()
def train(train_loader, model, criterion, optimizer, scheduler, epoch, config, logger, writer=None):
batch_time = AverageMeter('Time', ':6.3f')
data_time = AverageMeter('Data', ':6.3f')
losses = AverageMeter('Loss', ':.3f')
top1 = AverageMeter('Acc@1', ':6.3f')
top5 = AverageMeter('Acc@5', ':6.3f')
progress = ProgressMeter(
len(train_loader),
[batch_time, losses, top1, top5],
prefix="Epoch: [{}]".format(epoch))
model.train()
training_data_num = len(train_loader.dataset)
end_steps = int(training_data_num / train_loader.batch_size)
end = time.time()
for i, (images, target) in enumerate(train_loader):
if i > end_steps:
break
# measure data loading time
data_time.update(time.time() - end)
if torch.cuda.is_available():
images = images.cuda()
target = target.cuda()
if config.mixup is True:
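            # Mixup blends pairs of samples: x' = lam * x_a + (1 - lam) * x_b,
            # and the loss is the same convex combination of the losses against
            # the two original targets.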
images, targets_a, targets_b, lam = mixup_data(images, target, alpha=config.alpha)
output = model(images)
loss = mixup_criterion(criterion, output, targets_a, targets_b, lam)
else:
output = model(images)
loss = criterion(output, target)
acc1, acc5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), images.size(0))
top1.update(acc1[0], images.size(0))
top5.update(acc5[0], images.size(0))
# compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
scheduler.step()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % config.print_freq == 0:
progress.display(i, logger)
if writer:
writer.add_scalar('train loss', losses.avg, epoch)
writer.add_scalar('train acc', top1.avg, epoch)
def validate(val_loader, model, criterion, config, logger):
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.3f')
top1 = AverageMeter('Acc@1', ':6.3f')
top5 = AverageMeter('Acc@5', ':6.3f')
progress = ProgressMeter(
len(val_loader),
[batch_time, losses, top1, top5],
prefix='Eval: ')
# switch to evaluate mode
model.eval()
class_num = torch.zeros(config.num_classes).cuda()
correct = torch.zeros(config.num_classes).cuda()
confidence = np.array([])
pred_class = np.array([])
true_class = np.array([])
with torch.no_grad():
end = time.time()
for i, (images, target) in enumerate(val_loader):
if torch.cuda.is_available():
images = images.cuda()
target = target.cuda()
output = model(images)
loss = criterion(output, target)
# measure accuracy and record loss
acc1, acc5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), images.size(0))
top1.update(acc1[0], images.size(0))
top5.update(acc5[0], images.size(0))
_, predicted = output.max(1)
target_one_hot = F.one_hot(target, config.num_classes)
predict_one_hot = F.one_hot(predicted, config.num_classes)
class_num = class_num + target_one_hot.sum(dim=0).to(torch.float)
correct = correct + (target_one_hot + predict_one_hot == 2).sum(dim=0).to(torch.float)
prob = torch.softmax(output, dim=1)
confidence_part, pred_class_part = torch.max(prob, dim=1)
confidence = np.append(confidence, confidence_part.cpu().numpy())
pred_class = np.append(pred_class, pred_class_part.cpu().numpy())
true_class = np.append(true_class, target.cpu().numpy())
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % config.print_freq == 0:
progress.display(i, logger)
acc_classes = correct / class_num
head_acc = acc_classes[config.head_class_idx[0]:config.head_class_idx[1]].mean() * 100
med_acc = acc_classes[config.med_class_idx[0]:config.med_class_idx[1]].mean() * 100
tail_acc = acc_classes[config.tail_class_idx[0]:config.tail_class_idx[1]].mean() * 100
logger.info(
'* Acc@1 {top1.avg:.3f}% Acc@5 {top5.avg:.3f}% HAcc {head_acc:.3f}% MAcc {med_acc:.3f}% TAcc {tail_acc:.3f}%.'.format(
top1=top1, top5=top5, head_acc=head_acc, med_acc=med_acc, tail_acc=tail_acc))
cal = calibration(true_class, pred_class, confidence, num_bins=15)
logger.info('* ECE {ece:.3f}%.'.format(ece=cal['expected_calibration_error'] * 100))
return top1.avg, losses.avg, cal['expected_calibration_error'] * 100
def save_checkpoint(state, is_best, model_dir):
filename = model_dir + '/current.pth.tar'
torch.save(state, filename)
if is_best:
shutil.copyfile(filename, model_dir + '/model_best.pth.tar')
def adjust_learning_rate(optimizer, epoch, config):
"""Sets the learning rate"""
if config.cos:
lr_min = 0
lr_max = config.lr
        lr = lr_min + 0.5 * (lr_max - lr_min) * (1 + math.cos(epoch / config.num_epochs * math.pi))
else:
epoch = epoch + 1
if epoch <= 5:
lr = config.lr * epoch / 5
elif epoch > 180:
lr = config.lr * 0.01
elif epoch > 160:
lr = config.lr * 0.1
else:
lr = config.lr
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def create_writer(cfg, model_dir):
# time_str = time.strftime('%Y%m%d%H%M')
log_dir = model_dir.replace('ckps', 'logs')
print('=> creating {}'.format(log_dir))
os.makedirs(log_dir, exist_ok=True)
writer = SummaryWriter(log_dir)
return writer
if __name__ == '__main__':
main()
| 34.791925
| 130
| 0.615639
|
f6b1ce7a2f3bea6e53b6d6e845eae637cb95f39b
| 40,035
|
py
|
Python
|
mongoengine/base/document.py
|
decklord/mongoengine
|
cf75dac1c612ca21e32f403348b77c1da7766703
|
[
"MIT"
] | null | null | null |
mongoengine/base/document.py
|
decklord/mongoengine
|
cf75dac1c612ca21e32f403348b77c1da7766703
|
[
"MIT"
] | null | null | null |
mongoengine/base/document.py
|
decklord/mongoengine
|
cf75dac1c612ca21e32f403348b77c1da7766703
|
[
"MIT"
] | null | null | null |
import copy
import operator
import numbers
from collections import Hashable
from functools import partial
import pymongo
from bson import json_util, ObjectId
from bson.dbref import DBRef
from bson.son import SON
from mongoengine import signals
from mongoengine.common import _import_class
from mongoengine.errors import (ValidationError, InvalidDocumentError,
LookUpError, FieldDoesNotExist)
from mongoengine.python_support import PY3, txt_type
from mongoengine.base.common import get_document, ALLOW_INHERITANCE
from mongoengine.base.datastructures import (
BaseDict,
BaseList,
EmbeddedDocumentList,
StrictDict,
SemiStrictDict
)
from mongoengine.base.fields import ComplexBaseField
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
NON_FIELD_ERRORS = '__all__'
class BaseDocument(object):
__slots__ = ('_changed_fields', '_initialised', '_created', '_data',
'_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__')
_dynamic = False
_dynamic_lock = True
STRICT = False
def __init__(self, *args, **values):
"""
Initialise a document or embedded document
        :param __auto_convert: If True, attempt to convert plain Python values
            to the appropriate field types
:param values: A dictionary of values for the document
"""
self._initialised = False
self._created = True
if args:
# Combine positional arguments with named arguments.
# We only want named arguments.
field = iter(self._fields_ordered)
            # If it's an automatic id field then skip to the first defined field
if self._auto_id_field:
next(field)
for value in args:
name = next(field)
if name in values:
raise TypeError(
"Multiple values for keyword argument '" + name + "'")
values[name] = value
__auto_convert = values.pop("__auto_convert", True)
# 399: set default values only to fields loaded from DB
__only_fields = set(values.pop("__only_fields", values))
_created = values.pop("_created", True)
signals.pre_init.send(self.__class__, document=self, values=values)
# Check if there are undefined fields supplied to the constructor,
# if so raise an Exception.
if not self._dynamic and (self._meta.get('strict', True) or _created):
for var in values.keys():
if var not in self._fields.keys() + ['id', 'pk', '_cls', '_text_score']:
msg = (
"The field '{0}' does not exist on the document '{1}'"
).format(var, self._class_name)
raise FieldDoesNotExist(msg)
if self.STRICT and not self._dynamic:
self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
else:
self._data = SemiStrictDict.create(
allowed_keys=self._fields_ordered)()
self._dynamic_fields = SON()
# Assign default values to instance
for key, field in self._fields.iteritems():
if self._db_field_map.get(key, key) in __only_fields:
continue
value = getattr(self, key, None)
setattr(self, key, value)
if "_cls" not in values:
self._cls = self._class_name
# Set passed values after initialisation
if self._dynamic:
dynamic_data = {}
for key, value in values.iteritems():
if key in self._fields or key == '_id':
setattr(self, key, value)
elif self._dynamic:
dynamic_data[key] = value
else:
FileField = _import_class('FileField')
for key, value in values.iteritems():
if key == '__auto_convert':
continue
key = self._reverse_db_field_map.get(key, key)
if key in self._fields or key in ('id', 'pk', '_cls'):
if __auto_convert and value is not None:
field = self._fields.get(key)
if field and not isinstance(field, FileField):
value = field.to_python(value)
setattr(self, key, value)
else:
self._data[key] = value
# Set any get_fieldname_display methods
self.__set_field_display()
if self._dynamic:
self._dynamic_lock = False
for key, value in dynamic_data.iteritems():
setattr(self, key, value)
# Flag initialised
self._initialised = True
self._created = _created
signals.post_init.send(self.__class__, document=self)
def __delattr__(self, *args, **kwargs):
"""Handle deletions of fields"""
field_name = args[0]
if field_name in self._fields:
default = self._fields[field_name].default
if callable(default):
default = default()
setattr(self, field_name, default)
else:
super(BaseDocument, self).__delattr__(*args, **kwargs)
def __setattr__(self, name, value):
# Handle dynamic data only if an initialised dynamic document
if self._dynamic and not self._dynamic_lock:
if not hasattr(self, name) and not name.startswith('_'):
DynamicField = _import_class("DynamicField")
field = DynamicField(db_field=name)
field.name = name
self._dynamic_fields[name] = field
self._fields_ordered += (name,)
if not name.startswith('_'):
value = self.__expand_dynamic_values(name, value)
# Handle marking data as changed
if name in self._dynamic_fields:
self._data[name] = value
if hasattr(self, '_changed_fields'):
self._mark_as_changed(name)
try:
self__created = self._created
except AttributeError:
self__created = True
if (self._is_document and not self__created and
name in self._meta.get('shard_key', tuple()) and
self._data.get(name) != value):
OperationError = _import_class('OperationError')
msg = "Shard Keys are immutable. Tried to update %s" % name
raise OperationError(msg)
try:
self__initialised = self._initialised
except AttributeError:
self__initialised = False
# Check if the user has created a new instance of a class
if (self._is_document and self__initialised and
self__created and name == self._meta.get('id_field')):
super(BaseDocument, self).__setattr__('_created', False)
super(BaseDocument, self).__setattr__(name, value)
def __getstate__(self):
data = {}
for k in ('_changed_fields', '_initialised', '_created',
'_dynamic_fields', '_fields_ordered'):
if hasattr(self, k):
data[k] = getattr(self, k)
data['_data'] = self.to_mongo()
return data
def __setstate__(self, data):
if isinstance(data["_data"], SON):
data["_data"] = self.__class__._from_son(data["_data"])._data
for k in ('_changed_fields', '_initialised', '_created', '_data',
'_dynamic_fields'):
if k in data:
setattr(self, k, data[k])
if '_fields_ordered' in data:
if self._dynamic:
setattr(self, '_fields_ordered', data['_fields_ordered'])
else:
_super_fields_ordered = type(self)._fields_ordered
setattr(self, '_fields_ordered', _super_fields_ordered)
dynamic_fields = data.get('_dynamic_fields') or SON()
for k in dynamic_fields.keys():
setattr(self, k, data["_data"].get(k))
def __iter__(self):
return iter(self._fields_ordered)
def __getitem__(self, name):
"""Dictionary-style field access, return a field's value if present.
"""
try:
if name in self._fields_ordered:
return getattr(self, name)
except AttributeError:
pass
raise KeyError(name)
def __setitem__(self, name, value):
"""Dictionary-style field access, set a field's value.
"""
        # Ensure that the field exists before setting its value
if not self._dynamic and name not in self._fields:
raise KeyError(name)
return setattr(self, name, value)
def __contains__(self, name):
try:
val = getattr(self, name)
return val is not None
except AttributeError:
return False
def __len__(self):
return len(self._data)
def __repr__(self):
try:
u = self.__str__()
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
repr_type = str if u is None else type(u)
return repr_type('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
if hasattr(self, '__unicode__'):
if PY3:
return self.__unicode__()
else:
return unicode(self).encode('utf-8')
return txt_type('%s object' % self.__class__.__name__)
def __eq__(self, other):
if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None:
return self.id == other.id
if isinstance(other, DBRef):
return self._get_collection_name() == other.collection and self.id == other.id
if self.id is None:
return self is other
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
if getattr(self, 'pk', None) is None:
# For new object
return super(BaseDocument, self).__hash__()
else:
return hash(self.pk)
def clean(self):
"""
Hook for doing document level data cleaning before validation is run.
Any ValidationError raised by this method will not be associated with
a particular field; it will have a special-case association with the
field defined by NON_FIELD_ERRORS.
"""
pass
def get_text_score(self):
"""
Get text score from text query
"""
if '_text_score' not in self._data:
raise InvalidDocumentError('This document is not originally built from a text query')
return self._data['_text_score']
def to_mongo(self, use_db_field=True, fields=None):
"""
Return as SON data ready for use with MongoDB.
"""
if not fields:
fields = []
data = SON()
data["_id"] = None
data['_cls'] = self._class_name
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
# only root fields ['test1.a', 'test2'] => ['test1', 'test2']
root_fields = set([f.split('.')[0] for f in fields])
for field_name in self:
if root_fields and field_name not in root_fields:
continue
value = self._data.get(field_name, None)
field = self._fields.get(field_name)
if field is None and self._dynamic:
field = self._dynamic_fields.get(field_name)
if value is not None:
if isinstance(field, EmbeddedDocumentField):
if fields:
key = '%s.' % field_name
embedded_fields = [
i.replace(key, '') for i in fields
if i.startswith(key)]
else:
embedded_fields = []
value = field.to_mongo(value, use_db_field=use_db_field,
fields=embedded_fields)
else:
value = field.to_mongo(value)
# Handle self generating fields
if value is None and field._auto_gen:
value = field.generate()
self._data[field_name] = value
if value is not None:
if use_db_field:
data[field.db_field] = value
else:
data[field.name] = value
# If "_id" has not been set, then try and set it
Document = _import_class("Document")
if isinstance(self, Document):
if data["_id"] is None:
data["_id"] = self._data.get("id", None)
if data['_id'] is None:
data.pop('_id')
# Only add _cls if allow_inheritance is True
if (not hasattr(self, '_meta') or
not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
data.pop('_cls')
return data
def validate(self, clean=True):
"""Ensure that all fields' values are valid and that required fields
are present.
"""
# Ensure that each field is matched to a valid value
errors = {}
if clean:
try:
self.clean()
except ValidationError, error:
errors[NON_FIELD_ERRORS] = error
# Get a list of tuples of field names and their current values
fields = [(self._fields.get(name, self._dynamic_fields.get(name)),
self._data.get(name)) for name in self._fields_ordered]
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
GenericEmbeddedDocumentField = _import_class(
"GenericEmbeddedDocumentField")
for field, value in fields:
if value is not None:
try:
if isinstance(field, (EmbeddedDocumentField,
GenericEmbeddedDocumentField)):
field._validate(value, clean=clean)
else:
field._validate(value)
except ValidationError, error:
errors[field.name] = error.errors or error
except (ValueError, AttributeError, AssertionError), error:
errors[field.name] = error
elif field.required and not getattr(field, '_auto_gen', False):
errors[field.name] = ValidationError('Field is required',
field_name=field.name)
if errors:
pk = "None"
if hasattr(self, 'pk'):
pk = self.pk
elif self._instance and hasattr(self._instance, 'pk'):
pk = self._instance.pk
message = "ValidationError (%s:%s) " % (self._class_name, pk)
raise ValidationError(message, errors=errors)
def to_json(self, *args, **kwargs):
"""Converts a document to JSON.
        :param use_db_field: True by default; the JSON output is keyed by the
            MongoDB db_field names. Set to False to key it by the field names
            instead.
"""
use_db_field = kwargs.pop('use_db_field', True)
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
@classmethod
def from_json(cls, json_data, created=False):
"""Converts json data to an unsaved document instance"""
return cls._from_son(json_util.loads(json_data), created=created)
def __expand_dynamic_values(self, name, value):
"""expand any dynamic values to their correct types / values"""
if not isinstance(value, (dict, list, tuple)):
return value
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
is_list = False
if not hasattr(value, 'items'):
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
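        # Lists are temporarily represented as {index: item} mappings so the
        # same recursive expansion below handles both containers; the list
        # order is restored afterwards by sorting on the integer keys.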
if not is_list and '_cls' in value:
cls = get_document(value['_cls'])
return cls(**value)
data = {}
for k, v in value.items():
key = name if is_list else k
data[k] = self.__expand_dynamic_values(key, v)
if is_list: # Convert back to a list
data_items = sorted(data.items(), key=operator.itemgetter(0))
value = [v for k, v in data_items]
else:
value = data
# Convert lists / values so we can watch for any changes on them
if (isinstance(value, (list, tuple)) and
not isinstance(value, BaseList)):
if issubclass(type(self), EmbeddedDocumentListField):
value = EmbeddedDocumentList(value, self, name)
else:
value = BaseList(value, self, name)
elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, self, name)
return value
def _mark_as_changed(self, key):
"""Marks a key as explicitly changed by the user
"""
if not key:
return
if not hasattr(self, '_changed_fields'):
return
if '.' in key:
key, rest = key.split('.', 1)
key = self._db_field_map.get(key, key)
key = '%s.%s' % (key, rest)
else:
key = self._db_field_map.get(key, key)
if key not in self._changed_fields:
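            # Walk up the dotted path: if a shorter prefix is already marked as
            # changed it covers this key, so the loop breaks without appending;
            # otherwise the while/else appends the key. In both cases any keys
            # recorded below the resolved level are pruned as redundant.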
levels, idx = key.split('.'), 1
while idx <= len(levels):
if '.'.join(levels[:idx]) in self._changed_fields:
break
idx += 1
else:
self._changed_fields.append(key)
# remove lower level changed fields
level = '.'.join(levels[:idx]) + '.'
remove = self._changed_fields.remove
for field in copy.copy(self._changed_fields):
if field.startswith(level):
remove(field)
def _clear_changed_fields(self):
"""Using get_changed_fields iterate and remove any fields that are
marked as changed"""
for changed in self._get_changed_fields():
parts = changed.split(".")
data = self
for part in parts:
if isinstance(data, list):
try:
data = data[int(part)]
except IndexError:
data = None
elif isinstance(data, dict):
data = data.get(part, None)
else:
data = getattr(data, part, None)
if hasattr(data, "_changed_fields"):
if hasattr(data, "_is_document") and data._is_document:
continue
data._changed_fields = []
self._changed_fields = []
def _nestable_types_changed_fields(self, changed_fields, key, data, inspected):
# Loop list / dict fields as they contain documents
# Determine the iterator to use
if not hasattr(data, 'items'):
iterator = enumerate(data)
else:
iterator = data.iteritems()
for index, value in iterator:
list_key = "%s%s." % (key, index)
# don't check anything lower if this key is already marked
# as changed.
if list_key[:-1] in changed_fields:
continue
if hasattr(value, '_get_changed_fields'):
changed = value._get_changed_fields(inspected)
changed_fields += ["%s%s" % (list_key, k)
for k in changed if k]
elif isinstance(value, (list, tuple, dict)):
self._nestable_types_changed_fields(
changed_fields, list_key, value, inspected)
def _get_changed_fields(self, inspected=None):
"""Returns a list of all fields that have explicitly been changed.
"""
EmbeddedDocument = _import_class("EmbeddedDocument")
DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
ReferenceField = _import_class("ReferenceField")
SortedListField = _import_class("SortedListField")
changed_fields = []
changed_fields += getattr(self, '_changed_fields', [])
inspected = inspected or set()
if hasattr(self, 'id') and isinstance(self.id, Hashable):
if self.id in inspected:
return changed_fields
inspected.add(self.id)
for field_name in self._fields_ordered:
db_field_name = self._db_field_map.get(field_name, field_name)
key = '%s.' % db_field_name
data = self._data.get(field_name, None)
field = self._fields.get(field_name)
if hasattr(data, 'id'):
if data.id in inspected:
continue
if isinstance(field, ReferenceField):
continue
elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
and db_field_name not in changed_fields):
# Find all embedded fields that have been changed
changed = data._get_changed_fields(inspected)
changed_fields += ["%s%s" % (key, k) for k in changed if k]
elif (isinstance(data, (list, tuple, dict)) and
db_field_name not in changed_fields):
if (hasattr(field, 'field') and
isinstance(field.field, ReferenceField)):
continue
elif isinstance(field, SortedListField) and field._ordering:
# if ordering is affected whole list is changed
if any(map(lambda d: field._ordering in d._changed_fields, data)):
changed_fields.append(db_field_name)
continue
self._nestable_types_changed_fields(
changed_fields, key, data, inspected)
return changed_fields
def _delta(self):
"""Returns the delta (set, unset) of the changes for a document.
Gets any values that have been explicitly changed.
"""
# Handles cases where not loaded from_son but has _id
doc = self.to_mongo()
set_fields = self._get_changed_fields()
unset_data = {}
parts = []
if hasattr(self, '_changed_fields'):
set_data = {}
# Fetch each set item from its path
for path in set_fields:
parts = path.split('.')
d = doc
new_path = []
for p in parts:
if isinstance(d, (ObjectId, DBRef)):
break
elif isinstance(d, list) and p.isdigit():
try:
d = d[int(p)]
except IndexError:
d = None
elif hasattr(d, 'get'):
d = d.get(p)
new_path.append(p)
path = '.'.join(new_path)
set_data[path] = d
else:
set_data = doc
if '_id' in set_data:
del set_data['_id']
# Determine if any changed items were actually unset.
for path, value in set_data.items():
if value or isinstance(value, (numbers.Number, bool)):
continue
            # If we've set a value that isn't the default value, don't unset it.
default = None
if (self._dynamic and len(parts) and parts[0] in
self._dynamic_fields):
del set_data[path]
unset_data[path] = 1
continue
elif path in self._fields:
default = self._fields[path].default
else: # Perform a full lookup for lists / embedded lookups
d = self
parts = path.split('.')
db_field_name = parts.pop()
for p in parts:
if isinstance(d, list) and p.isdigit():
d = d[int(p)]
elif (hasattr(d, '__getattribute__') and
not isinstance(d, dict)):
real_path = d._reverse_db_field_map.get(p, p)
d = getattr(d, real_path)
else:
d = d.get(p)
if hasattr(d, '_fields'):
field_name = d._reverse_db_field_map.get(db_field_name,
db_field_name)
if field_name in d._fields:
default = d._fields.get(field_name).default
else:
default = None
if default is not None:
if callable(default):
default = default()
if default != value:
continue
del set_data[path]
unset_data[path] = 1
return set_data, unset_data
@classmethod
def _get_collection_name(cls):
"""Returns the collection name for this class. None for abstract class
"""
return cls._meta.get('collection', None)
@classmethod
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
"""Create an instance of a Document (subclass) from a PyMongo SON.
"""
if not only_fields:
only_fields = []
# get the class name from the document, falling back to the given
# class if unavailable
class_name = son.get('_cls', cls._class_name)
data = dict(("%s" % key, value) for key, value in son.iteritems())
# Return correct subclass for document type
if class_name != cls._class_name:
cls = get_document(class_name)
changed_fields = []
errors_dict = {}
fields = cls._fields
if not _auto_dereference:
fields = copy.copy(fields)
for field_name, field in fields.iteritems():
field._auto_dereference = _auto_dereference
if field.db_field in data:
value = data[field.db_field]
try:
data[field_name] = (value if value is None
else field.to_python(value))
if field_name != field.db_field:
del data[field.db_field]
except (AttributeError, ValueError), e:
errors_dict[field_name] = e
elif field.default:
default = field.default
if callable(default):
default = default()
if isinstance(default, BaseDocument):
changed_fields.append(field_name)
elif not only_fields or field_name in only_fields:
changed_fields.append(field_name)
if errors_dict:
errors = "\n".join(["%s - %s" % (k, v)
for k, v in errors_dict.items()])
msg = ("Invalid data to create a `%s` instance.\n%s"
% (cls._class_name, errors))
raise InvalidDocumentError(msg)
if cls.STRICT:
data = dict((k, v)
for k, v in data.iteritems() if k in cls._fields)
obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
obj._changed_fields = changed_fields
if not _auto_dereference:
obj._fields = fields
return obj
@classmethod
def _build_index_specs(cls, meta_indexes):
"""Generate and merge the full index specs
"""
geo_indices = cls._geo_indices()
unique_indices = cls._unique_with_indexes()
index_specs = [cls._build_index_spec(spec)
for spec in meta_indexes]
def merge_index_specs(index_specs, indices):
if not indices:
return index_specs
spec_fields = [v['fields']
for k, v in enumerate(index_specs)]
# Merge unique_indexes with existing specs
for k, v in enumerate(indices):
if v['fields'] in spec_fields:
index_specs[spec_fields.index(v['fields'])].update(v)
else:
index_specs.append(v)
return index_specs
index_specs = merge_index_specs(index_specs, geo_indices)
index_specs = merge_index_specs(index_specs, unique_indices)
return index_specs
@classmethod
def _build_index_spec(cls, spec):
"""Build a PyMongo index spec from a MongoEngine index spec.
"""
if isinstance(spec, basestring):
spec = {'fields': [spec]}
elif isinstance(spec, (list, tuple)):
spec = {'fields': list(spec)}
elif isinstance(spec, dict):
spec = dict(spec)
index_list = []
direction = None
# Check to see if we need to include _cls
allow_inheritance = cls._meta.get('allow_inheritance',
ALLOW_INHERITANCE)
include_cls = (allow_inheritance and not spec.get('sparse', False) and
spec.get('cls', True) and '_cls' not in spec['fields'])
# 733: don't include cls if index_cls is False unless there is an explicit cls with the index
include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
if "cls" in spec:
spec.pop('cls')
for key in spec['fields']:
# If inherited spec continue
if isinstance(key, (list, tuple)):
continue
# ASCENDING from +
# DESCENDING from -
# TEXT from $
# HASHED from #
# GEOSPHERE from (
# GEOHAYSTACK from )
# GEO2D from *
direction = pymongo.ASCENDING
if key.startswith("-"):
direction = pymongo.DESCENDING
elif key.startswith("$"):
direction = pymongo.TEXT
elif key.startswith("#"):
direction = pymongo.HASHED
elif key.startswith("("):
direction = pymongo.GEOSPHERE
elif key.startswith(")"):
direction = pymongo.GEOHAYSTACK
elif key.startswith("*"):
direction = pymongo.GEO2D
if key.startswith(("+", "-", "*", "$", "#", "(", ")")):
key = key[1:]
# Use real field name, do it manually because we need field
# objects for the next part (list field checking)
parts = key.split('.')
if parts in (['pk'], ['id'], ['_id']):
key = '_id'
else:
fields = cls._lookup_field(parts)
parts = []
for field in fields:
try:
if field != "_id":
field = field.db_field
except AttributeError:
pass
parts.append(field)
key = '.'.join(parts)
index_list.append((key, direction))
# Don't add cls to a geo index
if include_cls and direction not in (
pymongo.GEO2D, pymongo.GEOHAYSTACK, pymongo.GEOSPHERE):
index_list.insert(0, ('_cls', 1))
if index_list:
spec['fields'] = index_list
if spec.get('sparse', False) and len(spec['fields']) > 1:
raise ValueError(
'Sparse indexes can only have one field in them. '
'See https://jira.mongodb.org/browse/SERVER-2193')
return spec
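    # Illustrative example (not from the original source): for a document with
    # a 'created' field, _build_index_spec('-created') yields
    # {'fields': [('created', pymongo.DESCENDING)]}, with ('_cls', 1) prepended
    # when inheritance bookkeeping requires it.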
@classmethod
def _unique_with_indexes(cls, namespace=""):
"""
Find and set unique indexes
"""
unique_indexes = []
for field_name, field in cls._fields.items():
sparse = field.sparse
# Generate a list of indexes needed by uniqueness constraints
if field.unique:
unique_fields = [field.db_field]
# Add any unique_with fields to the back of the index spec
if field.unique_with:
if isinstance(field.unique_with, basestring):
field.unique_with = [field.unique_with]
# Convert unique_with field names to real field names
unique_with = []
for other_name in field.unique_with:
parts = other_name.split('.')
# Lookup real name
parts = cls._lookup_field(parts)
name_parts = [part.db_field for part in parts]
unique_with.append('.'.join(name_parts))
# Unique field should be required
parts[-1].required = True
sparse = (not sparse and
parts[-1].name not in cls.__dict__)
unique_fields += unique_with
# Add the new index to the list
fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
for f in unique_fields]
index = {'fields': fields, 'unique': True, 'sparse': sparse}
unique_indexes.append(index)
if field.__class__.__name__ == "ListField":
field = field.field
# Grab any embedded document field unique indexes
if (field.__class__.__name__ == "EmbeddedDocumentField" and
field.document_type != cls):
field_namespace = "%s." % field_name
doc_cls = field.document_type
unique_indexes += doc_cls._unique_with_indexes(field_namespace)
return unique_indexes
@classmethod
def _geo_indices(cls, inspected=None, parent_field=None):
inspected = inspected or []
geo_indices = []
inspected.append(cls)
geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
"PointField", "LineStringField", "PolygonField"]
geo_field_types = tuple([_import_class(field)
for field in geo_field_type_names])
for field in cls._fields.values():
if not isinstance(field, geo_field_types):
continue
if hasattr(field, 'document_type'):
field_cls = field.document_type
if field_cls in inspected:
continue
if hasattr(field_cls, '_geo_indices'):
geo_indices += field_cls._geo_indices(
inspected, parent_field=field.db_field)
elif field._geo_index:
field_name = field.db_field
if parent_field:
field_name = "%s.%s" % (parent_field, field_name)
geo_indices.append({'fields':
[(field_name, field._geo_index)]})
return geo_indices
@classmethod
def _lookup_field(cls, parts):
"""Lookup a field based on its attribute and return a list containing
the field's parents and the field.
"""
ListField = _import_class("ListField")
DynamicField = _import_class('DynamicField')
if not isinstance(parts, (list, tuple)):
parts = [parts]
fields = []
field = None
for field_name in parts:
# Handle ListField indexing:
if field_name.isdigit() and isinstance(field, ListField):
fields.append(field_name)
continue
if field is None:
# Look up first field from the document
if field_name == 'pk':
# Deal with "primary key" alias
field_name = cls._meta['id_field']
if field_name in cls._fields:
field = cls._fields[field_name]
elif cls._dynamic:
field = DynamicField(db_field=field_name)
elif cls._meta.get("allow_inheritance", False) or cls._meta.get("abstract", False):
# 744: in case the field is defined in a subclass
for subcls in cls.__subclasses__():
try:
field = subcls._lookup_field([field_name])[0]
except LookUpError:
continue
if field is not None:
break
else:
raise LookUpError('Cannot resolve field "%s"' % field_name)
else:
raise LookUpError('Cannot resolve field "%s"'
% field_name)
else:
ReferenceField = _import_class('ReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
if isinstance(field, (ReferenceField, GenericReferenceField)):
raise LookUpError('Cannot perform join in mongoDB: %s' %
'__'.join(parts))
if hasattr(getattr(field, 'field', None), 'lookup_member'):
new_field = field.field.lookup_member(field_name)
elif cls._dynamic and (isinstance(field, DynamicField) or
getattr(getattr(field, 'document_type'), '_dynamic')):
new_field = DynamicField(db_field=field_name)
else:
# Look up subfield on the previous field or raise
try:
new_field = field.lookup_member(field_name)
except AttributeError:
raise LookUpError('Cannot resolve subfield or operator {} '
'on the field {}'.format(
field_name, field.name))
if not new_field and isinstance(field, ComplexBaseField):
fields.append(field_name)
continue
elif not new_field:
raise LookUpError('Cannot resolve field "%s"'
% field_name)
field = new_field # update field to the new field type
fields.append(field)
return fields
@classmethod
def _translate_field_name(cls, field, sep='.'):
"""Translate a field attribute name to a database field name.
"""
parts = field.split(sep)
parts = [f.db_field for f in cls._lookup_field(parts)]
return '.'.join(parts)
def __set_field_display(self):
"""Dynamically set the display value for a field with choices"""
for attr_name, field in self._fields.items():
if field.choices:
if self._dynamic:
obj = self
else:
obj = type(self)
setattr(obj,
'get_%s_display' % attr_name,
partial(self.__get_field_display, field=field))
def __get_field_display(self, field):
"""Returns the display value for a choice field"""
value = getattr(self, field.name)
if field.choices and isinstance(field.choices[0], (list, tuple)):
return dict(field.choices).get(value, value)
return value
| 38.982473
| 117
| 0.538379
|
03f341a03895e3abae62115082c1768dff794909
| 6,053
|
py
|
Python
|
triangular_lattice/fill_bucket.py
|
ssh0/growing-string
|
2e43916e91157dfb4253775149b35ec9d81ef14d
|
[
"MIT"
] | null | null | null |
triangular_lattice/fill_bucket.py
|
ssh0/growing-string
|
2e43916e91157dfb4253775149b35ec9d81ef14d
|
[
"MIT"
] | 1
|
2016-04-14T08:15:28.000Z
|
2016-04-27T02:57:13.000Z
|
triangular_lattice/fill_bucket.py
|
ssh0/growing-string
|
2e43916e91157dfb4253775149b35ec9d81ef14d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# written by Shotaro Fujimoto
from growing_string import Main
from triangular import LatticeTriangular as LT
from strings import String
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.tri as tri
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
class FillBucket(object):
def __init__(self, main, plot_type='fill'):
self.lattice = main.lattice
self.lattice_X = main.lattice_X
self.lattice_Y = main.lattice_Y
self.doubled_lattice = np.zeros((self.lattice.Lx * 2, self.lattice.Ly),
dtype=np.bool)
self.define_kagome_lattice()
self.string = main.strings[0]
self.plot_type = plot_type
doubled_lattice = self.create_doubled_lattice()
self.doubled_lattice = self.fill_inside(doubled_lattice)
def create_doubled_lattice(self):
str_pos = self.string.pos.tolist()
check_index = [(i, j)
for i in range(self.lattice.Lx)
for j in range(self.lattice.Ly)
if [i, j] in str_pos]
for i, j in check_index:
k = str_pos.index([i, j])
vec = self.string.vec[k]
if vec in [0, 3]:
continue
if vec == 1:
x = 2 * i
y = j
elif vec == 2:
x = 2 * i - 1
y = j
elif vec == 4:
x = 2 * i
y = j - 1
elif vec == 5:
x = 2 * i + 1
y = j - 1
self.doubled_lattice[x, y] = True
return self.doubled_lattice
def fill_inside(self, arr):
"""Fill inside
arr: (m x n: boolean ndarray)
"""
size_x, size_y = arr.shape
ret_arr = np.zeros((size_x, size_y), dtype=np.bool)
for j in range(size_y):
flag = False
for i in range(size_x):
tf = arr[i, j]
if flag ^ tf:
ret_arr[i, j] = True
if tf:
flag = not flag
return ret_arr
def define_kagome_lattice(self):
size_x, size_y = self.lattice.Lx, self.lattice.Ly
x_even = self.lattice_X + 0.5 * self.lattice.dx
y_even = self.lattice_Y + self.lattice.dy / 3.
x_odd = np.roll(self.lattice_X, -1, axis=0)
y_odd = np.roll(self.lattice_Y, -1, axis=0) + (2 * self.lattice.dy) / 3.
self.kagome_X = np.hstack((x_even, x_odd)).reshape(2 * size_x, size_y)
self.kagome_Y = np.hstack((y_even, y_odd)).reshape(2 * size_x, size_y)
def plot_all(self, plot_type=None):
if plot_type is None:
plot_type = self.plot_type
self.fig, self.ax = plt.subplots(figsize=(8, 8))
lattice_X = self.lattice.coordinates_x
lattice_Y = self.lattice.coordinates_y
X_min, X_max = min(lattice_X) - 0.1, max(lattice_X) + 0.1
Y_min, Y_max = min(lattice_Y) - 0.1, max(lattice_Y) + 0.1
self.ax.set_xlim([X_min, X_max])
self.ax.set_ylim([Y_min, Y_max])
self.ax.set_xticklabels([])
self.ax.set_yticklabels([])
self.ax.set_aspect('equal')
triang = tri.Triangulation(lattice_X, lattice_Y)
self.ax.triplot(triang, color='#d5d5d5', lw=0.5)
self.lines = [self.ax.plot([], [], linestyle='-',
color='black',
markerfacecolor='black',
markeredgecolor='black')[0]
for i in range(self.lattice.Lx)]
i = 0
s = self.string
start = 0
for j, pos1, pos2 in zip(range(len(s.pos) - 1), s.pos[:-1], s.pos[1:]):
dist_x = abs(self.lattice_X[pos1[0], pos1[1]] -
self.lattice_X[pos2[0], pos2[1]])
dist_y = abs(self.lattice_Y[pos1[0], pos1[1]] -
self.lattice_Y[pos2[0], pos2[1]])
if dist_x > 1.5 * self.lattice.dx or dist_y > 1.5 * self.lattice.dy:
x = s.pos_x[start:j + 1]
y = s.pos_y[start:j + 1]
X = [self.lattice_X[_x, _y] for _x, _y in zip(x, y)]
Y = [self.lattice_Y[_x, _y] for _x, _y in zip(x, y)]
self.lines[i].set_data(X, Y)
start = j + 1
i += 1
else:
x = s.pos_x[start:]
y = s.pos_y[start:]
X = [self.lattice_X[_x, _y] for _x, _y in zip(x, y)]
Y = [self.lattice_Y[_x, _y] for _x, _y in zip(x, y)]
self.lines[i].set_data(X, Y)
i += 1
dx = self.lattice.dx
dy = self.lattice.dy
if plot_type == 'fill':
X = [self.lattice_X[_x, _y] for _x, _y in zip(s.pos_x, s.pos_y)]
Y = [self.lattice_Y[_x, _y] for _x, _y in zip(s.pos_x, s.pos_y)]
patches = [Polygon(np.array([X, Y]).T.tolist())]
p = PatchCollection(patches, color='green')
self.ax.add_collection(p)
elif plot_type == 'point':
# # plot by Point
index = np.where(self.doubled_lattice)
X = self.kagome_X[index]
Y = self.kagome_Y[index]
self.ax.plot(X, Y, 'r.', alpha=0.5)
plt.show()
if __name__ == '__main__':
L = 100
frames = 1000
params = {
'Lx': L,
'Ly': L,
'frames': frames,
'beta': 2.,
'weight_const': 0.5,
'boundary': {'h': 'periodic', 'v': 'periodic'},
# 'boundary': {'h': 'reflective', 'v': 'reflective'},
'plot': False,
'plot_surface': False,
'interval': 0,
}
# loop
main = Main(strings=[{'id': 1, 'x': L / 4, 'y': L / 2, 'vec': [0, 4, 2]}],
**params
)
bucket = FillBucket(main, plot_type='fill')
# bucket.plot_all(plot_type='point')
bucket.plot_all(plot_type='fill')
| 34.005618
| 80
| 0.506195
|
8a05cde1f9681b8b083699e7006dce030bf080ed
| 4,902
|
py
|
Python
|
l-sort-helper.py
|
indraniel/google-cloud-sv-pipeline-helpers
|
415dfd38a63930cd448f97a925540ba24d5e6184
|
[
"0BSD"
] | null | null | null |
l-sort-helper.py
|
indraniel/google-cloud-sv-pipeline-helpers
|
415dfd38a63930cd448f97a925540ba24d5e6184
|
[
"0BSD"
] | null | null | null |
l-sort-helper.py
|
indraniel/google-cloud-sv-pipeline-helpers
|
415dfd38a63930cd448f97a925540ba24d5e6184
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python
from __future__ import print_function
import argparse, contextlib, os, sys, tempfile, shutil, datetime
import subprocess as sp
import google.auth
from googleapiclient import discovery
from google.cloud import storage
import requests
def log(msg):
timestamp = datetime.datetime.now().strftime("%Y-%m-%d %T")
print('[-- {} --] {}'.format(timestamp, msg), file=sys.stderr)
# based on:
# https://stackoverflow.com/questions/3223604/how-to-create-a-temporary-directory-and-get-the-path-file-name-in-python
# http://kitchingroup.cheme.cmu.edu/blog/2013/06/16/Automatic-temporary-directory-changing/
@contextlib.contextmanager
def cd(path=None, cleanup=None):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(path))
try:
yield
except Exception as err:
raise RuntimeError(err)
finally:
os.chdir(prevdir)
if cleanup: cleanup()
@contextlib.contextmanager
def tempdir():
dirpath = '/mnt/disks/scratch/l-sort'
def cleanup():
pass
#shutil.rmtree(dirpath)
with cd(path=dirpath, cleanup=cleanup):
yield dirpath
def run(cmd):
p = sp.Popen(
cmd,
shell=True,
executable='/bin/bash',
env=os.environ
)
p.wait()
#res = p.stdout.read().strip().decode("utf-8", "replace")
#err = p.stderr.read().strip().decode("utf-8", "replace")
#log("res: {} | err: {}".format(res, err))
if p.returncode != 0:
raise RuntimeError(cmd)
#return res
def download_blob(storage_client, vcf):
bucket_name = os.path.dirname(vcf).split('/')[2]
source_blob_name = "/".join(vcf.split('/')[3:])
basefile = os.path.basename(vcf)
dstpath = os.path.join(os.getcwd(), basefile)
#bucket = storage_client.get_bucket(bucket_name)
#blob = bucket.blob(source_blob_name)
#blob.download_to_filename(dstpath)
return dstpath
def get_input_vcfs(input_fof):
vcfs = []
with open(input_fof, 'r') as f:
for line in f:
if line.startswith('#'): continue
vcf = line.rstrip()
vcfs.append(vcf)
return vcfs
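# Illustrative input file-of-files (bucket and paths are assumptions):
#   gs://example-bucket/sv-calls/sample1.vcf.gz
#   gs://example-bucket/sv-calls/sample2.vcf.gz
# Lines starting with '#' are skipped.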
def download_vcfs(storage_client, outfile, input_vcfs):
total = len(input_vcfs)
locally_downloaded_vcfs = []
for (i, vcf) in enumerate(input_vcfs):
# needed by htslib/bcftools to directly read off the bucket
#token = get_access_token()
#os.environ['GCS_OAUTH_TOKEN'] = token
#log("Downloading input vcf [ {} | {} ] : {}".format(i, total, vcf))
vcfpath = download_blob(storage_client, vcf)
#log("\tDownloaded vcf to: {}".format(vcfpath))
locally_downloaded_vcfs.append(vcfpath)
return locally_downloaded_vcfs
def activate_google_storage_client():
credentials, project_id = google.auth.default()
storage_client = storage.Client(credentials=credentials, project=project_id)
return storage_client
# see https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances#applications
def get_access_token():
METADATA_URL = 'http://metadata.google.internal/computeMetadata/v1/'
METADATA_HEADERS = {'Metadata-Flavor': 'Google'}
SERVICE_ACCOUNT = 'default'
token_url = '{}instance/service-accounts/{}/token'
token_url = token_url.format(METADATA_URL, SERVICE_ACCOUNT)
# Request an access token from the metadata server.
r = requests.get(token_url, headers=METADATA_HEADERS)
r.raise_for_status()
# Extract the access token from the response.
access_token = r.json()['access_token']
return access_token
def l_sort(master_vcf_file):
python = '/home/idas/svtools/venv2/bin/python'
svtools = '/home/idas/svtools/venv2/bin/svtools'
tmp_dir = '/mnt/disks/scratch/l-sort/tmp'
cmd = '{python} {svtools} lsort -f /mnt/disks/scratch/l-sort/{vcf_list} -t {tmp_dir}'.format(
python=python,
svtools=svtools,
vcf_list=master_vcf_file,
tmp_dir=tmp_dir
)
log("starting lsort: {}".format(cmd))
run(cmd)
log("finished lsort: {}".format(cmd))
def make_arg_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input", type=str, required=True,
help="the input data to sort"
)
parser.add_argument(
"-o", "--output", type=str, required=True,
help="the sorted gvcf output file"
)
return parser
def main():
parser = make_arg_parser()
args = parser.parse_args()
sc = activate_google_storage_client()
input_vcfs = get_input_vcfs(args.input)
with tempdir() as dirpath:
log("Using tmp directory: {}".format(dirpath))
vcfs = download_vcfs(sc, args.output, input_vcfs)
with open('master-vcf-list.txt', 'w') as f:
print("\n".join(vcfs), file=f)
l_sort('master-vcf-list.txt')
if __name__ == "__main__":
main()
| 30.447205
| 118
| 0.659119
|
666604f0d68b35e85b865f3e38aae4be1944e9f2
| 10,774
|
py
|
Python
|
examples/wiki_parser/wiki_dump_parse.py
|
J007X/forte
|
257f5d4798c632a67e63a4d549c81f2b3b87b714
|
[
"Apache-2.0"
] | 163
|
2019-11-01T19:25:40.000Z
|
2022-03-30T22:49:45.000Z
|
examples/wiki_parser/wiki_dump_parse.py
|
J007X/forte
|
257f5d4798c632a67e63a4d549c81f2b3b87b714
|
[
"Apache-2.0"
] | 633
|
2019-11-01T20:07:08.000Z
|
2022-03-31T23:11:20.000Z
|
examples/wiki_parser/wiki_dump_parse.py
|
KGerring/forte
|
7dc6e6c7d62d9a4126bdfc5ca02d15be3ffd61ca
|
[
"Apache-2.0"
] | 62
|
2019-11-01T19:41:33.000Z
|
2022-03-24T11:14:21.000Z
|
# Copyright 2019 The Forte Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This creates a pipeline to parse the Wikipedia dump and save the results
as MultiPacks onto disk.
"""
import logging
import os
import pickle
import sys
from typing import Dict, Optional
from forte.common.resources import Resources
from forte.data.data_pack import DataPack
from forte.datasets.wikipedia.dbpedia.db_utils import (
load_redirects,
print_progress,
)
from forte.datasets.wikipedia.dbpedia import (
DBpediaWikiReader,
WikiArticleWriter,
WikiStructReader,
WikiAnchorReader,
WikiPropertyReader,
WikiInfoBoxReader,
)
from forte.data.base_reader import PackReader
from forte.datasets.wikipedia.dbpedia.dbpedia_datasets import (
WikiCategoryReader,
WikiPackReader,
)
from forte.pipeline import Pipeline
def add_wiki_info(
reader: WikiPackReader,
resources: Resources,
wiki_info_data_path: str,
input_pack_path: str,
output_path: str,
prompt_name: str,
use_input_index=False,
skip_existing=True,
resume_from_last=False,
input_index_file_path: Optional[str] = "article.idx",
output_index_file_name: Optional[str] = "article.idx",
):
"""
Add wiki resource into the data pack.
Args:
reader: The info reader that loads the data pack.
resources: The resources object that should contain the redirects.
wiki_info_data_path: The path containing the wiki data.
input_pack_path: The initial data pack path.
output_path: The resulting output path.
prompt_name: a name to show during processing.
use_input_index: whether to use the input index to determine the
output path.
skip_existing: whether to skip this function if the folder exists.
resume_from_last: whether to resume from last end point, at most one
can be true between this and `skip_existing`
input_index_file_path: the full file path to the input index.
output_index_file_name: the file path to write the output index,
this is relative to `output_path`.
Returns:
"""
pl = Pipeline[DataPack](resources)
if resume_from_last and skip_existing:
raise ValueError(
"resume_from_last and skip_existing cannot both be " "true."
)
out_index_path = os.path.join(output_path, output_index_file_name)
if skip_existing and os.path.exists(out_index_path):
print_progress(
f"\n{out_index_path} exist, skipping {prompt_name}", "\n"
)
return
if resume_from_last:
if not os.path.exists(out_index_path):
raise ValueError(f"Configured to do resume but path "
f"{out_index_path} does not exists.")
print_progress(
f"\nWill resume from last from {out_index_path}", "\n"
)
pl.set_reader(
reader,
config={
"pack_index": input_index_file_path,
"pack_dir": input_pack_path,
"resume_index": out_index_path,
},
)
else:
pl.set_reader(
reader,
config={
"pack_index": input_index_file_path,
"pack_dir": input_pack_path,
},
)
pl.add(
WikiArticleWriter(),
config={
"output_dir": output_path,
"zip_pack": True,
"drop_record": True,
"use_input_index": use_input_index,
"input_index_file": input_index_file_path,
"output_index_file": output_index_file_name,
"append_to_index": resume_from_last,
},
)
print_progress(f"Start running the {prompt_name} pipeline.", "\n")
pl.run(wiki_info_data_path)
print_progress(f"Done collecting {prompt_name}.", "\n")
def read_wiki_text(
nif_context: str,
output_dir: str,
resources: Resources,
skip_existing: bool = False,
):
if skip_existing and os.path.exists(output_dir):
print_progress(f"\n{output_dir} exist, skipping reading text", "\n")
return
pl = Pipeline[DataPack](resources)
pl.set_reader(DBpediaWikiReader())
pl.add(
WikiArticleWriter(),
config={
"output_dir": output_dir,
"zip_pack": True,
"drop_record": True,
},
)
print_progress("Start running wiki text pipeline.", "\n")
pl.run(nif_context)
print_progress("Done collecting wiki text.", "\n")
def cache_redirects(
base_output_path: str, redirect_path: str
) -> Dict[str, str]:
redirect_pickle = os.path.join(base_output_path, "redirects.pickle")
redirect_map: Dict[str, str]
if os.path.exists(redirect_pickle):
redirect_map = pickle.load(open(redirect_pickle, "rb"))
else:
redirect_map = load_redirects(redirect_path)
with open(redirect_pickle, "wb") as pickle_f:
pickle.dump(redirect_map, pickle_f)
return redirect_map
def main(
nif_context: str,
nif_page_structure: str,
mapping_literals: str,
mapping_objects: str,
nif_text_links: str,
redirects: str,
info_boxs_properties: str,
categories: str,
base_output_path: str,
resume_existing: bool,
):
    # Skip steps whose output already exists, unless we are resuming
    # (resuming and skipping are mutually exclusive).
    skip_existing = not resume_existing
# The datasets are read in a few steps.
# 0. Load redirects between wikipedia pages.
print_progress("Loading redirects", "\n")
redirect_map: Dict[str, str] = cache_redirects(base_output_path, redirects)
resources: Resources = Resources()
resources.update(redirects=redirect_map)
print_progress("Done loading.", "\n")
# 1. Read the wiki text.
raw_pack_dir = os.path.join(base_output_path, "nif_raw")
read_wiki_text(nif_context, raw_pack_dir, resources, True)
print_progress("Done reading wikipedia text.", "\n")
# Use the same index structure for all writers.
main_index = os.path.join(raw_pack_dir, "article.idx")
# 2. Add wiki page structures, create a new directory for it.
struct_dir = raw_pack_dir + "_struct"
add_wiki_info(
WikiStructReader(),
resources,
nif_page_structure,
raw_pack_dir,
struct_dir,
"page_structures",
use_input_index=True,
skip_existing=skip_existing,
resume_from_last=resume_existing,
input_index_file_path=main_index,
)
print_progress("Done reading wikipedia structures.", "\n")
# 3. Add wiki links, create a new directory for it.
link_dir = struct_dir + "_links"
add_wiki_info(
WikiAnchorReader(),
resources,
nif_text_links,
struct_dir,
link_dir,
"anchor_links",
use_input_index=True,
skip_existing=True,
resume_from_last=resume_existing,
input_index_file_path=main_index,
)
print_progress("Done reading wikipedia anchors.", "\n")
# 4 The following steps add info boxes:
    # 4.1 Add un-mapped infobox properties, written directly to the previous directory.
property_dir = link_dir
add_wiki_info(
WikiPropertyReader(),
resources,
info_boxs_properties,
link_dir,
property_dir,
"info_box_properties",
use_input_index=True,
skip_existing=True,
resume_from_last=resume_existing,
output_index_file_name="properties.idx",
input_index_file_path=main_index,
)
print_progress("Done reading wikipedia info-boxes properties.", "\n")
    # 4.2 Add mapped literals, written directly to the previous directory.
literal_dir = property_dir
add_wiki_info(
WikiInfoBoxReader(),
resources,
mapping_literals,
property_dir,
literal_dir,
"literals",
use_input_index=True,
skip_existing=True,
resume_from_last=resume_existing,
output_index_file_name="literals.idx",
input_index_file_path=main_index,
)
print_progress("Done reading wikipedia info-boxes literals.", "\n")
    # 4.3 Add mapped objects, written directly to the previous directory.
mapping_dir = literal_dir
add_wiki_info(
WikiInfoBoxReader(),
resources,
mapping_objects,
literal_dir,
mapping_dir,
"objects",
use_input_index=True,
skip_existing=True,
resume_from_last=resume_existing,
output_index_file_name="objects.idx",
input_index_file_path=main_index,
)
print_progress("Done reading wikipedia info-boxes objects.", "\n")
    # 4.4 Add categories, written directly to the previous directory.
category_dir = mapping_dir
add_wiki_info(
WikiCategoryReader(),
resources,
categories,
mapping_dir,
category_dir,
"categories",
use_input_index=True,
skip_existing=True,
resume_from_last=resume_existing,
output_index_file_name="categories.idx",
input_index_file_path=main_index,
)
def get_path(dataset: str):
p = os.path.join(base_dir, dataset)
if os.path.exists(p):
return p
else:
raise FileNotFoundError(
f"The dataset {dataset} is not found in "
f"base directory {base_dir}"
)
if __name__ == "__main__":
base_dir = sys.argv[1]
pack_output = sys.argv[2]
resume = sys.argv[3]
will_resume = resume.upper().startswith("TRUE")
if not os.path.exists(pack_output):
os.makedirs(pack_output)
logging.basicConfig(
format="%(asctime)s - %(message)s",
level=logging.INFO,
filename=os.path.join(pack_output, "dump.log"),
)
main(
get_path("nif_context_en.tql.bz2"),
get_path("nif_page_structure_en.tql.bz2"),
get_path("mappingbased_literals_en.tql.bz2"),
get_path("mappingbased_objects_en.tql.bz2"),
get_path("nif_text_links_en.tql.bz2"),
get_path("redirects_en.tql.bz2"),
get_path("infobox_properties_mapped_en.tql.bz2"),
get_path("article_categories_en.tql.bz2"),
pack_output,
will_resume,
)
| 30.435028 | 79 | 0.656395 |
b7e4c4d08669f5a68375197c9738d5caf8806fd6 | 813 | py | Python
sandbox/manage.py | juwaini/django-mfa | 910b0f544ae5bebc02434cc6b176f74b5040f3b7 | ["MIT"] | 172 | 2016-09-12T15:40:37.000Z | 2022-03-30T10:24:55.000Z
sandbox/manage.py | juwaini/django-mfa | 910b0f544ae5bebc02434cc6b176f74b5040f3b7 | ["MIT"] | 60 | 2016-08-27T14:31:00.000Z | 2022-01-04T18:21:15.000Z
sandbox/manage.py | juwaini/django-mfa | 910b0f544ae5bebc02434cc6b176f74b5040f3b7 | ["MIT"] | 74 | 2016-08-25T09:28:18.000Z | 2022-03-05T03:01:48.000Z
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_django_mfa.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| 35.347826 | 79 | 0.645756 |
92a06113ebfdbbc4b44cec5f18c4895a0227d4af | 44 | py | Python
blog/blog/__init__.py | Asura0528/blog | dafed165b34b9370e0194e6055869814c8d3fc95 | ["MIT"] | null | null | null
blog/blog/__init__.py | Asura0528/blog | dafed165b34b9370e0194e6055869814c8d3fc95 | ["MIT"] | null | null | null
blog/blog/__init__.py | Asura0528/blog | dafed165b34b9370e0194e6055869814c8d3fc95 | ["MIT"] | null | null | null
import pymysql
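# Make PyMySQL register itself as MySQLdb, so Django's MySQL backend
# (which imports MySQLdb) can use it as a drop-in replacement.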
pymysql.install_as_MySQLdb()
| 44 | 44 | 0.863636 |
3fe42cdc00cb4e0f9d59eb801fd39500215ae5eb | 2,282 | py | Python
tests/integration/test_ip_resolve.py | thisismyrobot/dnstwister | 624b2d95f0febaae7183666efce99164fd03466f | ["Apache-1.1"] | 42 | 2016-04-06T19:41:23.000Z | 2021-06-23T23:52:57.000Z
tests/integration/test_ip_resolve.py | dnstwister/dnstwister | 624b2d95f0febaae7183666efce99164fd03466f | ["Apache-1.1"] | 126 | 2015-12-05T03:41:20.000Z | 2021-06-12T01:04:57.000Z
tests/integration/test_ip_resolve.py | thisismyrobot/dnstwister | 624b2d95f0febaae7183666efce99164fd03466f | ["Apache-1.1"] | 21 | 2015-12-07T19:39:36.000Z | 2020-04-06T18:06:29.000Z
"""Test resolving IPs."""
import socket
from dnstwister import tools
from dnstwister.core.domain import Domain
def test_resolve(webapp):
"""Test we can resolve IP addresses."""
domain = 'dnstwister.report'
hexdomain = Domain(domain).to_hex()
response = webapp.get('/api/ip/{}'.format(hexdomain))
assert response.status_code == 200
payload = response.json
ip_addr = payload['ip']
del payload['ip']
assert payload == {
u'domain': u'dnstwister.report',
u'domain_as_hexadecimal': hexdomain,
u'error': False,
u'fuzz_url': u'http://localhost/api/fuzz/{}'.format(hexdomain),
u'parked_score_url': u'http://localhost/api/parked/{}'.format(hexdomain),
u'url': u'http://localhost/api/ip/{}'.format(hexdomain),
}
# Will throw socket.error exception if this is not a valid IP address.
socket.inet_aton(ip_addr)
def test_unicode_resolve(webapp):
"""Check we can resolve a unicode domain.
"""
domain = 'xn--sterreich-z7a.icom.museum'
hexdomain = Domain(domain).to_hex()
response = webapp.get('/api/ip/{}'.format(hexdomain))
assert response.status_code == 200
payload = response.json
ip_addr = payload['ip']
del payload['ip']
assert payload == {
u'domain': u'xn--sterreich-z7a.icom.museum',
u'domain_as_hexadecimal': u'786e2d2d7374657272656963682d7a37612e69636f6d2e6d757365756d',
u'error': False,
u'fuzz_url': u'http://localhost/api/fuzz/786e2d2d7374657272656963682d7a37612e69636f6d2e6d757365756d',
u'parked_score_url': u'http://localhost/api/parked/786e2d2d7374657272656963682d7a37612e69636f6d2e6d757365756d',
u'url': u'http://localhost/api/ip/786e2d2d7374657272656963682d7a37612e69636f6d2e6d757365756d'
}
# Will throw socket.error exception if this is not a valid IP address.
socket.inet_aton(ip_addr)
def test_failed_resolve(webapp):
"""Test basic failure to resolve an IP for a domain - because it's
unregistered.
"""
domain = 'imprettysurethatthisdomaindoesnotexist.com'
response = webapp.get('/api/ip/{}'.format(Domain(domain).to_hex()))
assert response.status_code == 200
assert response.json['ip'] is False
assert response.json['error'] is False
| 33.072464 | 119 | 0.687555 |
e777468820bc4f6d075744ba2800d099f12309a4 | 173 | py | Python
portfolio_project/guestbook/admin.py | KimEunYeol/web-portfolio | cd56191bdf12c712b252015d3ecb31b037f922ed | ["MIT"] | null | null | null
portfolio_project/guestbook/admin.py | KimEunYeol/web-portfolio | cd56191bdf12c712b252015d3ecb31b037f922ed | ["MIT"] | 7 | 2021-03-19T03:41:33.000Z | 2022-03-12T00:30:35.000Z
portfolio_project/guestbook/admin.py | KimEunYeol/web-portfolio | cd56191bdf12c712b252015d3ecb31b037f922ed | ["MIT"] | null | null | null
from django.contrib import admin
from .models import GuestBook, Comment, Like
admin.site.register(GuestBook)
admin.site.register(Comment)
admin.site.register(Like)
| 24.714286 | 44 | 0.780347 |
0536d2d272fd292dd96cac951041f07e4e3e068e | 663 | py | Python
var/spack/repos/builtin/packages/py-webencodings/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | 11 | 2015-10-04T02:17:46.000Z | 2018-02-07T18:23:00.000Z
var/spack/repos/builtin/packages/py-webencodings/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | 22 | 2017-08-01T22:45:10.000Z | 2022-03-10T07:46:31.000Z
var/spack/repos/builtin/packages/py-webencodings/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | 4 | 2016-06-10T17:57:39.000Z | 2018-09-11T04:59:38.000Z
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyWebencodings(PythonPackage):
"""This is a Python implementation of the WHATWG Encoding standard."""
homepage = "https://github.com/gsnedders/python-webencodings"
pypi = "webencodings/webencodings-0.5.1.tar.gz"
version('0.5.1', sha256='b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923')
depends_on('py-setuptools', type='build')
depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
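# Illustrative install command for this recipe (assuming a working Spack
# checkout; exact spec syntax depends on the Spack version in use):
#   spack install py-webencodings@0.5.1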
| 34.894737 | 95 | 0.731523 |
658cf32b85dfadac1ebe4c43bdbc82b694230d62 | 690 | py | Python
alipay/aop/api/response/AlipayMobilePublicFollowAddResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | ["Apache-2.0"] | null | null | null
alipay/aop/api/response/AlipayMobilePublicFollowAddResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | ["Apache-2.0"] | null | null | null
alipay/aop/api/response/AlipayMobilePublicFollowAddResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | ["Apache-2.0"] | null | null | null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayMobilePublicFollowAddResponse(AlipayResponse):
def __init__(self):
super(AlipayMobilePublicFollowAddResponse, self).__init__()
self._code = None
@property
def code(self):
return self._code
@code.setter
def code(self, value):
self._code = value
def parse_response_content(self, response_content):
response = super(AlipayMobilePublicFollowAddResponse, self).parse_response_content(response_content)
if 'code' in response:
self.code = response['code']
| 26.538462 | 108 | 0.701449 |
efebe7129f01e339e8cbea1971b7ff61898f8369 | 5,055 | py | Python
tensorflow_datasets/translate/ted_multi.py | sourcery-ai-bot/datasets | b623ab0abf3f03bacf6a7ba22c8d37bf76a4db28 | ["Apache-2.0"] | 1 | 2021-05-10T10:41:27.000Z | 2021-05-10T10:41:27.000Z
tensorflow_datasets/translate/ted_multi.py | sourcery-ai-bot/datasets | b623ab0abf3f03bacf6a7ba22c8d37bf76a4db28 | ["Apache-2.0"] | null | null | null
tensorflow_datasets/translate/ted_multi.py | sourcery-ai-bot/datasets | b623ab0abf3f03bacf6a7ba22c8d37bf76a4db28 | ["Apache-2.0"] | 1 | 2021-07-04T11:07:35.000Z | 2021-07-04T11:07:35.000Z
# coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TED talk multilingual data set."""
import csv
import os
import six
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
_DESCRIPTION = """\
Massively multilingual (60 language) data set derived from TED Talk transcripts.
Each record consists of parallel arrays of language and text. Missing and
incomplete translations will be filtered out.
"""
_CITATION = """\
@InProceedings{qi-EtAl:2018:N18-2,
author = {Qi, Ye and Sachan, Devendra and Felix, Matthieu and Padmanabhan, Sarguna and Neubig, Graham},
title = {When and Why Are Pre-Trained Word Embeddings Useful for Neural Machine Translation?},
booktitle = {Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 2 (Short Papers)},
month = {June},
year = {2018},
address = {New Orleans, Louisiana},
publisher = {Association for Computational Linguistics},
pages = {529--535},
abstract = {The performance of Neural Machine Translation (NMT) systems often suffers in low-resource scenarios where sufficiently large-scale parallel corpora cannot be obtained. Pre-trained word embeddings have proven to be invaluable for improving performance in natural language analysis tasks, which often suffer from paucity of data. However, their utility for NMT has not been extensively explored. In this work, we perform five sets of experiments that analyze when we can expect pre-trained word embeddings to help in NMT tasks. We show that such embeddings can be surprisingly effective in some cases -- providing gains of up to 20 BLEU points in the most favorable setting.},
url = {http://www.aclweb.org/anthology/N18-2084}
}
"""
_DATA_URL = 'http://phontron.com/data/ted_talks.tar.gz'
_LANGUAGES = ('en', 'es', 'pt-br', 'fr', 'ru', 'he', 'ar', 'ko', 'zh-cn', 'it',
'ja', 'zh-tw', 'nl', 'ro', 'tr', 'de', 'vi', 'pl', 'pt', 'bg',
'el', 'fa', 'sr', 'hu', 'hr', 'uk', 'cs', 'id', 'th', 'sv', 'sk',
'sq', 'lt', 'da', 'calv', 'my', 'sl', 'mk', 'fr-ca', 'fi', 'hy',
'hi', 'nb', 'ka', 'mn', 'et', 'ku', 'gl', 'mr', 'zh', 'ur', 'eo',
'ms', 'az', 'ta', 'bn', 'kk', 'be', 'eu', 'bs')
class TedMultiTranslate(tfds.core.GeneratorBasedBuilder):
"""TED talk multilingual data set."""
VERSION = tfds.core.Version('1.1.0')
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=tfds.features.FeaturesDict({
'translations':
tfds.features.TranslationVariableLanguages(languages=_LANGUAGES
),
'talk_name':
tfds.features.Text(),
}),
homepage='https://github.com/neulab/word-embeddings-for-nmt',
citation=_CITATION,
)
def _split_generators(self, dl_manager):
dl_dir = dl_manager.download_and_extract(_DATA_URL)
return [
tfds.core.SplitGenerator(
name=tfds.Split.TRAIN,
gen_kwargs={
'data_file': os.path.join(dl_dir, 'all_talks_train.tsv')
}),
tfds.core.SplitGenerator(
name=tfds.Split.VALIDATION,
gen_kwargs={'data_file': os.path.join(dl_dir,
'all_talks_dev.tsv')}),
tfds.core.SplitGenerator(
name=tfds.Split.TEST,
gen_kwargs={
'data_file': os.path.join(dl_dir, 'all_talks_test.tsv')
}),
]
def _generate_examples(self, data_file):
"""This function returns the examples in the raw (text) form."""
with tf.io.gfile.GFile(data_file) as f:
reader = csv.DictReader(f, delimiter='\t', quoting=csv.QUOTE_NONE)
for idx, row in enumerate(reader):
# Everything in the row except for 'talk_name' will be a translation.
# Missing/incomplete translations will contain the string "__NULL__" or
# "_ _ NULL _ _".
yield idx, {
'translations': {
lang: text
for lang, text in six.iteritems(row)
if lang != 'talk_name' and _is_translation_complete(text)
},
'talk_name': row['talk_name']
}
def _is_translation_complete(text):
return text and '__NULL__' not in text and '_ _ NULL _ _' not in text
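# Quick sanity check of the helper above (comments only; values illustrative):
#   _is_translation_complete('hello world')   # truthy -> kept
#   _is_translation_complete('__NULL__')      # falsy  -> filtered out
#   _is_translation_complete('')              # falsy  -> filtered out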
| 43.577586 | 689 | 0.638773 |
ee722539010226d828ced54d20c00b25f05ecb01 | 43 | py | Python
test/data/invalid/expression.py | pombredanne/read_version | 34b11acfb34fe14249354c2c0449cf1b53b07c71 | ["MIT"] | 7 | 2018-09-25T18:03:41.000Z | 2021-03-30T05:56:34.000Z
test/data/invalid/expression.py | pombredanne/read_version | 34b11acfb34fe14249354c2c0449cf1b53b07c71 | ["MIT"] | 2 | 2020-02-02T22:57:41.000Z | 2020-12-05T04:04:05.000Z
test/data/invalid/expression.py | pombredanne/read_version | 34b11acfb34fe14249354c2c0449cf1b53b07c71 | ["MIT"] | 3 | 2019-06-26T17:41:40.000Z | 2020-03-19T17:19:04.000Z
__version__ = ".".join(map(str, range(3)))
| 21.5 | 42 | 0.627907 |
1b626fe33138ae239925e97eecb0f832172e7fcc | 64 | py | Python
prototype/test/pythonvm_book/test_ext_list.py | zoloypzuo/ZeloPy | 43d9242a509737fe1bb66deba73aa9e749b53c62 | ["MIT"] | null | null | null
prototype/test/pythonvm_book/test_ext_list.py | zoloypzuo/ZeloPy | 43d9242a509737fe1bb66deba73aa9e749b53c62 | ["MIT"] | null | null | null
prototype/test/pythonvm_book/test_ext_list.py | zoloypzuo/ZeloPy | 43d9242a509737fe1bb66deba73aa9e749b53c62 | ["MIT"] | null | null | null
class A(list):
pass
a = A()
a.append(3)
print a[0]
print a
| 8 | 14 | 0.578125 |
dd1739a6378e6cd5309214595da2ef47a2ec999d | 12,925 | py | Python
dl1_data_handler/dl_eventsources.py | LucaRomanato/dl1-data-handler | 68fb46e724fa16163d0cd963983eaca3a3b68645 | ["MIT"] | null | null | null
dl1_data_handler/dl_eventsources.py | LucaRomanato/dl1-data-handler | 68fb46e724fa16163d0cd963983eaca3a3b68645 | ["MIT"] | null | null | null
dl1_data_handler/dl_eventsources.py | LucaRomanato/dl1-data-handler | 68fb46e724fa16163d0cd963983eaca3a3b68645 | ["MIT"] | null | null | null
from astropy import units as u
from astropy.coordinates import Angle
from ctapipe.containers import DataContainer, TelescopePointingContainer
from ctapipe.instrument import TelescopeDescription, SubarrayDescription, OpticsDescription, CameraGeometry, CameraReadout, CameraDescription
from ctapipe.io.eventsource import EventSource
from dl1_data_handler import containers
import glob
import numpy as np
from scipy.stats import norm
import re
X_MAX_UNIT = u.g / (u.cm ** 2)
class DLMAGICEventSource(EventSource):
def __init__(self, **kwargs):
"""
Constructor
Parameters
----------
kwargs: dict
Parameters to be passed.
NOTE: The file mask of the data to read can be passed with
the 'input_url' parameter.
"""
try:
import uproot
except ImportError:
raise ImportError("The 'uproot' package is required for the DLMAGICEventSource class.")
self.file_list = glob.glob(kwargs['input_url'])
self.file_list.sort()
        # Since EventSource cannot handle file wildcards as input_url,
        # we substitute input_url with the first file matching
        # the specified file mask.
del kwargs['input_url']
super().__init__(input_url=self.file_list[0], **kwargs)
# get run number
mask = r".*_M\d_za\d+to\d+_\d_(\d+)_Y_.*"
parsed_info = re.findall(mask, self.file_list[0])
self.run_number = parsed_info[0]
# MAGIC telescope positions in m wrt. to the center of CTA simulations
self.magic_tel_positions = {
1: [-27.24, -146.66, 50.00] * u.m,
2: [-96.44, -96.77, 51.00] * u.m
}
# MAGIC telescope description
optics = OpticsDescription.from_name('MAGIC')
geom = CameraGeometry.from_name('MAGICCam')
# Camera Readout for NectarCam used as a placeholder
readout = CameraReadout('MAGICCam', sampling_rate = u.Quantity(1, u.GHz), reference_pulse_shape = np.array([norm.pdf(np.arange(96),48,6)]), reference_pulse_sample_width = u.Quantity(1, u.ns))
camera = CameraDescription('MAGICCam', geom, readout)
self.magic_tel_description = TelescopeDescription(name='MAGIC', tel_type = 'LST', optics=optics, camera=camera)
self.magic_tel_descriptions = {1: self.magic_tel_description, 2: self.magic_tel_description}
self.magic_subarray = SubarrayDescription('MAGIC', self.magic_tel_positions, self.magic_tel_descriptions)
# Open ROOT files
file1 = uproot.open(self.file_list[0])
self.event_M1 = file1["Events"]
file2 = uproot.open(self.file_list[1])
self.event_M2 = file2["Events"]
self.meta = file1["RunHeaders"]
self._mc_header = self._parse_mc_header()
@property
def is_simulation(self):
"""
Whether the currently open file is simulated
Returns
-------
bool
"""
return True
@property
def datalevels(self):
"""
The datalevels provided by this event source
Returns
-------
tuple[str]
"""
return ('R0','R1','DL0')
@property
def subarray(self):
"""
Obtain the subarray from the EventSource
Returns
-------
ctapipe.instrument.SubarrayDescription
"""
return self.magic_subarray
@property
def obs_id(self):
"""
The current observation id
Returns
-------
int
"""
return self.run_number
@staticmethod
#This function was taken from the general MAGICEventSource by ctapipe_io_magic (https://github.com/cta-observatory/ctapipe_io_magic).
def is_compatible(file_mask):
"""
This method checks if the specified file mask corresponds
to MAGIC data files. The result will be True only if all
the files are of ROOT format and contain an 'Events' tree.
Parameters
----------
file_mask: str
A file mask to check
Returns
-------
bool:
True if the masked files are MAGIC data runs, False otherwise.
"""
        import uproot  # local import: this static method can be called before __init__ runs
        is_magic_root_file = True
        file_list = glob.glob(file_mask)
for file_path in file_list:
try:
with uproot.open(file_path) as input_data:
if 'Events' not in input_data:
is_magic_root_file = False
except ValueError:
# uproot raises ValueError if the file is not a ROOT file
is_magic_root_file = False
return is_magic_root_file
def _generator(self):
"""
Stereo event generator. Yields DataContainer instances, filled
with the read event data.
Returns
-------
"""
counter = 0
data = DataContainer()
data.meta['origin'] = "MAGIC"
data.meta['input_url'] = self.input_url
data.meta['is_simulation'] = True
data.mcheader = self._mc_header
#Reading data from root file for Events table
eventid_M1 = np.asarray(self.event_M1["MRawEvtHeader.fStereoEvtNumber"].array())
eventid_M2 = np.asarray(self.event_M2["MRawEvtHeader.fStereoEvtNumber"].array())
src_pos_cam_Y = np.asarray(self.event_M1["MSrcPosCam.fY"].array())
pointing_altitude = np.asarray(self.event_M1["MPointingPos.fZd"].array())
src_pos_cam_X = np.asarray(self.event_M1["MSrcPosCam.fX"].array())
pointing_azimuth = np.asarray(self.event_M1["MPointingPos.fAz"].array())
core_x = np.asarray(self.event_M1["MMcEvt.fCoreX"].array())
core_y = np.asarray(self.event_M1["MMcEvt.fCoreY"].array())
mc_energy = np.asarray(self.event_M1["MMcEvt.fEnergy"].array())/1000.0
h_first_int = np.asarray(self.event_M1["MMcEvt.fZFirstInteraction"].array())
mask = r".([A-Z]+)_M\d_za\d+to\d+_\d_\d+_Y_.*"
primary_id = re.findall(mask, self.file_list[0])[0]
if primary_id == 'GA':
shower_primary_id = 1
stereo_total = np.max(eventid_M1)
#Reading data from root file for Image table
charge_M1 = self.event_M1["MCerPhotEvt.fPixels.fPhot"].array()
peak_time_M1 = self.event_M1["MArrivalTime.fData"].array()
charge_M1 = np.asarray(charge_M1)
peak_time_M1 = np.asarray(peak_time_M1)
charge_M2 = self.event_M2["MCerPhotEvt.fPixels.fPhot"].array()
peak_time_M2 = self.event_M2["MArrivalTime.fData"].array()
charge_M2 = np.asarray(charge_M2)
peak_time_M2 = np.asarray(peak_time_M2)
total_events = min(len(self.event_M1["MCerPhotEvt.fPixels.fPhot"].array()), len(self.event_M2["MCerPhotEvt.fPixels.fPhot"].array()))
#Iterating over all events, and saving only stereo ones
tels_in_file = ["m1", "m2"]
tels_with_data = {1,2}
for i in range(0, total_events):
if eventid_M1[i] != 0:
obs_id = self.run_number
event_id = eventid_M1[i]
i2 = np.where(eventid_M2==eventid_M1[i])
i2 = i2[0].astype(int)
data.count = counter
# Setting up the Data container
data.index.obs_id = obs_id
data.index.event_id = event_id
data.r0.tel.clear()
data.r1.tel.clear()
data.dl0.tel.clear()
# Filling the DL1 container with the event data
for tel_i, tel_id in enumerate(tels_in_file):
#Adding telescope pointing container
data.pointing.tel[tel_i+1].azimuth = u.Quantity(np.deg2rad(pointing_azimuth[i]), u.rad)
data.pointing.tel[tel_i+1].altitude = u.Quantity(np.deg2rad(90.0 - pointing_altitude[i]), u.rad)
#Adding MC data
#The src_pos_cam_X/src_pos_cam_Y values are stored as alt/az to follow the generic data format.
data.mc.alt = Angle(np.deg2rad(src_pos_cam_Y[i] * 0.00337), u.rad)
data.mc.az = Angle(np.deg2rad(src_pos_cam_X[i] * 0.00337), u.rad)
data.mc.x_max = u.Quantity(0, X_MAX_UNIT)
data.mc.h_first_int = u.Quantity(h_first_int[i], u.m)
data.mc.core_x = u.Quantity(core_x[i], u.m)
data.mc.core_y = u.Quantity(core_y[i], u.m)
data.mc.energy = u.Quantity(mc_energy[i], u.TeV)
data.mc.shower_primary_id = shower_primary_id
# Adding event charge and peak positions per pixel
if tel_i == 0:
data.dl1.tel[tel_i + 1].image = charge_M1[i][:1039]
data.dl1.tel[tel_i + 1].peak_time = peak_time_M1[i][:1039]
else:
data.dl1.tel[tel_i + 1].image = charge_M2[i][:1039]
data.dl1.tel[tel_i + 1].peak_time = peak_time_M2[i][:1039]
# Setting the telescopes with data
data.r0.tels_with_data = tels_with_data
data.r1.tels_with_data = tels_with_data
data.dl0.tels_with_data = tels_with_data
yield data
counter += 1
return
def _parse_mc_header(self):
return containers.MAGICMCHeaderContainer(
corsika_version = self.meta["MMcRunHeader.fCorsikaVersion"].array()[0],
refl_version = self.meta["MMcRunHeader.fReflVersion"].array()[0],
cam_version = self.meta["MMcRunHeader.fCamVersion"].array()[0],
run_number = self.meta["MMcRunHeader.fMcRunNumber"].array()[0],
prod_site = self.meta["MMcRunHeader.fProductionSite"].array()[0],
date_run_mmcs = self.meta["MMcRunHeader.fDateRunMMCs"].array()[0],
date_run_cam = self.meta["MMcRunHeader.fDateRunCamera"].array()[0],
shower_theta_max = Angle(self.meta["MMcRunHeader.fShowerThetaMax"].array()[0], u.deg),
shower_theta_min = Angle(self.meta["MMcRunHeader.fShowerThetaMin"].array()[0], u.deg),
shower_phi_max = Angle(self.meta["MMcRunHeader.fShowerPhiMax"].array()[0], u.deg),
shower_phi_min = Angle(self.meta["MMcRunHeader.fShowerPhiMin"].array()[0], u.deg),
c_wave_lower = self.meta["MMcRunHeader.fCWaveLower"].array()[0],
c_wave_upper = self.meta["MMcRunHeader.fCWaveUpper"].array()[0],
num_obs_lev = self.meta["MMcRunHeader.fNumObsLev"].array()[0],
height_lev = self.meta["MMcRunHeader.fHeightLev[10]"].array(),
slope_spec = self.meta["MMcRunHeader.fSlopeSpec"].array()[0],
rand_pointing_cone_semi_angle = Angle(self.meta["MMcRunHeader.fRandomPointingConeSemiAngle"].array()[0], u.deg),
impact_max = self.meta["MMcRunHeader.fImpactMax"].array()[0],
star_field_rotate = self.meta["MMcRunHeader.fStarFieldRotate"].array()[0],
star_field_ra_h = self.meta["MMcRunHeader.fStarFieldRaH"].array()[0],
star_field_ra_m = self.meta["MMcRunHeader.fStarFieldRaM"].array()[0],
star_field_ra_s = self.meta["MMcRunHeader.fStarFieldRaS"].array()[0],
star_field_dec_d = self.meta["MMcRunHeader.fStarFieldDeD"].array()[0],
star_field_dec_m = self.meta["MMcRunHeader.fStarFieldDeM"].array()[0],
star_field_dec_s = self.meta["MMcRunHeader.fStarFieldDeS"].array()[0],
num_trig_cond = self.meta["MMcRunHeader.fNumTrigCond"].array()[0],
all_evts_trig = self.meta["MMcRunHeader.fAllEvtsTriggered"].array()[0],
mc_evt = self.meta["MMcRunHeader.fMcEvt"].array()[0],
mc_trig = self.meta["MMcRunHeader.fMcTrig"].array()[0],
mc_fadc = self.meta["MMcRunHeader.fMcFadc"].array()[0],
raw_evt = self.meta["MMcRunHeader.fRawEvt"].array()[0],
num_analised_pix = self.meta["MMcRunHeader.fNumAnalisedPixels"].array()[0],
num_simulated_showers = self.meta["MMcRunHeader.fNumSimulatedShowers"].array()[0],
num_stored_showers = self.meta["MMcRunHeader.fNumStoredShowers"].array()[0],
num_events = self.meta["MMcRunHeader.fNumEvents"].array()[0],
num_phe_from_dnsb = self.meta["MMcRunHeader.fNumPheFromDNSB"].array()[0],
elec_noise = self.meta["MMcRunHeader.fElecNoise"].array()[0],
optic_links_noise = self.meta["MMcRunHeader.fOpticLinksNoise"].array()[0] )
| 43.227425 | 199 | 0.598994 |
186dbfaa9e2fdb016ffea78189244c3dd4d398e9 | 2,022 | py | Python
dash/categories/models.py | rapidpro/dash | 71c8fcd1ab823ef31512b5ee22eca6158b3575c1 | ["BSD-3-Clause"] | 7 | 2015-06-25T20:09:35.000Z | 2019-02-12T17:41:46.000Z
dash/categories/models.py | rapidpro/dash | 71c8fcd1ab823ef31512b5ee22eca6158b3575c1 | ["BSD-3-Clause"] | 108 | 2015-01-05T13:23:57.000Z | 2022-02-10T10:55:01.000Z
dash/categories/models.py | rapidpro/dash | 71c8fcd1ab823ef31512b5ee22eca6158b3575c1 | ["BSD-3-Clause"] | 9 | 2015-06-15T15:13:13.000Z | 2019-01-09T18:34:20.000Z
from functools import partial
from smartmin.models import SmartModel
from django.db import models
from django.utils.translation import ugettext_lazy as _
from dash.orgs.models import Org
from dash.utils import generate_file_path
class Category(SmartModel):
"""
Every organization can choose to categorize their polls or stories
according to their needs.
"""
name = models.CharField(max_length=64, help_text=_("The name of this category"))
image = models.ImageField(
upload_to=partial(generate_file_path, "categories"),
null=True,
blank=True,
help_text=_("An optional image that can describe this category"),
)
org = models.ForeignKey(
Org,
on_delete=models.PROTECT,
related_name="categories",
help_text=_("The organization this category applies to"),
)
def get_first_image(self):
cat_images = self.images.filter(is_active=True).exclude(image="")
if cat_images and cat_images.first().image:
return cat_images.first().image
def get_label_from_instance(self):
label = str(self)
if isinstance(label, bytes):
label = label.decode("utf-8")
if not self.is_active:
label = "%s %s" % (label, "(Inactive)")
return label
def __str__(self):
return "%s - %s" % (self.org, self.name)
class Meta:
ordering = ["name"]
unique_together = ("name", "org")
verbose_name_plural = _("Categories")
class CategoryImage(SmartModel):
name = models.CharField(max_length=64, help_text=_("The name to describe this image"))
category = models.ForeignKey(
Category, on_delete=models.PROTECT, related_name="images", help_text=_("The category this image represents")
)
image = models.ImageField(
upload_to=partial(generate_file_path, "categories"), help_text=_("The image file to use")
)
def __str__(self):
return "%s - %s" % (self.category.name, self.name)
| 28.885714 | 116 | 0.656281 |
55e5c70712acd0f1400df6091d953d248fa466e8 | 25,270 | py | Python
dask/dataframe/utils.py | TryTestspace/dask | 86d4f7d8c6d48ec6c4b1de1b6cfd2d3f4e5a4c1b | ["BSD-3-Clause"] | null | null | null
dask/dataframe/utils.py | TryTestspace/dask | 86d4f7d8c6d48ec6c4b1de1b6cfd2d3f4e5a4c1b | ["BSD-3-Clause"] | null | null | null
dask/dataframe/utils.py | TryTestspace/dask | 86d4f7d8c6d48ec6c4b1de1b6cfd2d3f4e5a4c1b | ["BSD-3-Clause"] | 1 | 2021-03-28T04:50:43.000Z | 2021-03-28T04:50:43.000Z
from __future__ import absolute_import, division, print_function
import re
import textwrap
from distutils.version import LooseVersion
from collections import Iterator
import sys
import traceback
from contextlib import contextmanager
import numpy as np
import pandas as pd
import pandas.util.testing as tm
from pandas.api.types import is_categorical_dtype, is_scalar
try:
from pandas.api.types import is_datetime64tz_dtype
except ImportError:
# pandas < 0.19.2
from pandas.core.common import is_datetime64tz_dtype
from ..core import get_deps
from ..local import get_sync
from ..utils import asciitable, is_arraylike
PANDAS_VERSION = LooseVersion(pd.__version__)
def shard_df_on_index(df, divisions):
""" Shard a DataFrame by ranges on its index
Examples
--------
>>> df = pd.DataFrame({'a': [0, 10, 20, 30, 40], 'b': [5, 4 ,3, 2, 1]})
>>> df
a b
0 0 5
1 10 4
2 20 3
3 30 2
4 40 1
>>> shards = list(shard_df_on_index(df, [2, 4]))
>>> shards[0]
a b
0 0 5
1 10 4
>>> shards[1]
a b
2 20 3
3 30 2
>>> shards[2]
a b
4 40 1
>>> list(shard_df_on_index(df, []))[0] # empty case
a b
0 0 5
1 10 4
2 20 3
3 30 2
4 40 1
"""
if isinstance(divisions, Iterator):
divisions = list(divisions)
if not len(divisions):
yield df
else:
divisions = np.array(divisions)
df = df.sort_index()
index = df.index
if is_categorical_dtype(index):
index = index.as_ordered()
indices = index.searchsorted(divisions)
yield df.iloc[:indices[0]]
for i in range(len(indices) - 1):
yield df.iloc[indices[i]: indices[i + 1]]
yield df.iloc[indices[-1]:]
_META_TYPES = "meta : pd.DataFrame, pd.Series, dict, iterable, tuple, optional"
_META_DESCRIPTION = """\
An empty ``pd.DataFrame`` or ``pd.Series`` that matches the dtypes and
column names of the output. This metadata is necessary for many algorithms
in dask dataframe to work. For ease of use, some alternative inputs are
also available. Instead of a ``DataFrame``, a ``dict`` of ``{name: dtype}``
or iterable of ``(name, dtype)`` can be provided. Instead of a series, a
tuple of ``(name, dtype)`` can be used. If not provided, dask will try to
infer the metadata. This may lead to unexpected results, so providing
``meta`` is recommended. For more information, see
``dask.dataframe.utils.make_meta``.
"""
def insert_meta_param_description(*args, **kwargs):
"""Replace `$META` in docstring with param description.
If pad keyword is provided, will pad description by that number of
spaces (default is 8)."""
if not args:
return lambda f: insert_meta_param_description(f, **kwargs)
f = args[0]
indent = " " * kwargs.get('pad', 8)
body = textwrap.wrap(_META_DESCRIPTION, initial_indent=indent,
subsequent_indent=indent, width=78)
descr = '{0}\n{1}'.format(_META_TYPES, '\n'.join(body))
if f.__doc__:
if '$META' in f.__doc__:
f.__doc__ = f.__doc__.replace('$META', descr)
else:
# Put it at the end of the parameters section
parameter_header = 'Parameters\n%s----------' % indent[4:]
first, last = re.split('Parameters\\n[ ]*----------', f.__doc__)
parameters, rest = last.split('\n\n', 1)
f.__doc__ = '{0}{1}{2}\n{3}{4}\n\n{5}'.format(first, parameter_header,
parameters, indent[4:],
descr, rest)
return f
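# A minimal sketch of the decorator above in action. `frequencies` is a
# hypothetical function, not part of this module; the $META placeholder in
# its docstring is expanded into the full meta parameter description.
@insert_meta_param_description
def frequencies(df, meta=None):
    """Count value frequencies in each partition.

    Parameters
    ----------
    df : dask.dataframe.DataFrame
    $META
    """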
@contextmanager
def raise_on_meta_error(funcname=None, udf=False):
"""Reraise errors in this block to show metadata inference failure.
Parameters
----------
funcname : str, optional
If provided, will be added to the error message to indicate the
name of the method that failed.
"""
try:
yield
except Exception as e:
exc_type, exc_value, exc_traceback = sys.exc_info()
tb = ''.join(traceback.format_tb(exc_traceback))
msg = "Metadata inference failed{0}.\n\n"
if udf:
msg += ("You have supplied a custom function and Dask is unable to \n"
"determine the type of output that that function returns. \n\n"
"To resolve this please provide a meta= keyword.\n"
"The docstring of the Dask function you ran should have more information.\n\n")
msg += ("Original error is below:\n"
"------------------------\n"
"{1}\n\n"
"Traceback:\n"
"---------\n"
"{2}")
msg = msg.format(" in `{0}`".format(funcname) if funcname else "", repr(e), tb)
raise ValueError(msg)
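# Hedged usage sketch: wrapping metadata inference in the context manager so
# failures report the calling function. `user_func` here is hypothetical.
def _example_infer_meta(meta_df, user_func):
    with raise_on_meta_error(funcname='map_partitions', udf=True):
        return user_func(meta_df)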
UNKNOWN_CATEGORIES = '__UNKNOWN_CATEGORIES__'
def has_known_categories(x):
"""Returns whether the categories in `x` are known.
Parameters
----------
x : Series or CategoricalIndex
"""
x = getattr(x, '_meta', x)
if isinstance(x, pd.Series):
return UNKNOWN_CATEGORIES not in x.cat.categories
elif isinstance(x, pd.CategoricalIndex):
return UNKNOWN_CATEGORIES not in x.categories
raise TypeError("Expected Series or CategoricalIndex")
def strip_unknown_categories(x):
"""Replace any unknown categoricals with empty categoricals.
Useful for preventing ``UNKNOWN_CATEGORIES`` from leaking into results.
"""
if isinstance(x, (pd.Series, pd.DataFrame)):
x = x.copy()
if isinstance(x, pd.DataFrame):
cat_mask = x.dtypes == 'category'
if cat_mask.any():
cats = cat_mask[cat_mask].index
for c in cats:
if not has_known_categories(x[c]):
x[c].cat.set_categories([], inplace=True)
elif isinstance(x, pd.Series):
if is_categorical_dtype(x.dtype) and not has_known_categories(x):
x.cat.set_categories([], inplace=True)
if (isinstance(x.index, pd.CategoricalIndex) and not
has_known_categories(x.index)):
x.index = x.index.set_categories([])
elif isinstance(x, pd.CategoricalIndex) and not has_known_categories(x):
x = x.set_categories([])
return x
def clear_known_categories(x, cols=None, index=True):
"""Set categories to be unknown.
Parameters
----------
x : DataFrame, Series, Index
cols : iterable, optional
If x is a DataFrame, set only categoricals in these columns to unknown.
By default, all categorical columns are set to unknown categoricals
index : bool, optional
If True and x is a Series or DataFrame, set the clear known categories
in the index as well.
"""
if isinstance(x, (pd.Series, pd.DataFrame)):
x = x.copy()
if isinstance(x, pd.DataFrame):
mask = x.dtypes == 'category'
if cols is None:
cols = mask[mask].index
elif not mask.loc[cols].all():
raise ValueError("Not all columns are categoricals")
for c in cols:
x[c].cat.set_categories([UNKNOWN_CATEGORIES], inplace=True)
elif isinstance(x, pd.Series):
if is_categorical_dtype(x.dtype):
x.cat.set_categories([UNKNOWN_CATEGORIES], inplace=True)
if index and isinstance(x.index, pd.CategoricalIndex):
x.index = x.index.set_categories([UNKNOWN_CATEGORIES])
elif isinstance(x, pd.CategoricalIndex):
x = x.set_categories([UNKNOWN_CATEGORIES])
return x
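# Illustrative round trip for the helpers above (comments only):
#   s = pd.Series(['a', 'b'], dtype='category')
#   has_known_categories(s)        # True
#   s = clear_known_categories(s)  # categories become UNKNOWN_CATEGORIES
#   has_known_categories(s)        # False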
def _empty_series(name, dtype, index=None):
if isinstance(dtype, str) and dtype == 'category':
return pd.Series(pd.Categorical([UNKNOWN_CATEGORIES]),
name=name, index=index).iloc[:0]
return pd.Series([], dtype=dtype, name=name, index=index)
def make_meta(x, index=None):
"""Create an empty pandas object containing the desired metadata.
Parameters
----------
x : dict, tuple, list, pd.Series, pd.DataFrame, pd.Index, dtype, scalar
To create a DataFrame, provide a `dict` mapping of `{name: dtype}`, or
an iterable of `(name, dtype)` tuples. To create a `Series`, provide a
tuple of `(name, dtype)`. If a pandas object, names, dtypes, and index
should match the desired output. If a dtype or scalar, a scalar of the
same dtype is returned.
index : pd.Index, optional
Any pandas index to use in the metadata. If none provided, a
`RangeIndex` will be used.
Examples
--------
>>> make_meta([('a', 'i8'), ('b', 'O')])
Empty DataFrame
Columns: [a, b]
Index: []
>>> make_meta(('a', 'f8'))
Series([], Name: a, dtype: float64)
>>> make_meta('i8')
1
"""
if hasattr(x, '_meta'):
return x._meta
if isinstance(x, (pd.Series, pd.DataFrame)):
return x.iloc[0:0]
elif isinstance(x, pd.Index):
return x[0:0]
elif is_arraylike(x):
return x[:0]
index = index if index is None else index[0:0]
if isinstance(x, dict):
return pd.DataFrame({c: _empty_series(c, d, index=index)
for (c, d) in x.items()}, index=index)
if isinstance(x, tuple) and len(x) == 2:
return _empty_series(x[0], x[1], index=index)
elif isinstance(x, (list, tuple)):
if not all(isinstance(i, tuple) and len(i) == 2 for i in x):
raise ValueError("Expected iterable of tuples of (name, dtype), "
"got {0}".format(x))
return pd.DataFrame({c: _empty_series(c, d, index=index) for (c, d) in x},
columns=[c for c, d in x], index=index)
elif not hasattr(x, 'dtype') and x is not None:
# could be a string, a dtype object, or a python type. Skip `None`,
        # because it is implicitly converted to `dtype('f8')`, which we don't
# want here.
try:
dtype = np.dtype(x)
return _scalar_from_dtype(dtype)
except Exception:
# Continue on to next check
pass
if is_scalar(x):
return _nonempty_scalar(x)
raise TypeError("Don't know how to create metadata from {0}".format(x))
if PANDAS_VERSION >= "0.20.0":
_numeric_index_types = (pd.Int64Index, pd.Float64Index, pd.UInt64Index)
else:
_numeric_index_types = (pd.Int64Index, pd.Float64Index)
def _nonempty_index(idx):
typ = type(idx)
if typ is pd.RangeIndex:
return pd.RangeIndex(2, name=idx.name)
elif typ in _numeric_index_types:
return typ([1, 2], name=idx.name)
elif typ is pd.Index:
return pd.Index(['a', 'b'], name=idx.name)
elif typ is pd.DatetimeIndex:
start = '1970-01-01'
# Need a non-monotonic decreasing index to avoid issues with
# partial string indexing see https://github.com/dask/dask/issues/2389
# and https://github.com/pandas-dev/pandas/issues/16515
# This doesn't mean `_meta_nonempty` should ever rely on
# `self.monotonic_increasing` or `self.monotonic_decreasing`
data = [start, '1970-01-02'] if idx.freq is None else None
return pd.DatetimeIndex(data, start=start, periods=2, freq=idx.freq,
tz=idx.tz, name=idx.name)
elif typ is pd.PeriodIndex:
return pd.PeriodIndex(start='1970-01-01', periods=2, freq=idx.freq,
name=idx.name)
elif typ is pd.TimedeltaIndex:
start = np.timedelta64(1, 'D')
data = [start, start + 1] if idx.freq is None else None
return pd.TimedeltaIndex(data, start=start, periods=2, freq=idx.freq,
name=idx.name)
elif typ is pd.CategoricalIndex:
if len(idx.categories) == 0:
data = _nonempty_index(idx.categories)
cats = None
else:
data = _nonempty_index(_nonempty_index(idx.categories))
cats = idx.categories
return pd.CategoricalIndex(data, categories=cats,
ordered=idx.ordered, name=idx.name)
elif typ is pd.MultiIndex:
levels = [_nonempty_index(l) for l in idx.levels]
labels = [[0, 0] for i in idx.levels]
return pd.MultiIndex(levels=levels, labels=labels, names=idx.names)
raise TypeError("Don't know how to handle index of "
"type {0}".format(type(idx).__name__))
_simple_fake_mapping = {
'b': np.bool_(True),
'V': np.void(b' '),
'M': np.datetime64('1970-01-01'),
'm': np.timedelta64(1),
'S': np.str_('foo'),
'a': np.str_('foo'),
'U': np.unicode_('foo'),
'O': 'foo'
}
def _scalar_from_dtype(dtype):
if dtype.kind in ('i', 'f', 'u'):
return dtype.type(1)
elif dtype.kind == 'c':
return dtype.type(complex(1, 0))
elif dtype.kind in _simple_fake_mapping:
o = _simple_fake_mapping[dtype.kind]
return o.astype(dtype) if dtype.kind in ('m', 'M') else o
else:
raise TypeError("Can't handle dtype: {0}".format(dtype))
def _nonempty_scalar(x):
if isinstance(x, (pd.Timestamp, pd.Timedelta, pd.Period)):
return x
elif np.isscalar(x):
dtype = x.dtype if hasattr(x, 'dtype') else np.dtype(type(x))
return _scalar_from_dtype(dtype)
else:
raise TypeError("Can't handle meta of type "
"'{0}'".format(type(x).__name__))
def _nonempty_series(s, idx):
dtype = s.dtype
if is_datetime64tz_dtype(dtype):
entry = pd.Timestamp('1970-01-01', tz=dtype.tz)
data = [entry, entry]
elif is_categorical_dtype(dtype):
if len(s.cat.categories):
data = [s.cat.categories[0]] * 2
cats = s.cat.categories
else:
data = _nonempty_index(s.cat.categories)
cats = None
data = pd.Categorical(data, categories=cats,
ordered=s.cat.ordered)
else:
entry = _scalar_from_dtype(dtype)
data = np.array([entry, entry], dtype=dtype)
return pd.Series(data, name=s.name, index=idx)
def meta_nonempty(x):
"""Create a nonempty pandas object from the given metadata.
Returns a pandas DataFrame, Series, or Index that contains two rows
of fake data.
"""
if isinstance(x, pd.Index):
return _nonempty_index(x)
elif isinstance(x, pd.Series):
idx = _nonempty_index(x.index)
return _nonempty_series(x, idx)
elif isinstance(x, pd.DataFrame):
idx = _nonempty_index(x.index)
data = {i: _nonempty_series(x.iloc[:, i], idx)
for i, c in enumerate(x.columns)}
res = pd.DataFrame(data, index=idx,
columns=np.arange(len(x.columns)))
res.columns = x.columns
return res
elif is_scalar(x):
return _nonempty_scalar(x)
else:
raise TypeError("Expected Index, Series, DataFrame, or scalar, "
"got {0}".format(type(x).__name__))
def check_meta(x, meta, funcname=None, numeric_equal=True):
"""Check that the dask metadata matches the result.
If metadata matches, ``x`` is passed through unchanged. A nice error is
raised if metadata doesn't match.
Parameters
----------
x : DataFrame, Series, or Index
meta : DataFrame, Series, or Index
The expected metadata that ``x`` should match
funcname : str, optional
The name of the function in which the metadata was specified. If
provided, the function name will be included in the error message to be
more helpful to users.
    numeric_equal : bool, optional
        If True, integer and floating dtypes compare equal. This is useful due
        to pandas' implicit conversion of integer to floating upon encountering
        missingness, which is hard to infer statically.
"""
    eq_types = {'i', 'f'} if numeric_equal else set()
def equal_dtypes(a, b):
if is_categorical_dtype(a) != is_categorical_dtype(b):
return False
        if a == '-' or b == '-':  # '-' is the fillna placeholder used below
            return False
if is_categorical_dtype(a) and is_categorical_dtype(b):
# Pandas 0.21 CategoricalDtype compat
if (PANDAS_VERSION >= '0.21.0' and
(UNKNOWN_CATEGORIES in a.categories or
UNKNOWN_CATEGORIES in b.categories)):
return True
return a == b
return (a.kind in eq_types and b.kind in eq_types) or (a == b)
if not isinstance(meta, (pd.Series, pd.Index, pd.DataFrame)):
raise TypeError("Expected partition to be DataFrame, Series, or "
"Index, got `%s`" % type(meta).__name__)
if type(x) != type(meta):
errmsg = ("Expected partition of type `%s` but got "
"`%s`" % (type(meta).__name__, type(x).__name__))
elif isinstance(meta, pd.DataFrame):
dtypes = pd.concat([x.dtypes, meta.dtypes], axis=1)
bad = [(col, a, b) for col, a, b in dtypes.fillna('-').itertuples()
if not equal_dtypes(a, b)]
if not bad:
return x
errmsg = ("Partition type: `%s`\n%s" %
(type(meta).__name__,
asciitable(['Column', 'Found', 'Expected'], bad)))
else:
if equal_dtypes(x.dtype, meta.dtype):
return x
errmsg = ("Partition type: `%s`\n%s" %
(type(meta).__name__,
asciitable(['', 'dtype'], [('Found', x.dtype),
('Expected', meta.dtype)])))
raise ValueError("Metadata mismatch found%s.\n\n"
"%s" % ((" in `%s`" % funcname if funcname else ""),
errmsg))
def index_summary(idx, name=None):
"""Summarized representation of an Index.
"""
n = len(idx)
if name is None:
name = idx.__class__.__name__
if n:
head = idx[0]
tail = idx[-1]
summary = ', {} to {}'.format(head, tail)
else:
summary = ''
return "{}: {} entries{}".format(name, n, summary)
###############################################################
# Testing
###############################################################
def _check_dask(dsk, check_names=True, check_dtypes=True, result=None):
import dask.dataframe as dd
if hasattr(dsk, 'dask'):
if result is None:
result = dsk.compute(scheduler='sync')
if isinstance(dsk, dd.Index):
assert isinstance(result, pd.Index), type(result)
assert isinstance(dsk._meta, pd.Index), type(dsk._meta)
if check_names:
assert dsk.name == result.name
assert dsk._meta.name == result.name
if isinstance(result, pd.MultiIndex):
assert result.names == dsk._meta.names
if check_dtypes:
assert_dask_dtypes(dsk, result)
elif isinstance(dsk, dd.Series):
assert isinstance(result, pd.Series), type(result)
assert isinstance(dsk._meta, pd.Series), type(dsk._meta)
if check_names:
assert dsk.name == result.name, (dsk.name, result.name)
assert dsk._meta.name == result.name
if check_dtypes:
assert_dask_dtypes(dsk, result)
_check_dask(dsk.index, check_names=check_names,
check_dtypes=check_dtypes, result=result.index)
elif isinstance(dsk, dd.DataFrame):
assert isinstance(result, pd.DataFrame), type(result)
assert isinstance(dsk.columns, pd.Index), type(dsk.columns)
assert isinstance(dsk._meta, pd.DataFrame), type(dsk._meta)
if check_names:
tm.assert_index_equal(dsk.columns, result.columns)
tm.assert_index_equal(dsk._meta.columns, result.columns)
if check_dtypes:
assert_dask_dtypes(dsk, result)
_check_dask(dsk.index, check_names=check_names,
check_dtypes=check_dtypes, result=result.index)
elif isinstance(dsk, dd.core.Scalar):
assert (np.isscalar(result) or
isinstance(result, (pd.Timestamp, pd.Timedelta)))
if check_dtypes:
assert_dask_dtypes(dsk, result)
else:
msg = 'Unsupported dask instance {0} found'.format(type(dsk))
raise AssertionError(msg)
return result
return dsk
def _maybe_sort(a):
# sort by value, then index
try:
if isinstance(a, pd.DataFrame):
a = a.sort_values(by=a.columns.tolist())
else:
a = a.sort_values()
except (TypeError, IndexError, ValueError):
pass
return a.sort_index()
def assert_eq(a, b, check_names=True, check_dtypes=True,
check_divisions=True, check_index=True, **kwargs):
if check_divisions:
assert_divisions(a)
assert_divisions(b)
if hasattr(a, 'divisions') and hasattr(b, 'divisions'):
at = type(np.asarray(a.divisions).tolist()[0]) # numpy to python
bt = type(np.asarray(b.divisions).tolist()[0]) # scalar conversion
assert at == bt, (at, bt)
assert_sane_keynames(a)
assert_sane_keynames(b)
a = _check_dask(a, check_names=check_names, check_dtypes=check_dtypes)
b = _check_dask(b, check_names=check_names, check_dtypes=check_dtypes)
if not check_index:
a = a.reset_index(drop=True)
b = b.reset_index(drop=True)
if isinstance(a, pd.DataFrame):
a = _maybe_sort(a)
b = _maybe_sort(b)
tm.assert_frame_equal(a, b, **kwargs)
elif isinstance(a, pd.Series):
a = _maybe_sort(a)
b = _maybe_sort(b)
tm.assert_series_equal(a, b, check_names=check_names, **kwargs)
elif isinstance(a, pd.Index):
tm.assert_index_equal(a, b, **kwargs)
else:
if a == b:
return True
else:
if np.isnan(a):
assert np.isnan(b)
else:
assert np.allclose(a, b)
return True
def assert_dask_graph(dask, label):
if hasattr(dask, 'dask'):
dask = dask.dask
assert isinstance(dask, dict)
for k in dask:
if isinstance(k, tuple):
k = k[0]
if k.startswith(label):
return True
raise AssertionError("given dask graph doesn't contain label: {label}"
.format(label=label))
def assert_divisions(ddf):
if not hasattr(ddf, 'divisions'):
return
if not hasattr(ddf, 'index'):
return
if not ddf.known_divisions:
return
def index(x):
return (x if isinstance(x, pd.Index)
else x.index.get_level_values(0))
results = get_sync(ddf.dask, ddf.__dask_keys__())
for i, df in enumerate(results[:-1]):
if len(df):
assert index(df).min() >= ddf.divisions[i]
assert index(df).max() < ddf.divisions[i + 1]
if len(results[-1]):
assert index(results[-1]).min() >= ddf.divisions[-2]
assert index(results[-1]).max() <= ddf.divisions[-1]
def assert_sane_keynames(ddf):
if not hasattr(ddf, 'dask'):
return
for k in ddf.dask.keys():
while isinstance(k, tuple):
k = k[0]
assert isinstance(k, (str, bytes))
assert len(k) < 100
assert ' ' not in k
if sys.version_info[0] >= 3:
assert k.split('-')[0].isidentifier()
def assert_dask_dtypes(ddf, res, numeric_equal=True):
"""Check that the dask metadata matches the result.
If `numeric_equal`, integer and floating dtypes compare equal. This is
useful due to the implicit conversion of integer to floating upon
encountering missingness, which is hard to infer statically."""
eq_types = {'O', 'S', 'U', 'a'} # treat object and strings alike
if numeric_equal:
eq_types.update(('i', 'f'))
if isinstance(res, pd.DataFrame):
for col, a, b in pd.concat([ddf._meta.dtypes, res.dtypes],
axis=1).itertuples():
assert (a.kind in eq_types and b.kind in eq_types) or (a == b)
elif isinstance(res, (pd.Series, pd.Index)):
a = ddf._meta.dtype
b = res.dtype
assert (a.kind in eq_types and b.kind in eq_types) or (a == b)
else:
if hasattr(ddf._meta, 'dtype'):
a = ddf._meta.dtype
if not hasattr(res, 'dtype'):
assert np.isscalar(res)
b = np.dtype(type(res))
else:
b = res.dtype
assert (a.kind in eq_types and b.kind in eq_types) or (a == b)
else:
assert type(ddf._meta) == type(res)
def assert_max_deps(x, n, eq=True):
dependencies, dependents = get_deps(x.dask)
if eq:
assert max(map(len, dependencies.values())) == n
else:
assert max(map(len, dependencies.values())) <= n
| 35.793201 | 99 | 0.583657 |
075a7595e71f210ad83aaf171f320bbf4edd767e | 604 | py | Python
dcosdev/basic/local_config.py | iss-lab/dcosdev | 669b97aedc71a8d1eccd5f9ce722bc0fda64ceff | ["Apache-2.0"] | null | null | null
dcosdev/basic/local_config.py | iss-lab/dcosdev | 669b97aedc71a8d1eccd5f9ce722bc0fda64ceff | ["Apache-2.0"] | null | null | null
dcosdev/basic/local_config.py | iss-lab/dcosdev | 669b97aedc71a8d1eccd5f9ce722bc0fda64ceff | ["Apache-2.0"] | null | null | null
template = """
values:
package-name: %(package-name)s
package-version: snapshot
artifacts-url: http://%(minio-host)s/artifacts/%(package-name)s
minio-host: %(minio-host)s
minio-access-key: %(minio-access-key)s
minio-secret-key: %(minio-secret-key)s
upgrades-from: ""
downgrades-to: ""
documentation-path: https://github.com/YOURNAMEHERE/dcos-%(package-name)s
issues-path: https://github.com/YOURNAMEHERE/dcos-%(package-name)s/issues
maintainer: https://github.com/YOURNAMEHERE/dcos-%(package-name)s
release-version: 0
universe-path: "dist/universe"
is-complete-path: true
"""
| 33.555556 | 75 | 0.716887 |
ac77d0669647aa1194ef21ccf20ac74e057e9f3c | 614 | py | Python
metarecord/management/commands/export_data.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 2 | 2017-04-21T15:36:23.000Z | 2020-12-04T09:32:39.000Z
metarecord/management/commands/export_data.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 168 | 2016-10-05T12:58:41.000Z | 2021-08-31T14:29:56.000Z
metarecord/management/commands/export_data.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 7 | 2016-10-13T12:51:36.000Z | 2021-01-21T13:05:04.000Z
from django.core.management.base import BaseCommand
from metarecord.exporter.jhs import JHSExporter, JHSExporterException
class Command(BaseCommand):
help = "Export ERMCS data to JHS191 XML file"
def __init__(self):
super().__init__()
def add_arguments(self, parser):
parser.add_argument('filename', type=str)
def handle(self, *args, **options):
filename = options['filename']
jhs_exporter = JHSExporter()
try:
jhs_exporter.export_data(filename)
except JHSExporterException as e:
self.stderr.write(self.style.ERROR(e))
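# Usage sketch (assumption, following Django's standard management-command
# invocation; the output filename is hypothetical):
#     python manage.py export_data jhs_export.xml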
| 26.695652 | 69 | 0.674267 |
566f0b726a51a0dbb21e7a582ea822f60fbf5309 | 2,194 | py | Python | kvh_gyro/scripts/gyro_node.py | jake3991/Argonaut | c006b02688804369a172d0c6f31e4962258ecbea | ["BSD-3-Clause"] | 6 | 2021-05-06T23:55:26.000Z | 2022-03-26T23:13:46.000Z | kvh_gyro/scripts/gyro_node.py | jake3991/Argonaut | c006b02688804369a172d0c6f31e4962258ecbea | ["BSD-3-Clause"] | null | null | null | kvh_gyro/scripts/gyro_node.py | jake3991/Argonaut | c006b02688804369a172d0c6f31e4962258ecbea | ["BSD-3-Clause"] | 1 | 2021-08-17T14:41:24.000Z | 2021-08-17T14:41:24.000Z |
#!/usr/bin/env python
import serial
from time import time
import struct
import numpy as np
import rospy
from kvh_gyro.msg import gyro
if __name__ == '__main__':
#init the node
rospy.init_node('gyro_node')
#define the publisher
pub = rospy.Publisher('/gyro', gyro, queue_size=1000)
#define some parameters
serialPort = "/dev/ttyUSB0" # RS422 converter port for IMU
baudrate = 921600 #default baud rate, runs at 1000hz
packetSepChar = '\xfe\x81\xff\x55'
imuStatus = 'unknown'
l = [''] # List of queued packets
p = '' # Packet to parse
# Initialize serial connection.
try:
ser = serial.Serial(serialPort, baudrate, timeout=0)
rospy.loginfo("Starting Gyro")
except serial.SerialException:
rospy.loginfo("Failed to Start Gyro")
exit(1)
#while loop to get the data from the gyroscope
    while not rospy.is_shutdown():
d = ser.readline() # Raw data
l = l[:-1] + (l[-1] + d).split(packetSepChar) # Get packets. The last element in l may not necessarily be a whole packet.
# If we have at least one whole packet
if len(l) > 1:
try:
p = l[0]
l = l[1:] # Pop off packet that was just read.
# Parse gyro data as big-endian floats.
if len(p) == 32:
dx = struct.unpack('>f', p[:4])[0]
dy = struct.unpack('>f', p[4:8])[0]
dz = struct.unpack('>f', p[8:12])[0]
imuStatus = bin(ord(p[24]))
#publish the gyro data
msg = gyro()
msg.status = True
msg.header.stamp = rospy.Time.now()
msg.delta = [dx, dy, dz]
pub.publish(msg)
else:
                    #publish an error message, something went wrong
msg = gyro()
msg.status = False
msg.header.stamp = rospy.Time.now()
pub.publish(msg)
            except Exception:
pass # Sometimes we'll mangle the first packet. Ignore this.
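# Packet-layout sketch (illustration only, not part of the node): each 32-byte
# packet carries three big-endian floats in bytes 0-11, matching the parsing
# above; with the struct module this is equivalent to:
#     dx, dy, dz = struct.unpack('>fff', packet[:12])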
| 32.264706 | 131 | 0.522789 |
f34beb240b8face32acf68eac9dbfe9a6c0f6248 | 17,706 | py | Python | geoviews/plotting/mpl/__init__.py | nickhand/geoviews | 964afebf4950a9c9b69b5f44e751f74a52597eb8 | ["BSD-3-Clause"] | null | null | null | geoviews/plotting/mpl/__init__.py | nickhand/geoviews | 964afebf4950a9c9b69b5f44e751f74a52597eb8 | ["BSD-3-Clause"] | null | null | null | geoviews/plotting/mpl/__init__.py | nickhand/geoviews | 964afebf4950a9c9b69b5f44e751f74a52597eb8 | ["BSD-3-Clause"] | null | null | null |
import copy
import numpy as np
import param
import matplotlib.ticker as mticker
from cartopy import crs as ccrs
from cartopy.io.img_tiles import GoogleTiles
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
try:
from owslib.wmts import WebMapTileService
except ImportError:
WebMapTileService = None
from holoviews.core import Store, HoloMap, Layout, Overlay, Element, NdLayout
from holoviews.core import util
from holoviews.core.data import GridInterface
from holoviews.core.options import SkipRendering, Options
from holoviews.plotting.mpl import (
ElementPlot, PointPlot, AnnotationPlot, TextPlot, LabelsPlot,
LayoutPlot as HvLayoutPlot, OverlayPlot as HvOverlayPlot,
PathPlot, PolygonPlot, RasterPlot, ContourPlot, GraphPlot,
TriMeshPlot, QuadMeshPlot, VectorFieldPlot, HexTilesPlot
)
from holoviews.plotting.mpl.util import get_raster_array, wrap_formatter
from ...element import (Image, Points, Feature, WMTS, Tiles, Text,
LineContours, FilledContours, is_geographic,
Path, Polygons, Shape, RGB, Contours, Nodes,
EdgePaths, Graph, TriMesh, QuadMesh, VectorField,
HexTiles, Labels)
from ...util import geo_mesh, poly_types
from ..plot import ProjectionPlot
from ...operation import project_points, project_path, project_graph, project_quadmesh
class LayoutPlot(ProjectionPlot, HvLayoutPlot):
"""
Extends HoloViews LayoutPlot with functionality to determine
the correct projection for each axis.
"""
vspace = param.Number(default=0.3, doc="""
Specifies the space between vertically adjacent elements in the grid.
Default value is set conservatively to avoid overlap of subplots.""")
v17_layout_format = True
class GeoOverlayPlot(ProjectionPlot, HvOverlayPlot):
"""
Extends HoloViews OverlayPlot with functionality to determine
the correct projection for each axis.
"""
global_extent = param.Boolean(default=False, doc="""
Set the extent of the Axes to the limits of the projection.""")
_propagate_options = HvOverlayPlot._propagate_options + ['global_extent']
def __init__(self, element, **params):
super(GeoOverlayPlot, self).__init__(element, **params)
plot_opts = self.lookup_options(self.hmap.last, 'plot').options
self.geographic = any(self.hmap.traverse(is_geographic, [Element]))
if 'aspect' not in plot_opts and self.geographic:
self.aspect = 'equal'
def _finalize_axis(self, *args, **kwargs):
gridlabels = self.geographic and isinstance(self.projection, (ccrs.PlateCarree, ccrs.Mercator))
if gridlabels:
xaxis, yaxis = self.xaxis, self.yaxis
self.xaxis = self.yaxis = None
try:
ret = super(GeoOverlayPlot, self)._finalize_axis(*args, **kwargs)
except Exception as e:
raise e
finally:
if gridlabels:
self.xaxis, self.yaxis = xaxis, yaxis
axis = self.handles['axis']
if self.show_grid:
axis.grid()
if self.global_extent:
axis.set_global()
return ret
class GeoPlot(ProjectionPlot, ElementPlot):
"""
Plotting baseclass for geographic plots with a cartopy projection.
"""
apply_ranges = param.Boolean(default=False, doc="""
Do not use ranges to compute plot extents by default.""")
global_extent = param.Boolean(default=False, doc="""
Whether the plot should display the whole globe.""")
projection = param.Parameter(default=ccrs.PlateCarree())
# Project operation to apply to the element
_project_operation = None
def __init__(self, element, **params):
if 'projection' not in params:
el = element.last if isinstance(element, HoloMap) else element
params['projection'] = el.crs
super(GeoPlot, self).__init__(element, **params)
plot_opts = self.lookup_options(self.hmap.last, 'plot').options
self.geographic = is_geographic(self.hmap.last)
if 'aspect' not in plot_opts:
self.aspect = 'equal' if self.geographic else 'square'
def _process_grid(self, gl):
if not self.show_grid:
gl.xlines = False
gl.ylines = False
if self.xaxis and self.xaxis != 'bare':
if isinstance(self.xticks, list):
gl.xlocator = mticker.FixedLocator(self.xticks)
elif isinstance(self.xticks, int):
gl.xlocator = mticker.MaxNLocator(self.xticks)
if self.xaxis in ['bottom', 'top-bare']:
gl.xlabels_top = False
elif self.xaxis in ['top', 'bottom-bare']:
gl.xlabels_bottom = False
if self.xformatter is None:
gl.xformatter = LONGITUDE_FORMATTER
else:
gl.xformatter = wrap_formatter(self.xformatter)
if self.yaxis and self.yaxis != 'bare':
if isinstance(self.yticks, list):
gl.ylocator = mticker.FixedLocator(self.yticks)
elif isinstance(self.yticks, int):
gl.ylocator = mticker.MaxNLocator(self.yticks)
if self.yaxis in ['left', 'right-bare']:
gl.ylabels_right = False
elif self.yaxis in ['right', 'left-bare']:
gl.ylabels_left = False
if self.yformatter is None:
gl.yformatter = LATITUDE_FORMATTER
else:
gl.yformatter = wrap_formatter(self.yformatter)
def _finalize_axis(self, *args, **kwargs):
gridlabels = self.geographic and isinstance(self.projection, (ccrs.PlateCarree, ccrs.Mercator))
if gridlabels:
xaxis, yaxis = self.xaxis, self.yaxis
self.xaxis = self.yaxis = None
try:
ret = super(GeoPlot, self)._finalize_axis(*args, **kwargs)
except Exception as e:
raise e
finally:
if gridlabels:
self.xaxis, self.yaxis = xaxis, yaxis
axis = self.handles['axis']
# Only PlateCarree and Mercator plots support grid labels.
if 'gridlines' in self.handles:
gl = self.handles['gridlines']
else:
self.handles['gridlines'] = gl = axis.gridlines(
draw_labels=gridlabels and self.zorder == 0)
self._process_grid(gl)
if self.global_extent:
axis.set_global()
return ret
def get_data(self, element, ranges, style):
if self._project_operation and self.geographic:
element = self._project_operation(element, projection=self.projection)
return super(GeoPlot, self).get_data(element, ranges, style)
def teardown_handles(self):
"""
Delete artist handle so it can be redrawn.
"""
try:
self.handles['artist'].remove()
except ValueError:
pass
class GeoImagePlot(GeoPlot, RasterPlot):
"""
Draws a pcolormesh plot from the data in a Image Element.
"""
style_opts = ['alpha', 'cmap', 'visible', 'filterrad', 'clims', 'norm']
def get_data(self, element, ranges, style):
self._norm_kwargs(element, ranges, style, element.vdims[0])
style.pop('interpolation', None)
xs, ys, zs = geo_mesh(element)
xs = GridInterface._infer_interval_breaks(xs)
ys = GridInterface._infer_interval_breaks(ys)
if self.geographic:
style['transform'] = element.crs
return (xs, ys, zs), style, {}
def init_artists(self, ax, plot_args, plot_kwargs):
artist = ax.pcolormesh(*plot_args, **plot_kwargs)
return {'artist': artist}
def update_handles(self, *args):
"""
Update the elements of the plot.
"""
return GeoPlot.update_handles(self, *args)
class GeoQuadMeshPlot(GeoPlot, QuadMeshPlot):
_project_operation = project_quadmesh
def get_data(self, element, ranges, style):
if self._project_operation and self.geographic:
element = self._project_operation(element, projection=self.projection)
return super(GeoPlot, self).get_data(element, ranges, style)
class GeoRGBPlot(GeoImagePlot):
"""
Draws a imshow plot from the data in a RGB Element.
"""
style_opts = ['alpha', 'visible', 'filterrad']
def get_data(self, element, ranges, style):
self._norm_kwargs(element, ranges, style, element.vdims[0])
style.pop('interpolation', None)
zs = get_raster_array(element)[::-1]
l, b, r, t = element.bounds.lbrt()
style['extent'] = [l, r, b, t]
if self.geographic:
style['transform'] = element.crs
return (zs,), style, {}
def init_artists(self, ax, plot_args, plot_kwargs):
artist = ax.imshow(*plot_args, **plot_kwargs)
return {'artist': artist}
def update_handles(self, *args):
"""
Update the elements of the plot.
"""
return GeoPlot.update_handles(self, *args)
class GeoPointPlot(GeoPlot, PointPlot):
"""
Draws a scatter plot from the data in a Points Element.
"""
apply_ranges = param.Boolean(default=True)
_project_operation = project_points
class GeoLabelsPlot(GeoPlot, LabelsPlot):
"""
Draws a scatter plot from the data in a Labels Element.
"""
apply_ranges = param.Boolean(default=True)
_project_operation = project_points
class GeoHexTilesPlot(GeoPlot, HexTilesPlot):
"""
Draws a scatter plot from the data in a Points Element.
"""
apply_ranges = param.Boolean(default=True)
_project_operation = project_points
class GeoVectorFieldPlot(GeoPlot, VectorFieldPlot):
"""
Draws a vector field plot from the data in a VectorField Element.
"""
apply_ranges = param.Boolean(default=True)
_project_operation = project_points
class GeometryPlot(GeoPlot):
def init_artists(self, ax, plot_args, plot_kwargs):
if self.geographic:
artist = ax.add_geometries(*plot_args, **plot_kwargs)
return {'artist': artist}
else:
            return super(GeometryPlot, self).init_artists(ax, plot_args, plot_kwargs)
class GeoPathPlot(GeoPlot, PathPlot):
"""
Draws a Path plot from a Path Element.
"""
apply_ranges = param.Boolean(default=True)
_project_operation = project_path
class GeoContourPlot(GeoPlot, ContourPlot):
"""
Draws a contour plot from a Contours Element.
"""
apply_ranges = param.Boolean(default=True)
_project_operation = project_path
class GeoPolygonPlot(GeoPlot, PolygonPlot):
"""
Draws a scatter plot from the data in a Points Element.
"""
apply_ranges = param.Boolean(default=True)
_project_operation = project_path
class LineContourPlot(GeoContourPlot):
"""
Draws a contour plot.
"""
levels = param.ClassSelector(default=10, class_=(list, int), doc="""
A list of scalar values used to specify the contour levels.""")
class FilledContourPlot(GeoPolygonPlot):
"""
Draws a filled contour plot.
"""
levels = param.ClassSelector(default=10, class_=(list, int), doc="""
A list of scalar values used to specify the contour levels.""")
class GeoShapePlot(GeometryPlot, PolygonPlot):
"""
Draws a scatter plot from the data in a Points Element.
"""
apply_ranges = param.Boolean(default=True)
def get_data(self, element, ranges, style):
if self.geographic:
if not isinstance(element.data['geometry'], poly_types):
style['facecolor'] = 'none'
vdim = element.vdims[0] if element.vdims else None
value = element.level
if vdim is not None and (value is not None and np.isfinite(value)):
self._norm_kwargs(element, ranges, style, vdim)
style['clim'] = style.pop('vmin'), style.pop('vmax')
style['array'] = np.array([value])
return ([element.data['geometry']], element.crs), style, {}
else:
            raise SkipRendering('Shape can only be plotted on geographic plot, '
                                'supply a coordinate reference system.')
class GeoGraphPlot(GeoPlot, GraphPlot):
apply_ranges = param.Boolean(default=True)
_project_operation = project_graph
class GeoTriMeshPlot(GeoPlot, TriMeshPlot):
apply_ranges = param.Boolean(default=True)
_project_operation = project_graph
########################################
# Geographic features and annotations #
########################################
class FeaturePlot(GeoPlot):
"""
Draws a feature from a Features Element.
"""
scale = param.ObjectSelector(default='110m',
objects=['10m', '50m', '110m'],
doc="The scale of the Feature in meters.")
style_opts = ['alpha', 'facecolor', 'edgecolor', 'linestyle', 'linewidth',
'visible']
def get_data(self, element, ranges, style):
if hasattr(element.data, 'with_scale'):
feature = element.data.with_scale(self.scale)
else:
feature = copy.copy(element.data)
feature.scale = self.scale
return (feature,), style, {}
def init_artists(self, ax, plot_args, plot_kwargs):
return {'artist': ax.add_feature(*plot_args, **plot_kwargs)}
class WMTSPlot(GeoPlot):
"""
Adds a Web Map Tile Service from a WMTS Element.
"""
zoom = param.Integer(default=8, doc="""
Controls the zoom level of the tile source.""")
style_opts = ['alpha', 'cmap', 'interpolation', 'visible',
'filterrad', 'clims', 'norm']
def get_data(self, element, ranges, style):
if isinstance(element.data, util.basestring):
tile_source = GoogleTiles(url=element.data)
return (tile_source, self.zoom), style, {}
else:
tile_source = element.data
return (tile_source, element.layer), style, {}
def init_artists(self, ax, plot_args, plot_kwargs):
if isinstance(plot_args[0], GoogleTiles):
if 'artist' in self.handles:
return {'artist': self.handles['artist']}
img = ax.add_image(*plot_args, **plot_kwargs)
return {'artist': img or plot_args[0]}
return {'artist': ax.add_wmts(*plot_args, **plot_kwargs)}
def teardown_handles(self):
"""
If no custom update_handles method is supplied this method
is called to tear down any previous handles before replacing
them.
"""
if not isinstance(self.handles.get('artist'), GoogleTiles):
self.handles['artist'].remove()
class GeoAnnotationPlot(AnnotationPlot):
"""
AnnotationPlot handles the display of all annotation elements.
"""
def initialize_plot(self, ranges=None):
annotation = self.hmap.last
key = self.keys[-1]
ranges = self.compute_ranges(self.hmap, key, ranges)
ranges = util.match_spec(annotation, ranges)
axis = self.handles['axis']
opts = self.style[self.cyclic_index]
handles = self.draw_annotation(axis, annotation.data,
annotation.crs, opts)
self.handles['annotations'] = handles
return self._finalize_axis(key, ranges=ranges)
def update_handles(self, key, axis, annotation, ranges, style):
# Clear all existing annotations
for element in self.handles['annotations']:
element.remove()
self.handles['annotations'] = self.draw_annotation(axis,
annotation.data,
annotation.crs,
style)
class GeoTextPlot(GeoAnnotationPlot, TextPlot):
"Draw the Text annotation object"
def draw_annotation(self, axis, data, crs, opts):
(x, y, text, fontsize,
horizontalalignment, verticalalignment, rotation) = data
opts['fontsize'] = fontsize
if crs:
x, y = axis.projection.transform_point(x, y, src_crs=crs)
return [axis.text(x, y, text,
horizontalalignment=horizontalalignment,
verticalalignment=verticalalignment,
rotation=rotation, **opts)]
# Register plots with HoloViews
Store.register({LineContours: LineContourPlot,
FilledContours: FilledContourPlot,
Image: GeoImagePlot,
Feature: FeaturePlot,
WMTS: WMTSPlot,
Tiles: WMTSPlot,
Points: GeoPointPlot,
Labels: GeoLabelsPlot,
VectorField: GeoVectorFieldPlot,
Text: GeoTextPlot,
Layout: LayoutPlot,
NdLayout: LayoutPlot,
Overlay: GeoOverlayPlot,
Polygons: GeoPolygonPlot,
Path: GeoPathPlot,
Contours: GeoContourPlot,
RGB: GeoRGBPlot,
Shape: GeoShapePlot,
Graph: GeoGraphPlot,
TriMesh: GeoTriMeshPlot,
Nodes: GeoPointPlot,
EdgePaths: GeoPathPlot,
HexTiles: GeoHexTilesPlot,
QuadMesh: GeoQuadMeshPlot}, 'matplotlib')
# Define plot and style options
options = Store.options(backend='matplotlib')
options.Shape = Options('style', edgecolor='black', facecolor='#30A2DA')
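# Hedged usage sketch (not part of this module; the call pattern is an
# assumption of typical HoloViews/GeoViews usage once the registrations
# above are active):
#     import geoviews as gv
#     gv.extension('matplotlib')
#     gv.Points([(0, 0), (10, 10)]).opts(global_extent=True)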
| 32.369287 | 103 | 0.618096 |
de6250d7270943c600a12566a5457aae31347e86 | 3,075 | py | Python | support_tickets/support_tickets/doctype/support_ticket/support_ticket.py | 8848digital/support-tickets | cac253b8ce14c08cd9c3cd498c35d23635deb300 | ["MIT"] | null | null | null | support_tickets/support_tickets/doctype/support_ticket/support_ticket.py | 8848digital/support-tickets | cac253b8ce14c08cd9c3cd498c35d23635deb300 | ["MIT"] | null | null | null | support_tickets/support_tickets/doctype/support_ticket/support_ticket.py | 8848digital/support-tickets | cac253b8ce14c08cd9c3cd498c35d23635deb300 | ["MIT"] | 2 | 2021-11-09T10:23:43.000Z | 2021-12-28T18:35:19.000Z |
# Copyright (c) 2021, Nirali Satapara and contributors
# For license information, please see license.txt
import frappe
from frappe.model.document import Document
import requests
import json
from support_tickets.api import validate_and_get_project
from frappe.utils import get_url, nowdate
from frappe.utils.data import get_absolute_url
class SupportTicket(Document):
def validate(self):
if self.is_new():
project = validate_and_get_project()
support_settings = frappe.get_single("Support Settings")
server_api_key = support_settings.server_api_key
server_api_secret = support_settings.get_password('server_api_secret')
headers = {'Authorization':'token ' + server_api_key + ':' + server_api_secret,'Content-Type': 'application/json',
'Cookie': 'full_name=Guest; sid=Guest; system_user=no; user_id=Guest; user_image=' }
self.create_issue(headers,project)
#self.update_issue()
@frappe.whitelist()
def update_issue(self):
project = validate_and_get_project()
support_settings = frappe.get_single("Support Settings")
server_api_key = support_settings.server_api_key
server_api_secret = support_settings.get_password('server_api_secret')
server_url = support_settings.server_url
headers = {'Authorization':'token ' + server_api_key + ':' + server_api_secret }
if self.is_new():
self.create_issue(headers,project)
if self.partner_support_id and self.updates:
url = server_url + '/api/resource/Issue/'+self.partner_support_id
r = requests.request("GET", url, headers=headers)
response = r.json()
issue_updates = response['message']['issue_updates']
            support_ticket_update = []
if not issue_updates:
for d in self.updates:
if not d.issue_update_id:
support_ticket_update.append({"description": d.description,"support_ticket_update_id": d.name})
else:
support_ticket_reference_list = [d.get("support_ticket_update_id") for d in issue_updates]
for d in self.updates:
if d.name not in support_ticket_reference_list and not d.issue_update_id:
support_ticket_update.append({"description": d.description,"support_ticket_update_id": d.name})
data = {"issue_updates": issue_updates + support_ticket_update}
try:
r_put = requests.request("PUT", url, headers=headers, data = json.dumps(data))
response_put = r_put.json()
if r_put.status_code == 200:
frappe.msgprint(f"Issue {self.partner_support_id} updated.")
except Exception as e:
frappe.throw(str(e))
def create_issue(self, headers, project):
support_ticket_reference = get_url() + get_absolute_url(self.doctype,self.name)
server_url = frappe.db.get_single_value('Support Settings','server_url')
url = server_url + "/api/resource/Issue"
data = {'subject':self.subject,'description':self.description,'project':project,'support_ticket_reference':support_ticket_reference}
try:
r = requests.request("POST", url, headers=headers, data=json.dumps(data))
except Exception as e:
frappe.throw(str(e))
response = r.json()
self.partner_support_id = response['message']['name']
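# Illustrative request shape (assumption, mirroring create_issue above; the
# host, token and payload values are hypothetical):
#     requests.post('https://support.example.com/api/resource/Issue',
#                   headers={'Authorization': 'token KEY:SECRET',
#                            'Content-Type': 'application/json'},
#                   data=json.dumps({'subject': 'Subject', 'project': 'PROJ-0001'}))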
| 39.423077 | 134 | 0.75252 |
759d87cd5ce17b7c3f2124dad2b1e08d438ce481 | 4,675 | py | Python | ansible/modules/storage/infinidat/infini_vol.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | ["MIT", "BSD-2-Clause", "BSD-3-Clause"] | null | null | null | ansible/modules/storage/infinidat/infini_vol.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | ["MIT", "BSD-2-Clause", "BSD-3-Clause"] | null | null | null | ansible/modules/storage/infinidat/infini_vol.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | ["MIT", "BSD-2-Clause", "BSD-3-Clause"] | 1 | 2020-02-13T14:24:57.000Z | 2020-02-13T14:24:57.000Z |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Gregory Shulov (gregory.shulov@gmail.com)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: infini_vol
version_added: 2.3
short_description: Create, Delete or Modify volumes on Infinibox
description:
- This module creates, deletes or modifies volume on Infinibox.
author: Gregory Shulov (@GR360RY)
options:
name:
description:
- Volume Name
required: true
state:
description:
- Creates/Modifies volume when present or removes when absent
required: false
default: present
choices: [ "present", "absent" ]
size:
description:
- Volume size in MB, GB or TB units. See examples.
required: false
pool:
description:
- Pool that volume will reside on
required: true
extends_documentation_fragment:
- infinibox
'''
EXAMPLES = '''
- name: Create new volume named foo under pool named bar
infini_vol:
name: foo
size: 1TB
pool: bar
state: present
user: admin
password: secret
system: ibox001
'''
RETURN = '''
'''
HAS_INFINISDK = True
try:
from infinisdk import InfiniBox, core
except ImportError:
HAS_INFINISDK = False
from ansible.module_utils.infinibox import *
from capacity import KiB, Capacity
@api_wrapper
def get_pool(module, system):
"""Return Pool or None"""
try:
return system.pools.get(name=module.params['pool'])
    except Exception:
return None
@api_wrapper
def get_volume(module, system):
"""Return Volume or None"""
try:
return system.volumes.get(name=module.params['name'])
    except Exception:
return None
@api_wrapper
def create_volume(module, system):
"""Create Volume"""
if not module.check_mode:
volume = system.volumes.create(name=module.params['name'], pool=get_pool(module, system))
if module.params['size']:
size = Capacity(module.params['size']).roundup(64 * KiB)
volume.update_size(size)
module.exit_json(changed=True)
@api_wrapper
def update_volume(module, volume):
"""Update Volume"""
changed = False
if module.params['size']:
size = Capacity(module.params['size']).roundup(64 * KiB)
if volume.get_size() != size:
if not module.check_mode:
volume.update_size(size)
changed = True
module.exit_json(changed=changed)
@api_wrapper
def delete_volume(module, volume):
""" Delete Volume"""
if not module.check_mode:
volume.delete()
module.exit_json(changed=True)
def main():
argument_spec = infinibox_argument_spec()
argument_spec.update(
dict(
name = dict(required=True),
state = dict(default='present', choices=['present', 'absent']),
pool = dict(required=True),
size = dict()
)
)
module = AnsibleModule(argument_spec, supports_check_mode=True)
if not HAS_INFINISDK:
module.fail_json(msg='infinisdk is required for this module')
if module.params['size']:
try:
Capacity(module.params['size'])
        except Exception:
module.fail_json(msg='size (Physical Capacity) should be defined in MB, GB, TB or PB units')
state = module.params['state']
system = get_system(module)
pool = get_pool(module, system)
volume = get_volume(module, system)
if pool is None:
module.fail_json(msg='Pool {} not found'.format(module.params['pool']))
if state == 'present' and not volume:
create_volume(module, system)
elif state == 'present' and volume:
update_volume(module, volume)
elif state == 'absent' and volume:
delete_volume(module, volume)
elif state == 'absent' and not volume:
module.exit_json(changed=False)
# Import Ansible Utilities
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
| 26.412429 | 104 | 0.658182 |
0bad6cd31a08fad028810c8bf15610a4b6f9a4f1 | 3,300 | py | Python | facebook_business/adobjects/pageadminnote.py | MyrikLD/facebook-python-business-sdk | a53c8ba0e8f7d0b41b385c60089f6ba00fa5c814 | ["CNRI-Python"] | 576 | 2018-05-01T19:09:32.000Z | 2022-03-31T11:45:11.000Z | facebook_business/adobjects/pageadminnote.py | MyrikLD/facebook-python-business-sdk | a53c8ba0e8f7d0b41b385c60089f6ba00fa5c814 | ["CNRI-Python"] | 217 | 2018-05-03T07:31:59.000Z | 2022-03-29T14:19:52.000Z | facebook_business/adobjects/pageadminnote.py | MyrikLD/facebook-python-business-sdk | a53c8ba0e8f7d0b41b385c60089f6ba00fa5c814 | ["CNRI-Python"] | 323 | 2018-05-01T20:32:26.000Z | 2022-03-29T07:05:12.000Z |
# Copyright 2014 Facebook, Inc.
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from facebook_business.adobjects.abstractobject import AbstractObject
from facebook_business.adobjects.abstractcrudobject import AbstractCrudObject
from facebook_business.adobjects.objectparser import ObjectParser
from facebook_business.api import FacebookRequest
from facebook_business.typechecker import TypeChecker
"""
This class is auto-generated.
For any issues or feature requests related to this class, please let us know on
github and we'll fix in our codegen framework. We'll not be able to accept
pull request for this class.
"""
class PageAdminNote(
AbstractCrudObject,
):
def __init__(self, fbid=None, parent_id=None, api=None):
self._isPageAdminNote = True
super(PageAdminNote, self).__init__(fbid, parent_id, api)
class Field(AbstractObject.Field):
body = 'body'
field_from = 'from'
id = 'id'
note_label = 'note_label'
user = 'user'
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=PageAdminNote,
api_type='NODE',
response_parser=ObjectParser(reuse_object=self),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
_field_types = {
'body': 'string',
'from': 'Page',
'id': 'string',
'note_label': 'string',
'user': 'User',
}
@classmethod
def _get_field_enum_info(cls):
field_enum_info = {}
return field_enum_info
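# Hedged usage sketch (not part of the generated class; the node id is
# hypothetical):
#     note = PageAdminNote('NOTE_ID')
#     note.api_get(fields=[PageAdminNote.Field.body, PageAdminNote.Field.user])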
| 35.483871 | 103 | 0.685152 |
3d7358e0c2f4edf80f80747440716254e7b61aa6 | 16,574 | py | Python | lib/python3.6/site-packages/statsmodels/stats/gof.py | KshitizSharmaV/Quant_Platform_Python | d784aa0604d8de5ba5ca0c3a171e3556c0cd6b39 | ["BSD-3-Clause"] | 1 | 2020-05-09T08:42:52.000Z | 2020-05-09T08:42:52.000Z | lib/python3.6/site-packages/statsmodels/stats/gof.py | KshitizSharmaV/Quant_Platform_Python | d784aa0604d8de5ba5ca0c3a171e3556c0cd6b39 | ["BSD-3-Clause"] | null | null | null | lib/python3.6/site-packages/statsmodels/stats/gof.py | KshitizSharmaV/Quant_Platform_Python | d784aa0604d8de5ba5ca0c3a171e3556c0cd6b39 | ["BSD-3-Clause"] | 1 | 2020-05-09T08:42:58.000Z | 2020-05-09T08:42:58.000Z |
'''extra statistical function and helper functions
contains:
* goodness-of-fit tests
- powerdiscrepancy
- gof_chisquare_discrete
- gof_binning_discrete
Author: Josef Perktold
License : BSD-3
changes
-------
2013-02-25 : add chisquare_power, effectsize and "value"
'''
from statsmodels.compat.python import lrange, string_types
import numpy as np
from scipy import stats
# copied from regression/stats.utils
def powerdiscrepancy(observed, expected, lambd=0.0, axis=0, ddof=0):
r"""Calculates power discrepancy, a class of goodness-of-fit tests
as a measure of discrepancy between observed and expected data.
This contains several goodness-of-fit tests as special cases, see the
    description of lambd, the exponent of the power discrepancy. The pvalue
is based on the asymptotic chi-square distribution of the test statistic.
freeman_tukey:
D(x|\theta) = \sum_j (\sqrt{x_j} - \sqrt{e_j})^2
Parameters
----------
    observed : iterable
        Observed values
    expected : iterable
        Expected values
lambd : float or string
* float : exponent `a` for power discrepancy
* 'loglikeratio': a = 0
* 'freeman_tukey': a = -0.5
* 'pearson': a = 1 (standard chisquare test statistic)
* 'modified_loglikeratio': a = -1
* 'cressie_read': a = 2/3
* 'neyman' : a = -2 (Neyman-modified chisquare, reference from a book?)
axis : int
axis for observations of one series
ddof : int
degrees of freedom correction,
Returns
-------
D_obs : Discrepancy of observed values
pvalue : pvalue
References
----------
Cressie, Noel and Timothy R. C. Read, Multinomial Goodness-of-Fit Tests,
Journal of the Royal Statistical Society. Series B (Methodological),
Vol. 46, No. 3 (1984), pp. 440-464
Campbell B. Read: Freeman-Tukey chi-squared goodness-of-fit statistics,
Statistics & Probability Letters 18 (1993) 271-278
Nobuhiro Taneichi, Yuri Sekiya, Akio Suzukawa, Asymptotic Approximations
for the Distributions of the Multinomial Goodness-of-Fit Statistics
    under Local Alternatives, Journal of Multivariate Analysis 81, 335-359 (2002)
    Steele, M., C. Hurst and J. Chaseling, Simulated Power of Discrete
Goodness-of-Fit Tests for Likert Type Data
Examples
--------
>>> observed = np.array([ 2., 4., 2., 1., 1.])
>>> expected = np.array([ 0.2, 0.2, 0.2, 0.2, 0.2])
for checking correct dimension with multiple series
>>> powerdiscrepancy(np.column_stack((observed,observed)).T, 10*expected, lambd='freeman_tukey',axis=1)
(array([[ 2.745166, 2.745166]]), array([[ 0.6013346, 0.6013346]]))
>>> powerdiscrepancy(np.column_stack((observed,observed)).T, 10*expected,axis=1)
(array([[ 2.77258872, 2.77258872]]), array([[ 0.59657359, 0.59657359]]))
>>> powerdiscrepancy(np.column_stack((observed,observed)).T, 10*expected, lambd=0,axis=1)
(array([[ 2.77258872, 2.77258872]]), array([[ 0.59657359, 0.59657359]]))
>>> powerdiscrepancy(np.column_stack((observed,observed)).T, 10*expected, lambd=1,axis=1)
(array([[ 3., 3.]]), array([[ 0.5578254, 0.5578254]]))
>>> powerdiscrepancy(np.column_stack((observed,observed)).T, 10*expected, lambd=2/3.0,axis=1)
(array([[ 2.89714546, 2.89714546]]), array([[ 0.57518277, 0.57518277]]))
>>> powerdiscrepancy(np.column_stack((observed,observed)).T, expected, lambd=2/3.0,axis=1)
(array([[ 2.89714546, 2.89714546]]), array([[ 0.57518277, 0.57518277]]))
>>> powerdiscrepancy(np.column_stack((observed,observed)), expected, lambd=2/3.0, axis=0)
(array([[ 2.89714546, 2.89714546]]), array([[ 0.57518277, 0.57518277]]))
each random variable can have different total count/sum
>>> powerdiscrepancy(np.column_stack((observed,2*observed)), expected, lambd=2/3.0, axis=0)
(array([[ 2.89714546, 5.79429093]]), array([[ 0.57518277, 0.21504648]]))
>>> powerdiscrepancy(np.column_stack((observed,2*observed)), expected, lambd=2/3.0, axis=0)
(array([[ 2.89714546, 5.79429093]]), array([[ 0.57518277, 0.21504648]]))
>>> powerdiscrepancy(np.column_stack((2*observed,2*observed)), expected, lambd=2/3.0, axis=0)
(array([[ 5.79429093, 5.79429093]]), array([[ 0.21504648, 0.21504648]]))
>>> powerdiscrepancy(np.column_stack((2*observed,2*observed)), 20*expected, lambd=2/3.0, axis=0)
(array([[ 5.79429093, 5.79429093]]), array([[ 0.21504648, 0.21504648]]))
>>> powerdiscrepancy(np.column_stack((observed,2*observed)), np.column_stack((10*expected,20*expected)), lambd=2/3.0, axis=0)
(array([[ 2.89714546, 5.79429093]]), array([[ 0.57518277, 0.21504648]]))
>>> powerdiscrepancy(np.column_stack((observed,2*observed)), np.column_stack((10*expected,20*expected)), lambd=-1, axis=0)
(array([[ 2.77258872, 5.54517744]]), array([[ 0.59657359, 0.2357868 ]]))
"""
o = np.array(observed)
e = np.array(expected)
if not isinstance(lambd, string_types):
a = lambd
else:
if lambd == 'loglikeratio': a = 0
elif lambd == 'freeman_tukey': a = -0.5
elif lambd == 'pearson': a = 1
elif lambd == 'modified_loglikeratio': a = -1
elif lambd == 'cressie_read': a = 2/3.0
else:
raise ValueError('lambd has to be a number or one of '
'loglikeratio, freeman_tukey, pearson, '
'modified_loglikeratio or cressie_read')
n = np.sum(o, axis=axis)
nt = n
if n.size>1:
n = np.atleast_2d(n)
if axis == 1:
nt = n.T # need both for 2d, n and nt for broadcasting
if e.ndim == 1:
e = np.atleast_2d(e)
if axis == 0:
e = e.T
if np.all(np.sum(e, axis=axis) == n):
p = e/(1.0*nt)
elif np.all(np.sum(e, axis=axis) == 1):
p = e
e = nt * e
else:
raise ValueError('observed and expected need to have the same '
'number of observations, or e needs to add to 1')
k = o.shape[axis]
if e.shape[axis] != k:
raise ValueError('observed and expected need to have the same '
'number of bins')
# Note: taken from formulas, to simplify cancel n
if a == 0: # log likelihood ratio
D_obs = 2*n * np.sum(o/(1.0*nt) * np.log(o/e), axis=axis)
elif a == -1: # modified log likelihood ratio
D_obs = 2*n * np.sum(e/(1.0*nt) * np.log(e/o), axis=axis)
else:
D_obs = 2*n/a/(a+1) * np.sum(o/(1.0*nt) * ((o/e)**a - 1), axis=axis)
return D_obs, stats.chi2.sf(D_obs,k-1-ddof)
#todo: need also binning for continuous distribution
# and separated binning function to be used for powerdiscrepancy
def gof_chisquare_discrete(distfn, arg, rvs, alpha, msg):
'''perform chisquare test for random sample of a discrete distribution
Parameters
----------
    distfn : distribution instance
        discrete distribution (e.g. from scipy.stats)
    arg : sequence
        parameters of the distribution
    rvs : array_like
        random sample to test
    alpha : float
        significance level, threshold for p-value
    msg : string
        identifier included in the returned result message
Returns
-------
result : bool
0 if test passes, 1 if test fails
Notes
-----
originally written for scipy.stats test suite,
still needs to be checked for standalone usage, insufficient input checking
may not run yet (after copy/paste)
refactor: maybe a class, check returns, or separate binning from
test results
'''
# define parameters for test
## n=2000
n = len(rvs)
nsupp = 20
wsupp = 1.0/nsupp
## distfn = getattr(stats, distname)
## np.random.seed(9765456)
## rvs = distfn.rvs(size=n,*arg)
# construct intervals with minimum mass 1/nsupp
    # intervals are left-half-open as in a cdf difference
distsupport = lrange(max(distfn.a, -1000), min(distfn.b, 1000) + 1)
last = 0
distsupp = [max(distfn.a, -1000)]
distmass = []
for ii in distsupport:
current = distfn.cdf(ii,*arg)
if current - last >= wsupp-1e-14:
distsupp.append(ii)
distmass.append(current - last)
last = current
if current > (1-wsupp):
break
if distsupp[-1] < distfn.b:
distsupp.append(distfn.b)
distmass.append(1-last)
distsupp = np.array(distsupp)
distmass = np.array(distmass)
# convert intervals to right-half-open as required by histogram
histsupp = distsupp+1e-8
histsupp[0] = distfn.a
# find sample frequencies and perform chisquare test
#TODO: move to compatibility.py
freq, hsupp = np.histogram(rvs,histsupp)
cdfs = distfn.cdf(distsupp,*arg)
(chis,pval) = stats.chisquare(np.array(freq),n*distmass)
    return chis, pval, (pval > alpha), 'chisquare - test for %s ' \
           'at arg = %s with pval = %s' % (msg, str(arg), str(pval))
# copy/paste, remove code duplication when it works
def gof_binning_discrete(rvs, distfn, arg, nsupp=20):
'''get bins for chisquare type gof tests for a discrete distribution
Parameters
----------
rvs : array
sample data
    distfn : distribution instance
        discrete distribution (e.g. from scipy.stats)
arg : sequence
parameters of distribution
nsupp : integer
number of bins. The algorithm tries to find bins with equal weights.
depending on the distribution, the actual number of bins can be smaller.
Returns
-------
freq : array
empirical frequencies for sample; not normalized, adds up to sample size
expfreq : array
theoretical frequencies according to distribution
histsupp : array
bin boundaries for histogram, (added 1e-8 for numerical robustness)
Notes
-----
The results can be used for a chisquare test ::
(chis,pval) = stats.chisquare(freq, expfreq)
originally written for scipy.stats test suite,
still needs to be checked for standalone usage, insufficient input checking
may not run yet (after copy/paste)
refactor: maybe a class, check returns, or separate binning from
test results
todo :
optimal number of bins ? (check easyfit),
recommendation in literature at least 5 expected observations in each bin
'''
# define parameters for test
## n=2000
n = len(rvs)
wsupp = 1.0/nsupp
## distfn = getattr(stats, distname)
## np.random.seed(9765456)
## rvs = distfn.rvs(size=n,*arg)
# construct intervals with minimum mass 1/nsupp
    # intervals are left-half-open as in a cdf difference
distsupport = lrange(max(distfn.a, -1000), min(distfn.b, 1000) + 1)
last = 0
distsupp = [max(distfn.a, -1000)]
distmass = []
for ii in distsupport:
current = distfn.cdf(ii,*arg)
if current - last >= wsupp-1e-14:
distsupp.append(ii)
distmass.append(current - last)
last = current
if current > (1-wsupp):
break
if distsupp[-1] < distfn.b:
distsupp.append(distfn.b)
distmass.append(1-last)
distsupp = np.array(distsupp)
distmass = np.array(distmass)
# convert intervals to right-half-open as required by histogram
histsupp = distsupp+1e-8
histsupp[0] = distfn.a
# find sample frequencies and perform chisquare test
freq,hsupp = np.histogram(rvs,histsupp)
#freq,hsupp = np.histogram(rvs,histsupp,new=True)
cdfs = distfn.cdf(distsupp,*arg)
return np.array(freq), n*distmass, histsupp
# -*- coding: utf-8 -*-
"""Extension to chisquare goodness-of-fit test
Created on Mon Feb 25 13:46:53 2013
Author: Josef Perktold
License: BSD-3
"""
def chisquare(f_obs, f_exp=None, value=0, ddof=0, return_basic=True):
'''chisquare goodness-of-fit test
The null hypothesis is that the distance between the expected distribution
and the observed frequencies is ``value``. The alternative hypothesis is
that the distance is larger than ``value``. ``value`` is normalized in
terms of effect size.
The standard chisquare test has the null hypothesis that ``value=0``, that
is the distributions are the same.
Notes
-----
The case with value greater than zero is similar to an equivalence test,
that the exact null hypothesis is replaced by an approximate hypothesis.
However, TOST "reverses" null and alternative hypothesis, while here the
alternative hypothesis is that the distance (divergence) is larger than a
threshold.
References
----------
McLaren, ...
Drost,...
See Also
--------
powerdiscrepancy
scipy.stats.chisquare
'''
f_obs = np.asarray(f_obs)
n_bins = len(f_obs)
nobs = f_obs.sum(0)
if f_exp is None:
# uniform distribution
f_exp = np.empty(n_bins, float)
f_exp.fill(nobs / float(n_bins))
f_exp = np.asarray(f_exp, float)
chisq = ((f_obs - f_exp)**2 / f_exp).sum(0)
if value == 0:
pvalue = stats.chi2.sf(chisq, n_bins - 1 - ddof)
else:
pvalue = stats.ncx2.sf(chisq, n_bins - 1 - ddof, value**2 * nobs)
if return_basic:
return chisq, pvalue
else:
return chisq, pvalue #TODO: replace with TestResults
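# Hedged usage sketch (not part of the original module; counts are made up):
#     f_obs = np.array([30, 20, 25, 25])
#     chisq, pval = chisquare(f_obs)            # H0: uniform over 4 bins
#     chisq, pval = chisquare(f_obs, value=0.1) # H0: distance equals 0.1 (noncentral chi2)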
def chisquare_power(effect_size, nobs, n_bins, alpha=0.05, ddof=0):
'''power of chisquare goodness of fit test
effect size is sqrt of chisquare statistic divided by nobs
Parameters
----------
effect_size : float
This is the deviation from the Null of the normalized chi_square
statistic. This follows Cohen's definition (sqrt).
nobs : int or float
number of observations
n_bins : int (or float)
number of bins, or points in the discrete distribution
alpha : float in (0,1)
significance level of the test, default alpha=0.05
Returns
-------
power : float
power of the test at given significance level at effect size
Notes
-----
This function also works vectorized if all arguments broadcast.
This can also be used to calculate the power for power divergence test.
However, for the range of more extreme values of the power divergence
parameter, this power is not a very good approximation for samples of
small to medium size (Drost et al. 1989)
References
----------
Drost, ...
See Also
--------
chisquare_effectsize
statsmodels.stats.GofChisquarePower
'''
crit = stats.chi2.isf(alpha, n_bins - 1 - ddof)
power = stats.ncx2.sf(crit, n_bins - 1 - ddof, effect_size**2 * nobs)
return power
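# Hedged usage sketch (values are made up): power to detect a Cohen effect
# size of 0.3 with 200 observations over 5 bins at the default alpha:
#     chisquare_power(0.3, nobs=200, n_bins=5)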
def chisquare_effectsize(probs0, probs1, correction=None, cohen=True, axis=0):
'''effect size for a chisquare goodness-of-fit test
Parameters
----------
probs0 : array_like
probabilities or cell frequencies under the Null hypothesis
probs1 : array_like
probabilities or cell frequencies under the Alternative hypothesis
probs0 and probs1 need to have the same length in the ``axis`` dimension.
and broadcast in the other dimensions
Both probs0 and probs1 are normalized to add to one (in the ``axis``
dimension).
correction : None or tuple
If None, then the effect size is the chisquare statistic divide by
the number of observations.
If the correction is a tuple (nobs, df), then the effectsize is
corrected to have less bias and a smaller variance. However, the
correction can make the effectsize negative. In that case, the
effectsize is set to zero.
    Pederson and Johnson (1990) as referenced in McLaren et al. (1994)
cohen : bool
If True, then the square root is returned as in the definition of the
effect size by Cohen (1977), If False, then the original effect size
is returned.
axis : int
If the probability arrays broadcast to more than 1 dimension, then
this is the axis over which the sums are taken.
Returns
-------
effectsize : float
effect size of chisquare test
'''
probs0 = np.asarray(probs0, float)
probs1 = np.asarray(probs1, float)
probs0 = probs0 / probs0.sum(axis)
probs1 = probs1 / probs1.sum(axis)
d2 = ((probs1 - probs0)**2 / probs0).sum(axis)
if correction is not None:
nobs, df = correction
diff = ((probs1 - probs0) / probs0).sum(axis)
d2 = np.maximum((d2 * nobs - diff - df) / (nobs - 1.), 0)
if cohen:
return np.sqrt(d2)
else:
return d2
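# Hedged end-to-end sketch (probabilities are made up): compute the Cohen
# effect size between two discrete distributions, then the implied power:
#     p0 = [0.25, 0.25, 0.25, 0.25]
#     p1 = [0.30, 0.30, 0.20, 0.20]
#     es = chisquare_effectsize(p0, p1)
#     power = chisquare_power(es, nobs=500, n_bins=4)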
| 33.963115 | 129 | 0.635151 |
13f986181749dd8ce53e92b3a1daf9566558cffb | 4,414 | py | Python | contrib/seeds/generate-seeds.py | puzcoin/tpc | 60d0df689be4f215ecadfbb9c0df823b6e916758 | ["MIT"] | null | null | null | contrib/seeds/generate-seeds.py | puzcoin/tpc | 60d0df689be4f215ecadfbb9c0df823b6e916758 | ["MIT"] | null | null | null | contrib/seeds/generate-seeds.py | puzcoin/tpc | 60d0df689be4f215ecadfbb9c0df823b6e916758 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Copyright (c) 2015-2018 The PIVX developers
# Copyright (c) 2018 The Tpc developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef BITCOIN_CHAINPARAMSSEEDS_H\n')
g.write('#define BITCOIN_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the bitcoin network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 12700)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 51474)
g.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
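# Address-mapping sketch (illustration only): name_to_ipv6 wraps a dotted-quad
# IPv4 address in the standard ::ffff: prefix defined by pchIPv4 above, e.g.
#     name_to_ipv6('1.2.3.4') == pchIPv4 + bytearray([1, 2, 3, 4])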
| 31.528571 | 98 | 0.584957 |
3104f6810c3585c8a4540d18cdc099d97189fa4e | 4,555 | py | Python | storm_analysis/test/test_scmos_cal.py | oxfordni/storm-analysis | 835a5c17497c563c3632db561ae7e7c9144a8dd1 | ["CNRI-Python"] | null | null | null | storm_analysis/test/test_scmos_cal.py | oxfordni/storm-analysis | 835a5c17497c563c3632db561ae7e7c9144a8dd1 | ["CNRI-Python"] | null | null | null | storm_analysis/test/test_scmos_cal.py | oxfordni/storm-analysis | 835a5c17497c563c3632db561ae7e7c9144a8dd1 | ["CNRI-Python"] | null | null | null |
#!/usr/bin/env python
"""
sCMOS calibration tests.
"""
import numpy
import numpy.random
import tifffile
import storm_analysis
import storm_analysis.simulator.camera as camera
import storm_analysis.sCMOS.camera_calibration as camCal
import storm_analysis.sCMOS.movie_to_calib_format as movieToCalFmt
def test_create_cal_1():
cal_name = storm_analysis.getPathOutputTest("scmos.npy")
x_size = 110
y_size = 100
gain = 2.0
offset = 100.0
read_noise = 1.0
[cam_offset, cam_variance, cam_gain] = camera.createSCMOSCalibration(cal_name,
x_size,
y_size,
gain,
read_noise,
hot_fraction = 0.0,
hot_lambda = 10.0,
offset = offset)
# Check values.
assert numpy.allclose(cam_offset, offset * numpy.ones((x_size, y_size)))
assert numpy.allclose(cam_gain, gain * numpy.ones((x_size, y_size)))
assert (abs(numpy.mean(cam_variance/(gain*gain)) - 1.0) < 0.1)
def test_create_cal_2():
cal_name = storm_analysis.getPathOutputTest("scmos.npy")
x_size = 110
y_size = 100
gain = 2.0
offset = 100.0
read_noise = 0.0
[cam_offset, cam_variance, cam_gain] = camera.createSCMOSCalibration(cal_name,
x_size,
y_size,
gain,
read_noise,
hot_fraction = 1.0,
hot_lambda = 10.0,
offset = offset)
# Check values.
assert numpy.allclose(cam_offset, offset * numpy.ones((x_size, y_size)))
assert numpy.allclose(cam_gain, gain * numpy.ones((x_size, y_size)))
assert (abs(numpy.mean(numpy.sqrt(cam_variance))/(gain * 10.0) - 1.0) < 0.1)
def test_mtcf_1():
tif_name = storm_analysis.getPathOutputTest("mtcf.tif")
rd_noise = numpy.ones((10,5))
rd_noise[5:,:] = 2.0
offset = 10.0*numpy.ones(rd_noise.shape)
offset[3:,:] = 20.0
# Create calibration movie.
with tifffile.TiffWriter(tif_name) as tf:
for i in range(1000):
image = numpy.random.normal(scale = rd_noise, size = rd_noise.shape)
image += offset
tf.save(numpy.round(image).astype(numpy.uint16))
[frame_mean, N, NN] = movieToCalFmt.movieToCalibration(tif_name)
mean = N/float(frame_mean.size)
variance = NN/float(frame_mean.size) - mean*mean
rd_sqr = rd_noise * rd_noise
assert(numpy.allclose(mean, offset, rtol = 0.1))
assert(numpy.allclose(rd_sqr, variance, rtol = 0.5))
def test_cam_cal_1():
size = (12,10)
cam_gain = 1.5 * numpy.ones(size)
cam_offset = 1000.0 * numpy.ones(size)
cam_var = 2.0 * numpy.ones(size)
n_frames = 20000
# Create calibration files.
scmos_files = []
for i, name in enumerate(["dark.npy", "light1.npy", "light2.npy", "light3.npy", "light4.npy"]):
f_name = storm_analysis.getPathOutputTest(name)
scmos_files.append(f_name)
mean = i * 500 * cam_gain
var = mean * cam_gain + cam_var
mean += cam_offset
N = mean * n_frames
NN = (var + mean*mean) * n_frames
numpy.save(f_name, [numpy.array([n_frames]), N, NN])
# Check.
[cal_offset, cal_var, cal_gain] = camCal.cameraCalibration(scmos_files,
show_fit_plots = False,
show_mean_plots = False)
assert(numpy.allclose(cal_offset, cam_offset))
assert(numpy.allclose(cal_var, cam_var))
assert(numpy.allclose(cal_gain, cam_gain))
if (__name__ == "__main__"):
test_create_cal_1()
test_create_cal_2()
test_mtcf_1()
test_cam_cal_1()
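# Model note (sketch, inferred from the constructions above): the calibration
# tests synthesize moments under the usual sCMOS pixel model
#     variance = gain * mean_signal + read_variance
# which is exactly how N and NN are built in test_cam_cal_1.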
| 35.038462 | 99 | 0.496817 |
fedd40d8ac52c6a92c25f5ce5e14a65b22390c9f | 950 | py | Python | buildroot/support/testing/tests/core/test_rootfs_overlay.py | bramkragten/operating-system | 27fc2de146f1ef047316a4b58a236c72d26da81c | ["Apache-2.0"] | 617 | 2015-01-04T14:33:56.000Z | 2022-03-24T22:42:25.000Z | buildroot/support/testing/tests/core/test_rootfs_overlay.py | bramkragten/operating-system | 27fc2de146f1ef047316a4b58a236c72d26da81c | ["Apache-2.0"] | 631 | 2015-01-01T22:53:25.000Z | 2022-03-17T18:41:00.000Z | buildroot/support/testing/tests/core/test_rootfs_overlay.py | bramkragten/operating-system | 27fc2de146f1ef047316a4b58a236c72d26da81c | ["Apache-2.0"] | 133 | 2015-03-03T18:40:05.000Z | 2022-03-18T13:34:26.000Z |
import os
import subprocess
import infra.basetest
def compare_file(file1, file2):
return subprocess.call(["cmp", file1, file2])
class TestRootfsOverlay(infra.basetest.BRTest):
rootfs_overlay_path = infra.filepath("tests/core/rootfs-overlay")
config = infra.basetest.BASIC_TOOLCHAIN_CONFIG + \
infra.basetest.MINIMAL_CONFIG + \
"""
BR2_ROOTFS_OVERLAY="{0}1 {0}2"
""".format(rootfs_overlay_path)
def test_run(self):
target_file = os.path.join(self.builddir, "target", "test-file1")
overlay_file = "{}1/test-file1".format(self.rootfs_overlay_path)
ret = compare_file(overlay_file, target_file)
self.assertEqual(ret, 0)
target_file = os.path.join(self.builddir, "target", "etc", "test-file2")
overlay_file = "{}2/etc/test-file2".format(self.rootfs_overlay_path)
ret = compare_file(overlay_file, target_file)
self.assertEqual(ret, 0)
| 30.645161 | 80 | 0.677895 |
ab2053febc170faa88aeaf07e58b9f23c86d650b | 265,449 | py | Python | groupdocs/SignatureApi.py | groupdocs-legacy-sdk/python | 80e5ef5a9a14ac4a7815c6cf933b5b2997381455 | ["Apache-2.0"] | null | null | null | groupdocs/SignatureApi.py | groupdocs-legacy-sdk/python | 80e5ef5a9a14ac4a7815c6cf933b5b2997381455 | ["Apache-2.0"] | null | null | null | groupdocs/SignatureApi.py | groupdocs-legacy-sdk/python | 80e5ef5a9a14ac4a7815c6cf933b5b2997381455 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
"""
import sys
import os
from models import *
from groupdocs.FileStream import FileStream
from groupdocs.ApiClient import ApiException
class SignatureApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
self.__basePath = "https://api.groupdocs.com/v2.0"
@property
def basePath(self):
return self.__basePath
@basePath.setter
def basePath(self, value):
self.__basePath = value
def GetSignatureTemplateRecipients(self, userId, templateGuid, **kwargs):
"""Get template recipients
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
Returns: SignatureTemplateRecipientsResponse
"""
if( userId == None or templateGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureTemplateRecipients" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/recipients'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateRecipientsResponse')
return responseObject
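    # Example (hedged sketch): listing template recipients. The method
    # returns None when callAPI yields no response, so guard for that.
    # The status/result attribute names on the response object are
    # assumptions based on the usual GroupDocs response convention:
    #
    #   resp = api.GetSignatureTemplateRecipients(user_guid, template_guid)
    #   if resp is not None and resp.status == "Ok":
    #       for recipient in resp.result.recipients:
    #           print(recipient.email)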
def DeleteSignatureTemplateRecipient(self, userId, templateGuid, recipientGuid, **kwargs):
"""Remove recipient from template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or templateGuid == None or recipientGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'recipientGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureTemplateRecipient" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/recipients/{recipientGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def ModifySignatureTemplateRecipient(self, userId, templateGuid, recipientGuid, nickname, roleGuid, **kwargs):
"""Update template recipient
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
recipientGuid, str: Recipient GUID (required)
nickname, str: Nickname of the recipient (required)
roleGuid, str: Role GUID (required)
order, str: Display order of the recipient (optional)
Returns: SignatureTemplateRecipientResponse
"""
if( userId == None or templateGuid == None or recipientGuid == None or nickname == None or roleGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'recipientGuid', 'nickname', 'roleGuid', 'order']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureTemplateRecipient" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/recipient/{recipientGuid}?nickname={nickname}&role={roleGuid}&order={order}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('nickname' in params):
queryParams['nickname'] = self.apiClient.toPathValue(params['nickname'])
if ('roleGuid' in params):
queryParams['role'] = self.apiClient.toPathValue(params['roleGuid'])
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateRecipientResponse')
return responseObject
def AddSignatureTemplateDocument(self, userId, templateGuid, documentGuid, **kwargs):
"""Add document to template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
documentGuid, str: Document GUID (required)
order, int: Display order of the document (optional)
parseFields, bool: Try to parse fields in document (optional)
Returns: SignatureTemplateDocumentResponse
"""
if( userId == None or templateGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'documentGuid', 'order', 'parseFields']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddSignatureTemplateDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/document/{documentGuid}?parseFields={parseFields}&order={order}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('parseFields' in params):
queryParams['parseFields'] = self.apiClient.toPathValue(params['parseFields'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateDocumentResponse')
return responseObject
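    # Example (hedged sketch): attaching a document to a template. The
    # optional parameters are passed as keyword arguments and end up in
    # the query string ('order' and 'parseFields' above):
    #
    #   resp = api.AddSignatureTemplateDocument(user_guid, template_guid,
    #                                           document_guid,
    #                                           order=1, parseFields=True)
    #   if resp is not None and resp.status == "Ok":
    #       print(resp.result.document.name)   # attribute names assumed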
def GetSignatureTemplateDocuments(self, userId, templateGuid, **kwargs):
"""Get documents in template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
Returns: SignatureTemplateDocumentsResponse
"""
if( userId == None or templateGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureTemplateDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/documents'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateDocumentsResponse')
return responseObject
def DeleteSignatureTemplateDocument(self, userId, templateGuid, documentGuid, **kwargs):
"""Remove document from template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
documentGuid, str: Document GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or templateGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'documentGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureTemplateDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/documents/{documentGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def AddSignatureTemplateField(self, userId, templateGuid, documentGuid, recipientGuid, fieldGuid, **kwargs):
"""Add signature template field
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
documentGuid, str: Document GUID (required)
recipientGuid, str: Recipient GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureTemplateFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureTemplateFieldResponse
"""
if( userId == None or templateGuid == None or documentGuid == None or recipientGuid == None or fieldGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'documentGuid', 'recipientGuid', 'fieldGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddSignatureTemplateField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/documents/{documentGuid}/recipient/{recipientGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateFieldResponse')
return responseObject
def AssignSignatureTemplateField(self, userId, templateGuid, documentGuid, fieldGuid, **kwargs):
"""Assign signature template field
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
documentGuid, str: Document GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureTemplateAssignFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureTemplateFieldResponse
"""
if( userId == None or templateGuid == None or documentGuid == None or fieldGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'documentGuid', 'fieldGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AssignSignatureTemplateField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/documents/{documentGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateFieldResponse')
return responseObject
def ModifySignatureTemplateField(self, userId, templateGuid, documentGuid, fieldGuid, **kwargs):
"""Modify signature template field
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
documentGuid, str: Document GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureTemplateFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureTemplateFieldResponse
"""
if( userId == None or templateGuid == None or documentGuid == None or fieldGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'documentGuid', 'fieldGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureTemplateField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/documents/{documentGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateFieldResponse')
return responseObject
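    # Example (hedged sketch): the usual field lifecycle is add, then
    # assign or modify. SignatureTemplateFieldSettingsInfo comes from the
    # wildcard models import at the top of this file; the attribute names
    # set below are assumptions, not confirmed by this file:
    #
    #   settings = SignatureTemplateFieldSettingsInfo()
    #   settings.locationX, settings.locationY = 0.1, 0.2
    #   api.AddSignatureTemplateField(user_guid, template_guid,
    #                                 document_guid, recipient_guid,
    #                                 field_guid, body=settings)
    #   api.ModifySignatureTemplateField(user_guid, template_guid,
    #                                    document_guid, field_guid,
    #                                    body=settings)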
def DeleteSignatureTemplateFieldLocation(self, userId, templateGuid, fieldGuid, locationGuid, **kwargs):
"""Delete signature template field location
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
fieldGuid, str: Field GUID (required)
locationGuid, str: Field location GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or templateGuid == None or fieldGuid == None or locationGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'fieldGuid', 'locationGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureTemplateFieldLocation" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/fields/{fieldGuid}/locations/{locationGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
if ('locationGuid' in params):
replacement = str(self.apiClient.toPathValue(params['locationGuid']))
resourcePath = resourcePath.replace('{' + 'locationGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def ModifySignatureTemplateFieldLocation(self, userId, templateGuid, documentGuid, recipientGuid, fieldGuid, locationGuid, **kwargs):
"""Modify signature template field location
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
documentGuid, str: Document GUID (required)
recipientGuid, str: Recipient GUID (required)
fieldGuid, str: Field GUID (required)
locationGuid, str: Field location GUID (required)
body, SignatureTemplateFieldLocationSettingsInfo: Settings of the field location (optional)
Returns: SignatureTemplateFieldResponse
"""
if( userId == None or templateGuid == None or documentGuid == None or recipientGuid == None or fieldGuid == None or locationGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'documentGuid', 'recipientGuid', 'fieldGuid', 'locationGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureTemplateFieldLocation" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/documents/{documentGuid}/recipient/{recipientGuid}/fields/{fieldGuid}/locations/{locationGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
if ('locationGuid' in params):
replacement = str(self.apiClient.toPathValue(params['locationGuid']))
resourcePath = resourcePath.replace('{' + 'locationGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateFieldResponse')
return responseObject
def GetSignatureTemplateFields(self, userId, templateGuid, **kwargs):
"""Get template fields
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
documentGuid, str: Document GUID (optional)
recipientGuid, str: Recipient GUID (optional)
Returns: SignatureTemplateFieldsResponse
"""
if( userId == None or templateGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'documentGuid', 'recipientGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureTemplateFields" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/fields?document={documentGuid}&recipient={recipientGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('documentGuid' in params):
queryParams['document'] = self.apiClient.toPathValue(params['documentGuid'])
if ('recipientGuid' in params):
queryParams['recipient'] = self.apiClient.toPathValue(params['recipientGuid'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateFieldsResponse')
return responseObject
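    # Example (hedged sketch): the document/recipient filters are optional
    # keyword arguments and are sent as the 'document' and 'recipient'
    # query parameters:
    #
    #   resp = api.GetSignatureTemplateFields(user_guid, template_guid,
    #                                         documentGuid=document_guid,
    #                                         recipientGuid=recipient_guid)
    #   if resp is not None and resp.status == "Ok":
    #       print(len(resp.result.fields))   # result shape assumed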
def DeleteSignatureTemplateField(self, userId, templateGuid, fieldGuid, **kwargs):
"""Delete signature template field
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
fieldGuid, str: Field GUID (required)
Returns: SignatureTemplateResponse
"""
if( userId == None or templateGuid == None or fieldGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'fieldGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureTemplateField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/fields/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateResponse')
return responseObject
def GetSignatureTemplateResources(self, userId, **kwargs):
"""Get template resources
Args:
userId, str: User GUID (required)
Returns: SignatureTemplateResourcesResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureTemplateResources" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/resources'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateResourcesResponse')
return responseObject
def RenameSignatureTemplateDocument(self, userId, templateGuid, documentGuid, **kwargs):
"""Rename signature template document
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
documentGuid, str: Template Document GUID (required)
newName, str: New name of the document (optional)
Returns: SignatureTemplateDocumentResponse
"""
if( userId == None or templateGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'documentGuid', 'newName']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method RenameSignatureTemplateDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/document/{documentGuid}?newName={newName}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('newName' in params):
queryParams['newName'] = self.apiClient.toPathValue(params['newName'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateDocumentResponse')
return responseObject
def GetContacts(self, userId, **kwargs):
"""Get contacts
Args:
userId, str: User GUID (required)
page, int: Page number (optional)
records, int: Records count to be returned (optional)
firstName, str: Filter by firstName (optional)
lastName, str: Filter by lastName (optional)
email, str: Filter by email (optional)
Returns: SignatureContactsResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'page', 'records', 'firstName', 'lastName', 'email']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetContacts" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/contacts?firstName={firstName}&lastName={lastName}&email={email}&records={records}&page={page}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('page' in params):
queryParams['page'] = self.apiClient.toPathValue(params['page'])
if ('records' in params):
queryParams['records'] = self.apiClient.toPathValue(params['records'])
if ('firstName' in params):
queryParams['firstName'] = self.apiClient.toPathValue(params['firstName'])
if ('lastName' in params):
queryParams['lastName'] = self.apiClient.toPathValue(params['lastName'])
if ('email' in params):
queryParams['email'] = self.apiClient.toPathValue(params['email'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureContactsResponse')
return responseObject
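    # Example (hedged sketch): paging through contacts, 20 records at a
    # time, until an empty page comes back. The result.contacts shape is
    # an assumption:
    #
    #   page = 1
    #   while True:
    #       resp = api.GetContacts(user_guid, page=page, records=20)
    #       if resp is None or resp.status != "Ok" or not resp.result.contacts:
    #           break
    #       for contact in resp.result.contacts:
    #           print(contact.email)
    #       page += 1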
def AddContact(self, userId, body, **kwargs):
"""Add contact
Args:
userId, str: User GUID (required)
body, SignatureContactSettingsInfo: Contact data (required)
Returns: SignatureContactResponse
"""
if( userId == None or body == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddContact" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/contact'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureContactResponse')
return responseObject
def ModifyContact(self, userId, contactGuid, **kwargs):
"""Update contact
Args:
userId, str: User GUID (required)
contactGuid, str: Contact GUID (required)
body, SignatureContactSettingsInfo: Contact data (optional)
Returns: SignatureContactResponse
"""
if( userId == None or contactGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'contactGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifyContact" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/contacts/{contactGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('contactGuid' in params):
replacement = str(self.apiClient.toPathValue(params['contactGuid']))
resourcePath = resourcePath.replace('{' + 'contactGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureContactResponse')
return responseObject
def DeleteContact(self, userId, contactGuid, **kwargs):
"""Delete contact
Args:
userId, str: User GUID (required)
contactGuid, str: Contact GUID (required)
Returns: SignatureContactResponse
"""
if( userId == None or contactGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'contactGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteContact" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/contacts/{contactGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('contactGuid' in params):
replacement = str(self.apiClient.toPathValue(params['contactGuid']))
resourcePath = resourcePath.replace('{' + 'contactGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureContactResponse')
return responseObject
def ImportContacts(self, userId, **kwargs):
"""Import contacts
Args:
userId, str: User GUID (required)
body, List[SignatureContactSettingsInfo]: Array of SignatureContactSettingsInfo (optional)
Returns: SignatureContactsImportResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ImportContacts" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/contacts'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureContactsImportResponse')
return responseObject
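    # Example (hedged sketch): importing several contacts at once. The
    # body is a list of SignatureContactSettingsInfo models from the
    # wildcard models import; the attribute names below are assumptions:
    #
    #   contacts = []
    #   for first, last, email in [("Ada", "Lovelace", "ada@example.com")]:
    #       c = SignatureContactSettingsInfo()
    #       c.firstName, c.lastName, c.email = first, last, email
    #       contacts.append(c)
    #   resp = api.ImportContacts(user_guid, body=contacts)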
def AddContactIntegration(self, userId, **kwargs):
"""Add Contact Integration Authorization
Args:
userId, str: User GUID (required)
body, SignatureContactIntegrationSettings: Authorization settings (optional)
Returns: SignatureStatusResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddContactIntegration" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/integration'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def SignDocument(self, userId, **kwargs):
"""Sign document
Args:
userId, str: User GUID (required)
body, SignatureSignDocumentSettingsInfo: Settings of the signing document (optional)
Returns: SignatureSignDocumentResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method SignDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/sign'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureSignDocumentResponse')
return responseObject
def GetSignDocumentStatus(self, userId, jobGuid, **kwargs):
"""Get sign documents status
Args:
userId, str: User GUID (required)
jobGuid, str: Job GUID (required)
Returns: SignatureSignDocumentStatusResponse
"""
if( userId == None or jobGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'jobGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignDocumentStatus" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/documents/{jobGuid}/status'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('jobGuid' in params):
replacement = str(self.apiClient.toPathValue(params['jobGuid']))
resourcePath = resourcePath.replace('{' + 'jobGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureSignDocumentStatusResponse')
return responseObject
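    # Example (hedged sketch): signing is asynchronous -- SignDocument
    # kicks off a job, and GetSignDocumentStatus is then polled with the
    # job GUID. The jobId/status field names on the response objects are
    # assumptions:
    #
    #   import time
    #   resp = api.SignDocument(user_guid, body=sign_settings)
    #   job_guid = resp.result.jobId
    #   while True:
    #       status = api.GetSignDocumentStatus(user_guid, job_guid)
    #       if status.result.status in ("Completed", "Failed"):
    #           break
    #       time.sleep(5)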
def ArchiveSignatureEnvelope(self, userId, envelopeGuid, **kwargs):
"""Archive envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or envelopeGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ArchiveSignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/archive'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def GetEnvelopeAuditLogs(self, userId, envelopeGuid, **kwargs):
"""Get envelope audit logs
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureEnvelopeAuditLogsResponse
"""
if( userId == None or envelopeGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetEnvelopeAuditLogs" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/logs'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeAuditLogsResponse')
return responseObject
def CreateSignatureEnvelope(self, userId, **kwargs):
"""Create signature envelope
Args:
userId, str: User GUID (required)
name, str: Envelope name (optional)
templateGuid, str: GUID of the template used to create the new envelope (optional)
envelopeGuid, str: GUID of an existing envelope used to create the new envelope (optional)
documentGuid, str: GUID of a document to add to the newly created envelope (optional)
parseFields, bool: Try to parse fields in document (optional)
body, SignatureEnvelopeSettingsInfo: Settings of the new envelope (optional)
Returns: SignatureEnvelopeResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'name', 'templateGuid', 'envelopeGuid', 'documentGuid', 'parseFields', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method CreateSignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelope?name={name}&templateId={templateGuid}&envelopeId={envelopeGuid}&documentId={documentGuid}&parseFields={parseFields}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('templateGuid' in params):
queryParams['templateId'] = self.apiClient.toPathValue(params['templateGuid'])
if ('envelopeGuid' in params):
queryParams['envelopeId'] = self.apiClient.toPathValue(params['envelopeGuid'])
if ('documentGuid' in params):
queryParams['documentId'] = self.apiClient.toPathValue(params['documentGuid'])
if ('parseFields' in params):
queryParams['parseFields'] = self.apiClient.toPathValue(params['parseFields'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeResponse')
return responseObject
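    # Example (hedged sketch): creating an envelope from an existing
    # template. Only one of templateGuid/envelopeGuid/documentGuid is
    # normally supplied; the result.envelope shape is an assumption:
    #
    #   resp = api.CreateSignatureEnvelope(user_guid,
    #                                      name="Quarterly contract",
    #                                      templateGuid=template_guid,
    #                                      parseFields=True)
    #   if resp is not None and resp.status == "Ok":
    #       envelope_guid = resp.result.envelope.id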
def DeclineEnvelope(self, userId, envelopeGuid, recipientGuid, **kwargs):
"""Decline envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or envelopeGuid == None or recipientGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid', 'recipientGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeclineEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/recipient/{recipientGuid}/decline'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def DelegateEnvelopeRecipient(self, userId, envelopeGuid, recipientGuid, recipientEmail, recipientFirstName, recipientLastName, **kwargs):
"""Delegate envelope recipient
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
recipientEmail, str: Delegated recipient email (required)
recipientFirstName, str: Delegated recipient first name (required)
recipientLastName, str: Delegated recipient last name (required)
Returns: SignatureStatusResponse
"""
if( userId == None or envelopeGuid == None or recipientGuid == None or recipientEmail == None or recipientFirstName == None or recipientLastName == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid', 'recipientGuid', 'recipientEmail', 'recipientFirstName', 'recipientLastName']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DelegateEnvelopeRecipient" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/recipient/{recipientGuid}/delegate?email={recipientEmail}&firstname={recipientFirstName}&lastname={recipientLastName}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('recipientEmail' in params):
queryParams['email'] = self.apiClient.toPathValue(params['recipientEmail'])
if ('recipientFirstName' in params):
queryParams['firstname'] = self.apiClient.toPathValue(params['recipientFirstName'])
if ('recipientLastName' in params):
queryParams['lastname'] = self.apiClient.toPathValue(params['recipientLastName'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def DeleteSignatureEnvelope(self, userId, envelopeGuid, **kwargs):
"""Delete signature envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or envelopeGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def AddSignatureEnvelopeDocument(self, userId, envelopeGuid, documentGuid, **kwargs):
"""Add document in envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
order, int: Document order (optional)
parseFields, bool: Try to parse fields in document (optional)
Returns: SignatureEnvelopeDocumentResponse
"""
if( userId == None or envelopeGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid', 'documentGuid', 'order', 'parseFields']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddSignatureEnvelopeDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/document/{documentGuid}?parseFields={parseFields}&order={order}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('parseFields' in params):
queryParams['parseFields'] = self.apiClient.toPathValue(params['parseFields'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeDocumentResponse')
return responseObject
def GetSignedEnvelopeDocument(self, userId, envelopeGuid, documentGuid, **kwargs):
"""Get signed envelope document
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
Returns: stream
"""
if( userId == None or envelopeGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid', 'documentGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignedEnvelopeDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/document/{documentGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
return self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams, FileStream)
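    # Example (hedged sketch): stream-returning calls like the one above
    # hand back the raw FileStream from callAPI instead of a deserialized
    # model. FileStream is assumed here to expose an 'inputStream'
    # file-like object and a 'fileName' attribute:
    #
    #   fs = api.GetSignedEnvelopeDocument(user_guid, envelope_guid,
    #                                      document_guid)
    #   if fs is not None:
    #       with open(fs.fileName, "wb") as out:
    #           out.write(fs.inputStream.read())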
def DeleteSignatureEnvelopeDocument(self, userId, envelopeGuid, documentGuid, **kwargs):
"""Delete document from envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or envelopeGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid', 'documentGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureEnvelopeDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/documents/{documentGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def GetSignatureEnvelopeDocuments(self, userId, envelopeGuid, **kwargs):
"""Get documents in envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureEnvelopeDocumentsResponse
"""
if( userId == None or envelopeGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'envelopeGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureEnvelopeDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/documents'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeDocumentsResponse')
return responseObject
def GetSignedEnvelopeDocuments(self, userId, envelopeGuid, **kwargs):
"""Get signed envelope documents
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: stream
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignedEnvelopeDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/documents/get'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
return self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams, FileStream)
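    # Sketch: downloading the signed documents. Unlike the JSON endpoints, this
    # method returns the raw callAPI result wrapped with FileStream rather than
    # a deserialized model; the `inputStream` attribute below is an assumption
    # about the FileStream helper defined elsewhere in this SDK:
    #
    #   stream = api.GetSignedEnvelopeDocuments(userId, envelopeGuid)
    #   if stream is not None:
    #       with open('signed_envelope.zip', 'wb') as fp:
    #           fp.write(stream.inputStream.read())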
def AddSignatureEnvelopeField(self, userId, envelopeGuid, documentGuid, recipientGuid, fieldGuid, **kwargs):
"""Add signature field for document in envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
recipientGuid, str: Recipient GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureEnvelopeFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureEnvelopeFieldsResponse
"""
        if userId is None or envelopeGuid is None or documentGuid is None or recipientGuid is None or fieldGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'documentGuid', 'recipientGuid', 'fieldGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddSignatureEnvelopeField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/documents/{documentGuid}/recipient/{recipientGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeFieldsResponse')
return responseObject
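    # Sketch: placing a field on page 1 of a document for a recipient. The
    # settings attributes (locationX, locationY, page) are assumptions about
    # SignatureEnvelopeFieldSettingsInfo, which is defined outside this file:
    #
    #   settings = SignatureEnvelopeFieldSettingsInfo()
    #   settings.locationX = 0.15
    #   settings.locationY = 0.25
    #   settings.page = 1
    #   fields = api.AddSignatureEnvelopeField(userId, envelopeGuid,
    #                                          documentGuid, recipientGuid,
    #                                          fieldGuid, body=settings)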
def AssignSignatureEnvelopeField(self, userId, envelopeGuid, documentGuid, fieldGuid, **kwargs):
"""Assign signature envelope field
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureEnvelopeAssignFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureEnvelopeFieldResponse
"""
        if userId is None or envelopeGuid is None or documentGuid is None or fieldGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'documentGuid', 'fieldGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AssignSignatureEnvelopeField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/documents/{documentGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeFieldResponse')
return responseObject
def FillEnvelopeField(self, userId, envelopeGuid, documentGuid, recipientGuid, fieldGuid, **kwargs):
"""Fill envelope field
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
recipientGuid, str: Recipient GUID (required)
fieldGuid, str: Field GUID (required)
            signatureGuid, str: GUID of the signature used to fill the field (optional)
body, stream: Data to be placed in field (optional)
Returns: SignatureEnvelopeFieldResponse
"""
        if userId is None or envelopeGuid is None or documentGuid is None or recipientGuid is None or fieldGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'documentGuid', 'recipientGuid', 'fieldGuid', 'signatureGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method FillEnvelopeField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/documents/{documentGuid}/recipient/{recipientGuid}/field/{fieldGuid}?signatureId={signatureGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('signatureGuid' in params):
queryParams['signatureId'] = self.apiClient.toPathValue(params['signatureGuid'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeFieldResponse')
return responseObject
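    # Sketch: filling a text field for a recipient. The optional `signatureGuid`
    # kwarg is forwarded as the `signatureId` query parameter (see the
    # queryParams handling above); passing the text as `body` is an assumption
    # consistent with the "Data to be placed in field" docstring:
    #
    #   result = api.FillEnvelopeField(userId, envelopeGuid, documentGuid,
    #                                  recipientGuid, fieldGuid,
    #                                  body='John Doe')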
def ModifySignatureEnvelopeFieldLocation(self, userId, envelopeGuid, documentGuid, recipientGuid, fieldGuid, locationGuid, **kwargs):
"""Modify signature envelope field location
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
recipientGuid, str: Recipient GUID (required)
fieldGuid, str: Field GUID (required)
locationGuid, str: Field location GUID (required)
body, SignatureEnvelopeFieldLocationSettingsInfo: Settings of the field location (optional)
Returns: SignatureEnvelopeFieldResponse
"""
        if userId is None or envelopeGuid is None or documentGuid is None or recipientGuid is None or fieldGuid is None or locationGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'documentGuid', 'recipientGuid', 'fieldGuid', 'locationGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureEnvelopeFieldLocation" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/documents/{documentGuid}/recipient/{recipientGuid}/fields/{fieldGuid}/locations/{locationGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
if ('locationGuid' in params):
replacement = str(self.apiClient.toPathValue(params['locationGuid']))
resourcePath = resourcePath.replace('{' + 'locationGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeFieldResponse')
return responseObject
def DeleteSignatureEnvelopeFieldLocation(self, userId, envelopeGuid, fieldGuid, locationGuid, **kwargs):
"""Remove signature envelope field location
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
fieldGuid, str: Field GUID (required)
locationGuid, str: Field location GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or envelopeGuid is None or fieldGuid is None or locationGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'fieldGuid', 'locationGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureEnvelopeFieldLocation" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/fields/{fieldGuid}/locations/{locationGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
if ('locationGuid' in params):
replacement = str(self.apiClient.toPathValue(params['locationGuid']))
resourcePath = resourcePath.replace('{' + 'locationGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def ModifySignatureEnvelopeField(self, userId, envelopeGuid, documentGuid, fieldGuid, **kwargs):
"""Modify signature envelope field
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureEnvelopeFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureEnvelopeFieldResponse
"""
        if userId is None or envelopeGuid is None or documentGuid is None or fieldGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'documentGuid', 'fieldGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureEnvelopeField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/documents/{documentGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeFieldResponse')
return responseObject
def DeleteSignatureEnvelopeField(self, userId, envelopeGuid, fieldGuid, **kwargs):
"""Delete signature envelope field
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
fieldGuid, str: Field GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or envelopeGuid is None or fieldGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'fieldGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureEnvelopeField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/fields/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def GetSignatureEnvelopeFields(self, userId, envelopeGuid, **kwargs):
"""Get signature field for document in envelope per recipient
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (optional)
recipientGuid, str: Recipient GUID (optional)
Returns: SignatureEnvelopeFieldsResponse
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'documentGuid', 'recipientGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureEnvelopeFields" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/fields?document={documentGuid}&recipient={recipientGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('documentGuid' in params):
queryParams['document'] = self.apiClient.toPathValue(params['documentGuid'])
if ('recipientGuid' in params):
queryParams['recipient'] = self.apiClient.toPathValue(params['recipientGuid'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeFieldsResponse')
return responseObject
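    # Sketch: the document/recipient filters are optional kwargs that become
    # the `document` and `recipient` query parameters (see the mapping above):
    #
    #   fields = api.GetSignatureEnvelopeFields(userId, envelopeGuid,
    #                                           documentGuid=documentGuid,
    #                                           recipientGuid=recipientGuid)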
def GetSignatureEnvelope(self, userId, envelopeGuid, **kwargs):
"""Get signature envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureEnvelopeResponse
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeResponse')
return responseObject
def ModifySignatureEnvelope(self, userId, envelopeGuid, **kwargs):
"""Modify signature envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
body, SignatureEnvelopeSettingsInfo: Settings of the envelope (optional)
Returns: SignatureEnvelopeResponse
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeResponse')
return responseObject
def AddSignatureEnvelopeRecipient(self, userId, envelopeGuid, recipientEmail, recipientFirstName, recipientLastName, roleGuid, **kwargs):
"""Add signature envelope recipient
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
recipientEmail, str: Recipient email (required)
recipientFirstName, str: Recipient first name (required)
recipientLastName, str: Recipient last name (required)
roleGuid, str: Recipient role GUID (required)
order, int: Recipient order (optional)
Returns: SignatureEnvelopeRecipientResponse
"""
        if userId is None or envelopeGuid is None or recipientEmail is None or recipientFirstName is None or recipientLastName is None or roleGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'recipientEmail', 'recipientFirstName', 'recipientLastName', 'roleGuid', 'order']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddSignatureEnvelopeRecipient" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/recipient?email={recipientEmail}&firstname={recipientFirstName}&lastname={recipientLastName}&role={roleGuid}&order={order}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('recipientEmail' in params):
queryParams['email'] = self.apiClient.toPathValue(params['recipientEmail'])
if ('recipientFirstName' in params):
queryParams['firstname'] = self.apiClient.toPathValue(params['recipientFirstName'])
if ('recipientLastName' in params):
queryParams['lastname'] = self.apiClient.toPathValue(params['recipientLastName'])
if ('roleGuid' in params):
queryParams['role'] = self.apiClient.toPathValue(params['roleGuid'])
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeRecipientResponse')
return responseObject
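    # Sketch: adding a signer to an envelope. `roleGuid` must identify an
    # existing signature role (retrieved elsewhere in this SDK); the email and
    # name values here are placeholders:
    #
    #   recipient = api.AddSignatureEnvelopeRecipient(
    #       userId, envelopeGuid, 'john.doe@example.com',
    #       'John', 'Doe', roleGuid, order=1)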
def ModifySignatureEnvelopeRecipient(self, userId, envelopeGuid, recipientGuid, recipientEmail, recipientFirstName, recipientLastName, roleGuid, **kwargs):
"""Modify signature envelope recipient
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
recipientEmail, str: Recipient email (required)
recipientFirstName, str: Recipient first name (required)
recipientLastName, str: Recipient last name (required)
roleGuid, str: Recipient role GUID (required)
order, int: Recipient order (optional)
Returns: SignatureEnvelopeRecipientResponse
"""
        if userId is None or envelopeGuid is None or recipientGuid is None or recipientEmail is None or recipientFirstName is None or recipientLastName is None or roleGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'recipientGuid', 'recipientEmail', 'recipientFirstName', 'recipientLastName', 'roleGuid', 'order']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureEnvelopeRecipient" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/recipient/{recipientGuid}?email={recipientEmail}&firstname={recipientFirstName}&lastname={recipientLastName}&role={roleGuid}&order={order}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('recipientEmail' in params):
queryParams['email'] = self.apiClient.toPathValue(params['recipientEmail'])
if ('recipientFirstName' in params):
queryParams['firstname'] = self.apiClient.toPathValue(params['recipientFirstName'])
if ('recipientLastName' in params):
queryParams['lastname'] = self.apiClient.toPathValue(params['recipientLastName'])
if ('roleGuid' in params):
queryParams['role'] = self.apiClient.toPathValue(params['roleGuid'])
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeRecipientResponse')
return responseObject
def DeleteSignatureEnvelopeRecipient(self, userId, envelopeGuid, recipientGuid, **kwargs):
"""Delete signature envelope recipient
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or envelopeGuid is None or recipientGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'recipientGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureEnvelopeRecipient" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/recipients/{recipientGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def GetSignatureEnvelopeRecipients(self, userId, envelopeGuid, **kwargs):
"""Get signature envelope recipients
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureEnvelopeRecipientsResponse
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureEnvelopeRecipients" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/recipients'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeRecipientsResponse')
return responseObject
def RenameSignatureEnvelope(self, userId, envelopeGuid, name, **kwargs):
"""Rename signature envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
name, str: New envelope name (required)
Returns: SignatureEnvelopeResponse
"""
        if userId is None or envelopeGuid is None or name is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'name']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method RenameSignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}?name={name}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeResponse')
return responseObject
def RestartExpiredSignatureEnvelope(self, userId, envelopeGuid, **kwargs):
"""Restart expired envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method RestartExpiredSignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/restart'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def SignatureEnvelopeSend(self, userId, envelopeGuid, **kwargs):
"""Send envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
body, WebhookInfo: Webhook Callback Url (optional)
Returns: SignatureEnvelopeSendResponse
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method SignatureEnvelopeSend" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/send'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeSendResponse')
return responseObject
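    # Sketch: sending the envelope once documents, recipients, and fields are
    # in place. The optional body is a WebhookInfo; its `callbackUrl`
    # attribute is an assumption about that model's shape:
    #
    #   webhook = WebhookInfo()
    #   webhook.callbackUrl = 'https://example.com/signature-callback'
    #   sent = api.SignatureEnvelopeSend(userId, envelopeGuid, body=webhook)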
def SignEnvelope(self, userId, envelopeGuid, recipientGuid, **kwargs):
"""Sign envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or envelopeGuid is None or recipientGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'recipientGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method SignEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/recipient/{recipientGuid}/sign'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
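    # Sketch: signing on behalf of a recipient after their required fields
    # have been filled (e.g. via FillEnvelopeField above):
    #
    #   status = api.SignEnvelope(userId, envelopeGuid, recipientGuid)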
def GetSignatureEnvelopes(self, userId, **kwargs):
"""Get signature envelopes
Args:
userId, str: User GUID (required)
statusId, int: Filter envelopes by statusId (optional)
page, int: Show records for page number (optional)
records, int: Show records count (optional)
originalDocumentMD5, str: Filter envelopes by original document md5 checksum (optional)
recipientEmail, str: Filter envelopes by recipient email (optional)
datetime, str: Filter envelopes by date (optional)
name, str: Filter envelopes by name (optional)
Returns: SignatureEnvelopesResponse
"""
        if userId is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'statusId', 'page', 'records', 'originalDocumentMD5', 'recipientEmail', 'datetime', 'name']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureEnvelopes" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes?statusId={statusId}&records={records}&page={page}&document={originalDocumentMD5}&recipient={recipientEmail}&date={date}&name={name}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('statusId' in params):
queryParams['statusId'] = self.apiClient.toPathValue(params['statusId'])
if ('page' in params):
queryParams['page'] = self.apiClient.toPathValue(params['page'])
if ('records' in params):
queryParams['records'] = self.apiClient.toPathValue(params['records'])
if ('originalDocumentMD5' in params):
queryParams['document'] = self.apiClient.toPathValue(params['originalDocumentMD5'])
if ('recipientEmail' in params):
queryParams['recipient'] = self.apiClient.toPathValue(params['recipientEmail'])
if ('datetime' in params):
queryParams['date'] = self.apiClient.toPathValue(params['datetime'])
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopesResponse')
return responseObject
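    # Sketch: paging through envelopes filtered by status. Note the kwarg-to-
    # query-parameter renames visible above: `datetime` is sent as `date` and
    # `originalDocumentMD5` as `document`:
    #
    #   envelopes = api.GetSignatureEnvelopes(userId, statusId=1,
    #                                         page=1, records=10)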
def GetSignatureEnvelopeResources(self, userId, **kwargs):
"""Get envelope resources
Args:
userId, str: User GUID (required)
statusIds, str: Envelope status identifier - comma separated list (optional)
Returns: SignatureEnvelopeResourcesResponse
"""
        if userId is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'statusIds']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureEnvelopeResources" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/resources?statusIds={statusIds}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('statusIds' in params):
queryParams['statusIds'] = self.apiClient.toPathValue(params['statusIds'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeResourcesResponse')
return responseObject
def GetRecipientSignatureEnvelopes(self, userId, **kwargs):
"""Get signature envelopes where the user is recipient
Args:
userId, str: User GUID (required)
statusId, str: Filter envelopes by statusId (optional)
page, int: Show records for page number (optional)
records, int: Show records count (optional)
Returns: SignatureEnvelopesResponse
"""
        if userId is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'statusId', 'page', 'records']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetRecipientSignatureEnvelopes" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/recipient?statusId={statusId}&records={records}&page={page}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('statusId' in params):
queryParams['statusId'] = self.apiClient.toPathValue(params['statusId'])
if ('page' in params):
queryParams['page'] = self.apiClient.toPathValue(params['page'])
if ('records' in params):
queryParams['records'] = self.apiClient.toPathValue(params['records'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopesResponse')
return responseObject
def RenameSignatureEnvelopeDocument(self, userId, envelopeGuid, documentGuid, **kwargs):
"""Rename signature envelope document
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
newName, str: New name of the document (optional)
Returns: SignatureEnvelopeDocumentResponse
"""
        if userId is None or envelopeGuid is None or documentGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid', 'documentGuid', 'newName']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method RenameSignatureEnvelopeDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/document/{documentGuid}?newName={newName}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('newName' in params):
queryParams['newName'] = self.apiClient.toPathValue(params['newName'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeDocumentResponse')
return responseObject
def CancelSignatureEnvelope(self, userId, envelopeGuid, **kwargs):
"""Cancel envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method CancelSignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/cancel'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def RetrySignEnvelope(self, userId, envelopeGuid, **kwargs):
"""Retry sign envelope
Args:
userId, str: User GUID (required)
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or envelopeGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'envelopeGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method RetrySignEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/envelopes/{envelopeGuid}/retry'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def GetFieldsList(self, userId, **kwargs):
"""Get signature fields
Args:
userId, str: User GUID (required)
fieldGuid, str: Filter fields by id (optional)
Returns: SignatureFieldsResponse
"""
        if userId is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'fieldGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetFieldsList" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/fields?id={fieldGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('fieldGuid' in params):
queryParams['id'] = self.apiClient.toPathValue(params['fieldGuid'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFieldsResponse')
return responseObject
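    # Sketch: the `fieldGuid` kwarg is sent as the `id` query parameter (see
    # above), so omitting it lists all fields while passing it fetches one:
    #
    #   all_fields = api.GetFieldsList(userId)
    #   one_field = api.GetFieldsList(userId, fieldGuid=fieldGuid)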
def CreateSignatureField(self, userId, **kwargs):
"""Create signature field
Args:
userId, str: User GUID (required)
body, SignatureFieldSettingsInfo: Settings of the new field (optional)
Returns: SignatureFieldResponse
"""
        if userId is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method CreateSignatureField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/field'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFieldResponse')
return responseObject
def ModifySignatureField(self, userId, fieldGuid, **kwargs):
"""Modify signature field
Args:
userId, str: User GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureFieldResponse
"""
        if userId is None or fieldGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'fieldGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/fields/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFieldResponse')
return responseObject
def DeleteSignatureField(self, userId, fieldGuid, **kwargs):
"""Delete signature field
Args:
userId, str: User GUID (required)
fieldGuid, str: Field GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or fieldGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'fieldGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/fields/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def ArchiveSignatureForm(self, userId, formGuid, **kwargs):
"""Archive signature form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or formGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'formGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ArchiveSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/archive'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def CompleteSignatureForm(self, userId, formGuid, **kwargs):
"""Complete signature form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
Returns: SignatureStatusResponse
"""
        if userId is None or formGuid is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'formGuid']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method CompleteSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/complete'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def CreateSignatureForm(self, userId, **kwargs):
"""Create signature form
Args:
userId, str: User GUID (required)
name, str: Form name (optional)
            templateGuid, str: GUID of the template that will be used to create the new form (optional)
            assemblyGuid, str: GUID of the assembly that will be used to create the new form (optional)
            formGuid, str: GUID of an existing form that will be used to create the new form (optional)
body, SignatureFormSettingsInfo: Settings of the new form (optional)
Returns: SignatureFormResponse
"""
        if userId is None:
            raise ApiException(400, "missing required parameters")
        allParams = ['userId', 'name', 'templateGuid', 'assemblyGuid', 'formGuid', 'body']
        params = locals()
        for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method CreateSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/form?name={name}&templateId={templateGuid}&assemblyId={assemblyGuid}&formId={formGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('templateGuid' in params):
queryParams['templateId'] = self.apiClient.toPathValue(params['templateGuid'])
if ('assemblyGuid' in params):
queryParams['assemblyId'] = self.apiClient.toPathValue(params['assemblyGuid'])
if ('formGuid' in params):
queryParams['formId'] = self.apiClient.toPathValue(params['formGuid'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormResponse')
return responseObject
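# Example usage (a sketch): creating a form from an existing template via the
# optional keyword arguments documented above. The GUIDs are placeholders and
# `api` is assumed to be an instance of this class.
#
#   response = api.CreateSignatureForm('user-guid',
#                                      name='NDA form',
#                                      templateGuid='template-guid')
#   if response is not None:
#       print(response.status)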
def DeleteSignatureForm(self, userId, formGuid, **kwargs):
"""Delete signature form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or formGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def AddSignatureFormDocument(self, userId, formGuid, documentGuid, **kwargs):
"""Add document in form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
documentGuid, str: Document GUID (required)
order, int: Document order (optional)
parseFields, bool: Try to parse fields in document (optional)
Returns: SignatureFormDocumentResponse
"""
if( userId == None or formGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'documentGuid', 'order', 'parseFields']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddSignatureFormDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/document/{documentGuid}?parseFields={parseFields}&order={order}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('parseFields' in params):
queryParams['parseFields'] = self.apiClient.toPathValue(params['parseFields'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormDocumentResponse')
return responseObject
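# Example usage (a sketch): attaching a document to a form. `order` and
# `parseFields` are the optional keyword arguments accepted above; all GUIDs
# are placeholders and `api` is assumed to be an instance of this class.
#
#   response = api.AddSignatureFormDocument('user-guid', 'form-guid',
#                                           'document-guid',
#                                           order=1, parseFields=True)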
def DeleteSignatureFormDocument(self, userId, formGuid, documentGuid, **kwargs):
"""Delete document from form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
documentGuid, str: Document GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or formGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'documentGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureFormDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/documents/{documentGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def GetSignatureFormDocuments(self, userId, formGuid, **kwargs):
"""Get documents in form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
Returns: SignatureFormDocumentsResponse
"""
if( userId == None or formGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureFormDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/documents'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormDocumentsResponse')
return responseObject
def AddSignatureFormField(self, userId, formGuid, documentGuid, fieldGuid, **kwargs):
"""Add signature field for document in form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
documentGuid, str: Document GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureFormFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureFormFieldResponse
"""
if( userId == None or formGuid == None or documentGuid == None or fieldGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'documentGuid', 'fieldGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddSignatureFormField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/documents/{documentGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormFieldResponse')
return responseObject
def ModifySignatureFormFieldLocation(self, userId, formGuid, documentGuid, fieldGuid, locationGuid, **kwargs):
"""Modify signature form field location
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
documentGuid, str: Document GUID (required)
fieldGuid, str: Field GUID (required)
locationGuid, str: Field location GUID (required)
body, SignatureFormFieldLocationSettingsInfo: Settings of the field location (optional)
Returns: SignatureFormFieldResponse
"""
if( userId == None or formGuid == None or documentGuid == None or fieldGuid == None or locationGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'documentGuid', 'fieldGuid', 'locationGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureFormFieldLocation" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/documents/{documentGuid}/fields/{fieldGuid}/locations/{locationGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
if ('locationGuid' in params):
replacement = str(self.apiClient.toPathValue(params['locationGuid']))
resourcePath = resourcePath.replace('{' + 'locationGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormFieldResponse')
return responseObject
def DeleteSignatureFormFieldLocation(self, userId, formGuid, fieldGuid, locationGuid, **kwargs):
"""Remove signature form field location
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
fieldGuid, str: Field GUID (required)
locationGuid, str: Field location GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or formGuid == None or fieldGuid == None or locationGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'fieldGuid', 'locationGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureFormFieldLocation" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/fields/{fieldGuid}/locations/{locationGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
if ('locationGuid' in params):
replacement = str(self.apiClient.toPathValue(params['locationGuid']))
resourcePath = resourcePath.replace('{' + 'locationGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def ModifySignatureFormField(self, userId, formGuid, documentGuid, fieldGuid, **kwargs):
"""Modify signature form field
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
documentGuid, str: Document GUID (required)
fieldGuid, str: Field GUID (required)
body, SignatureFormFieldSettingsInfo: Settings of the field (optional)
Returns: SignatureFormFieldResponse
"""
if( userId == None or formGuid == None or documentGuid == None or fieldGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'documentGuid', 'fieldGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureFormField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/documents/{documentGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormFieldResponse')
return responseObject
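# Example usage (a sketch): modifying a field with the optional settings body.
# The SignatureFormFieldSettingsInfo instance is assumed to be constructed
# elsewhere from the SDK's model classes (its attributes are not shown here);
# the GUIDs are placeholders.
#
#   response = api.ModifySignatureFormField('user-guid', 'form-guid',
#                                           'document-guid', 'field-guid',
#                                           body=field_settings)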
def DeleteSignatureFormField(self, userId, formGuid, fieldGuid, **kwargs):
"""Delete signature form field
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
fieldGuid, str: Field GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or formGuid == None or fieldGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'fieldGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureFormField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/fields/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def GetSignatureFormFields(self, userId, formGuid, documentGuid, **kwargs):
"""Get form fields for document in form per participant
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
documentGuid, str: Document GUID (required)
Returns: SignatureFormFieldsResponse
"""
if( userId == None or formGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'documentGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureFormFields" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/documents/{documentGuid}/fields'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormFieldsResponse')
return responseObject
def GetSignatureForm(self, userId, formGuid, **kwargs):
"""Get signature form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
Returns: SignatureFormResponse
"""
if( userId == None or formGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormResponse')
return responseObject
def ModifySignatureForm(self, userId, formGuid, **kwargs):
"""Modify signature form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
body, SignatureFormSettingsInfo: Settings of the form (optional)
Returns: SignatureFormResponse
"""
if( userId == None or formGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormResponse')
return responseObject
def PublishSignatureForm(self, userId, formGuid, **kwargs):
"""Publish signature form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
body, WebhookInfo: Webhook Callback Url (optional)
Returns: SignatureStatusResponse
"""
if( userId == None or formGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublishSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/publish'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
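# Example usage (a sketch): publishing a form. The optional body is a
# WebhookInfo carrying a callback URL; how it is constructed depends on the
# SDK's model class and is assumed here. GUIDs are placeholders.
#
#   response = api.PublishSignatureForm('user-guid', 'form-guid',
#                                       body=webhook_info)
#   if response is not None:
#       print(response.status)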
def RenameSignatureForm(self, userId, formGuid, name, **kwargs):
"""Rename signature form
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
name, str: New form name (required)
Returns: SignatureFormResponse
"""
if( userId == None or formGuid == None or name == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'name']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method RenameSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}?new_name={name}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('name' in params):
queryParams['new_name'] = self.apiClient.toPathValue(params['name'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormResponse')
return responseObject
def UpdateSignatureFormFromTemplate(self, userId, formGuid, templateGuid, **kwargs):
"""Add signature form fields from template
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
templateGuid, str: Template GUID (required)
Returns: SignatureFormResponse
"""
if( userId == None or formGuid == None or templateGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'templateGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method UpdateSignatureFormFromTemplate" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/templates/{templateGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormResponse')
return responseObject
def GetSignatureForms(self, userId, **kwargs):
"""Get signature forms
Args:
userId, str: User GUID (required)
statusId, int: Filter forms by statusId (optional)
page, int: Show records for page number (optional)
records, int: Show records count (optional)
originalDocumentMD5, str: Filter forms by original document MD5 (optional)
datetime, str: Filter forms by date (optional)
name, str: Filter forms by name (optional)
Returns: SignatureFormsResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'statusId', 'page', 'records', 'originalDocumentMD5', 'datetime', 'name']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureForms" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms?statusId={statusId}&records={records}&page={page}&document={originalDocumentMD5}&date={date}&name={name}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('statusId' in params):
queryParams['statusId'] = self.apiClient.toPathValue(params['statusId'])
if ('page' in params):
queryParams['page'] = self.apiClient.toPathValue(params['page'])
if ('records' in params):
queryParams['records'] = self.apiClient.toPathValue(params['records'])
if ('originalDocumentMD5' in params):
queryParams['document'] = self.apiClient.toPathValue(params['originalDocumentMD5'])
if ('datetime' in params):
queryParams['date'] = self.apiClient.toPathValue(params['datetime'])
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormsResponse')
return responseObject
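# Example usage (a sketch): listing forms with paging and a name filter via
# the optional keyword arguments documented above. `api` is assumed to be an
# instance of this class.
#
#   response = api.GetSignatureForms('user-guid', page=1, records=25,
#                                    name='NDA')
#   if response is not None:
#       # The concrete shape of SignatureFormsResponse is defined by the
#       # SDK's model classes; inspect it to enumerate the returned forms.
#       print(response.status)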
def GetSignatureFormResources(self, userId, statusIds, **kwargs):
"""Get form resources
Args:
userId, str: User GUID (required)
        statusIds, str: Form status identifiers, as a comma-separated list (required)
Returns: SignatureFormResourcesResponse
"""
if( userId == None or statusIds == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'statusIds']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureFormResources" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/resources?statusIds={statusIds}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('statusIds' in params):
queryParams['statusIds'] = self.apiClient.toPathValue(params['statusIds'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormResourcesResponse')
return responseObject
def GetSignedFormDocuments(self, userId, formGuid, **kwargs):
"""Get signed form documents
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
Returns: stream
"""
if( userId == None or formGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignedFormDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/documents/get'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
return self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams, FileStream)
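# Note: unlike the other methods in this class, GetSignedFormDocuments passes
# FileStream to callAPI and returns the raw stream wrapper rather than a
# deserialized response object. A sketch of saving the result follows; the
# exact attributes of FileStream (e.g. `inputStream`) are defined elsewhere
# in this SDK and are assumed here.
#
#   stream = api.GetSignedFormDocuments('user-guid', 'form-guid')
#   if stream is not None:
#       with open('signed-documents.zip', 'wb') as fp:
#           fp.write(stream.inputStream.read())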
def RenameSignatureFormDocument(self, userId, formGuid, documentGuid, **kwargs):
"""Rename signature form document
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
documentGuid, str: Form Document GUID (required)
newName, str: New name of the document (optional)
Returns: SignatureFormDocumentResponse
"""
if( userId == None or formGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'documentGuid', 'newName']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method RenameSignatureFormDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/document/{documentGuid}?newName={newName}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('newName' in params):
queryParams['newName'] = self.apiClient.toPathValue(params['newName'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormDocumentResponse')
return responseObject
def ModifySignatureFormDocument(self, userId, formGuid, documentGuid, **kwargs):
"""Modify signature form document
Args:
userId, str: User GUID (required)
formGuid, str: Form GUID (required)
documentGuid, str: Form Document GUID (required)
body, SignatureFormDocumentSettingsInfo: Settings of the document (optional)
Returns: SignatureFormDocumentResponse
"""
if( userId == None or formGuid == None or documentGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'formGuid', 'documentGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureFormDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/forms/{formGuid}/document/{documentGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormDocumentResponse')
return responseObject
def GetSignaturePredefinedLists(self, userId, **kwargs):
"""Get user predefined lists
Args:
userId, str: User GUID (required)
Returns: SignaturePredefinedListsResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignaturePredefinedLists" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/lists'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignaturePredefinedListsResponse')
return responseObject
def AddPredefinedList(self, userId, body, **kwargs):
"""Add predefined list
Args:
userId, str: User GUID (required)
body, SignaturePredefinedListSettingsInfo: List data (required)
Returns: SignaturePredefinedListResponse
"""
if( userId == None or body == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddPredefinedList" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/list'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignaturePredefinedListResponse')
return responseObject
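# Example usage (a sketch): here the list body is a required positional
# argument, unlike most other methods in this class. The
# SignaturePredefinedListSettingsInfo instance is assumed to be built from
# the SDK's model classes (attributes not shown).
#
#   response = api.AddPredefinedList('user-guid', list_settings)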
def DeletePredefinedList(self, userId, listGuid, **kwargs):
"""Delete predefined list
Args:
userId, str: User GUID (required)
listGuid, str: List GUID (required)
Returns: SignaturePredefinedListResponse
"""
if( userId == None or listGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'listGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeletePredefinedList" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/lists/{listGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('listGuid' in params):
replacement = str(self.apiClient.toPathValue(params['listGuid']))
resourcePath = resourcePath.replace('{' + 'listGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignaturePredefinedListResponse')
return responseObject
def GetRolesList(self, userId, **kwargs):
"""Get signature roles
Args:
userId, str: User GUID (required)
roleGuid, str: Filter roles by GUID (optional)
Returns: SignatureRolesResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'roleGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetRolesList" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/roles?id={roleGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('roleGuid' in params):
queryParams['id'] = self.apiClient.toPathValue(params['roleGuid'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureRolesResponse')
return responseObject
def CreateSignature(self, userId, name, **kwargs):
"""Create user signature
Args:
userId, str: User GUID (required)
name, str: Signature name (required)
body, SignatureSignatureSettingsInfo: Settings of the signature (optional)
Returns: SignatureSignatureResponse
"""
if( userId == None or name == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'name', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method CreateSignature" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/signature?name={name}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureSignatureResponse')
return responseObject
def DeleteSignature(self, userId, signatureGuid, **kwargs):
"""Delete user signature
Args:
userId, str: User GUID (required)
signatureGuid, str: Signature GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or signatureGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'signatureGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignature" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/signatures/{signatureGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('signatureGuid' in params):
replacement = str(self.apiClient.toPathValue(params['signatureGuid']))
resourcePath = resourcePath.replace('{' + 'signatureGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def GetSignatures(self, userId, **kwargs):
"""Get user signatures
Args:
userId, str: User GUID (required)
page, int: Show records for page number (optional)
records, int: Show records count (optional)
name, str: Filter by signature name (optional)
Returns: SignatureSignaturesResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'page', 'records', 'name']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatures" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/signatures?records={records}&page={page}&name={name}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('page' in params):
queryParams['page'] = self.apiClient.toPathValue(params['page'])
if ('records' in params):
queryParams['records'] = self.apiClient.toPathValue(params['records'])
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureSignaturesResponse')
return responseObject
def GetSignatureTemplates(self, userId, **kwargs):
"""Get templates
Args:
userId, str: User GUID (required)
page, int: Page number (optional)
records, int: Records count (optional)
        documentGuid, str: Filter templates by document original MD5 (optional)
        recipientName, str: Filter templates by recipient nickname (optional)
        name, str: Filter templates by template name (optional)
Returns: SignatureTemplatesResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'page', 'records', 'documentGuid', 'recipientName', 'name']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureTemplates" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates?records={records}&page={page}&documentGuid={documentGuid}&recipientName={recipientName}&name={name}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('page' in params):
queryParams['page'] = self.apiClient.toPathValue(params['page'])
if ('records' in params):
queryParams['records'] = self.apiClient.toPathValue(params['records'])
if ('documentGuid' in params):
queryParams['documentGuid'] = self.apiClient.toPathValue(params['documentGuid'])
if ('recipientName' in params):
queryParams['recipientName'] = self.apiClient.toPathValue(params['recipientName'])
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplatesResponse')
return responseObject
def GetSignatureTemplate(self, userId, templateGuid, **kwargs):
"""Get template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
Returns: SignatureTemplateResponse
"""
if( userId == None or templateGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureTemplate" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateResponse')
return responseObject
def CreateSignatureTemplate(self, userId, **kwargs):
"""Create template
Args:
userId, str: User GUID (required)
name, str: Template name (optional)
templateGuid, str: Template GUID of the template that will be used to create the new template (optional)
envelopeGuid, str: Envelope GUID of the envelope that will be used to create the new template (optional)
body, SignatureTemplateSettingsInfo: Settings of the template (optional)
Returns: SignatureTemplateResponse
"""
if( userId == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'name', 'templateGuid', 'envelopeGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method CreateSignatureTemplate" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/template?name={name}&templateId={templateGuid}&envelopeId={envelopeGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('templateGuid' in params):
queryParams['templateId'] = self.apiClient.toPathValue(params['templateGuid'])
if ('envelopeGuid' in params):
queryParams['envelopeId'] = self.apiClient.toPathValue(params['envelopeGuid'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateResponse')
return responseObject
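# Example usage (a sketch): creating a template from an existing envelope via
# the optional keyword arguments documented above. GUIDs are placeholders and
# `api` is assumed to be an instance of this class.
#
#   response = api.CreateSignatureTemplate('user-guid',
#                                          name='Standard NDA',
#                                          envelopeGuid='envelope-guid')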
def ModifySignatureTemplate(self, userId, templateGuid, **kwargs):
"""Modify template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
body, SignatureTemplateSettingsInfo: Settings of the template (optional)
Returns: SignatureTemplateResponse
"""
if( userId == None or templateGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method ModifySignatureTemplate" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateResponse')
return responseObject
def RenameSignatureTemplate(self, userId, templateGuid, name, **kwargs):
"""Rename template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
name, str: New template name (required)
Returns: SignatureTemplateResponse
"""
if( userId == None or templateGuid == None or name == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'name']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method RenameSignatureTemplate" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}?name={name}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('name' in params):
queryParams['name'] = self.apiClient.toPathValue(params['name'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateResponse')
return responseObject
def DeleteSignatureTemplate(self, userId, templateGuid, **kwargs):
"""Delete template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
Returns: SignatureStatusResponse
"""
if( userId == None or templateGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteSignatureTemplate" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'DELETE'
queryParams = {}
headerParams = {}
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def AddSignatureTemplateRecipient(self, userId, templateGuid, nickname, roleGuid, **kwargs):
"""Add recipient to the template
Args:
userId, str: User GUID (required)
templateGuid, str: Template GUID (required)
nickname, str: Nickname of the recipient (required)
roleGuid, str: Role GUID (required)
order, int: Display order of the recipient (optional)
Returns: SignatureTemplateRecipientResponse
"""
if( userId == None or templateGuid == None or nickname == None or roleGuid == None ):
raise ApiException(400, "missing required parameters")
allParams = ['userId', 'templateGuid', 'nickname', 'roleGuid', 'order']
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method AddSignatureTemplateRecipient" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/{userId}/templates/{templateGuid}/recipient?nickname={nickname}&role={roleGuid}&order={order}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('nickname' in params):
queryParams['nickname'] = self.apiClient.toPathValue(params['nickname'])
if ('roleGuid' in params):
queryParams['role'] = self.apiClient.toPathValue(params['roleGuid'])
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('userId' in params):
replacement = str(self.apiClient.toPathValue(params['userId']))
resourcePath = resourcePath.replace('{' + 'userId' + '}',
replacement)
if ('templateGuid' in params):
replacement = str(self.apiClient.toPathValue(params['templateGuid']))
resourcePath = resourcePath.replace('{' + 'templateGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureTemplateRecipientResponse')
return responseObject
def PublicFillEnvelopeField(self, envelopeGuid, documentGuid, recipientGuid, fieldGuid, **kwargs):
"""Fill envelope field
Args:
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
recipientGuid, str: Recipient GUID (required)
fieldGuid, str: Field GUID (required)
body, str: Data to be placed in field (optional)
Returns: SignatureEnvelopeFieldResponse
"""
if envelopeGuid is None or documentGuid is None or recipientGuid is None or fieldGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['envelopeGuid', 'documentGuid', 'recipientGuid', 'fieldGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicFillEnvelopeField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/envelopes/{envelopeGuid}/documents/{documentGuid}/recipient/{recipientGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeFieldResponse')
return responseObject
def PublicSignEnvelope(self, envelopeGuid, recipientGuid, **kwargs):
"""Sign envelope
Args:
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: SignatureStatusResponse
"""
if envelopeGuid is None or recipientGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['envelopeGuid', 'recipientGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicSignEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/envelopes/{envelopeGuid}/recipient/{recipientGuid}/sign'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def PublicGetEnvelopeDocuments(self, envelopeGuid, recipientGuid, **kwargs):
"""Get documents in envelope
Args:
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: SignatureEnvelopeDocumentsResponse
"""
if envelopeGuid is None or recipientGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['envelopeGuid', 'recipientGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetEnvelopeDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/envelopes/{envelopeGuid}/recipient/{recipientGuid}/documents'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeDocumentsResponse')
return responseObject
def PublicGetEnvelopeRecipients(self, envelopeGuid, **kwargs):
"""Get signature envelope recipients
Args:
envelopeGuid, str: Envelope GUID (required)
Returns: SignatureEnvelopeRecipientsResponse
"""
if envelopeGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['envelopeGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetEnvelopeRecipients" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/envelopes/{envelopeGuid}/recipients'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeRecipientsResponse')
return responseObject
def PublicGetSignatureEnvelopeFields(self, envelopeGuid, documentGuid, recipientGuid, **kwargs):
"""Get signature field for document in envelope per recipient
Args:
envelopeGuid, str: Envelope GUID (required)
documentGuid, str: Document GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: SignatureEnvelopeFieldsResponse
"""
if envelopeGuid is None or documentGuid is None or recipientGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['envelopeGuid', 'documentGuid', 'recipientGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetSignatureEnvelopeFields" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/envelopes/{envelopeGuid}/fields?document={documentGuid}&recipient={recipientGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('documentGuid' in params):
queryParams['document'] = self.apiClient.toPathValue(params['documentGuid'])
if ('recipientGuid' in params):
queryParams['recipient'] = self.apiClient.toPathValue(params['recipientGuid'])
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeFieldsResponse')
return responseObject
def PublicGetSignatureEnvelope(self, envelopeGuid, recipientGuid, **kwargs):
"""Get signature envelope
Args:
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: SignatureEnvelopeResponse
"""
if envelopeGuid is None or recipientGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['envelopeGuid', 'recipientGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetSignatureEnvelope" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/envelopes/{envelopeGuid}/recipient/{recipientGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureEnvelopeResponse')
return responseObject
def PublicGetSignedEnvelopeDocuments(self, envelopeGuid, recipientGuid, **kwargs):
"""Get signed envelope documents
Args:
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
Returns: stream
"""
if envelopeGuid is None or recipientGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['envelopeGuid', 'recipientGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetSignedEnvelopeDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/envelopes/{envelopeGuid}/recipient/{recipientGuid}/documents/get'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
return self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams, FileStream)
def PublicFillSignatureForm(self, formGuid, **kwargs):
"""Fill signature form
Args:
formGuid, str: Form GUID (required)
Returns: SignatureFormParticipantResponse
"""
if formGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['formGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicFillSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/forms/{formGuid}/fill'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormParticipantResponse')
return responseObject
def PublicFillFormField(self, formGuid, documentGuid, fieldGuid, authSignature, participantIdId, **kwargs):
"""Fill form field
Args:
formGuid, str: Form GUID (required)
documentGuid, str: Document GUID (required)
fieldGuid, str: Field GUID (required)
authSignature, str: Authentication signature (required)
body, str: Data to be placed in field (optional)
participantIdId, str: Participant GUID (required)
Returns: SignatureFormFieldResponse
"""
if formGuid is None or documentGuid is None or fieldGuid is None or authSignature is None or participantIdId is None:
raise ApiException(400, "missing required parameters")
allParams = ['formGuid', 'documentGuid', 'fieldGuid', 'authSignature', 'body', 'participantIdId']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicFillFormField" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/forms/{formGuid}/documents/{documentGuid}/participant/{participantGuid}/field/{fieldGuid}?participantAuthSignature={authSignature}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('authSignature' in params):
queryParams['participantAuthSignature'] = self.apiClient.toPathValue(params['authSignature'])
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
if ('participantIdId' in params):
replacement = str(self.apiClient.toPathValue(params['participantIdId']))
# the path template uses {participantGuid}, so that is the placeholder to substitute
resourcePath = resourcePath.replace('{' + 'participantGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormFieldResponse')
return responseObject
def PublicSignForm(self, formGuid, participantGuid, authSignature, **kwargs):
"""Sign Form
Args:
formGuid, str: Form GUID (required)
participantGuid, str: Participant GUID (required)
authSignature, str: Authentication signature (required)
Returns: SignatureStatusResponse
"""
if formGuid is None or participantGuid is None or authSignature is None:
raise ApiException(400, "missing required parameters")
allParams = ['formGuid', 'participantGuid', 'authSignature']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicSignForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/forms/{formGuid}/participant/{participantGuid}/sign?participantAuthSignature={authSignature}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'PUT'
queryParams = {}
headerParams = {}
if ('authSignature' in params):
queryParams['participantAuthSignature'] = self.apiClient.toPathValue(params['authSignature'])
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('participantGuid' in params):
replacement = str(self.apiClient.toPathValue(params['participantGuid']))
resourcePath = resourcePath.replace('{' + 'participantGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureStatusResponse')
return responseObject
def PublicGetSignatureForm(self, formGuid, **kwargs):
"""Get signature form
Args:
formGuid, str: Form GUID (required)
Returns: SignatureFormResponse
"""
if formGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['formGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetSignatureForm" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/forms/{formGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormResponse')
return responseObject
def PublicGetSignatureFormDocuments(self, formGuid, **kwargs):
"""Get documents in form
Args:
formGuid, str: Form GUID (required)
Returns: SignatureFormDocumentsResponse
"""
if formGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['formGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetSignatureFormDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/forms/{formGuid}/documents'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormDocumentsResponse')
return responseObject
def PublicGetSignatureFormFields(self, formGuid, documentGuid, participantGuid, **kwargs):
"""Get form fields for document in form per participant
Args:
formGuid, str: Form GUID (required)
documentGuid, str: Document GUID (required)
participantGuid, str: Participant GUID (required)
Returns: SignatureFormFieldsResponse
"""
if formGuid is None or documentGuid is None or participantGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['formGuid', 'documentGuid', 'participantGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetSignatureFormFields" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/forms/{formGuid}/fields?document={documentGuid}&participant={participantGuid}'.replace('*', '')
pos = resourcePath.find("?")
if pos != -1:
resourcePath = resourcePath[0:pos]
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('documentGuid' in params):
queryParams['document'] = self.apiClient.toPathValue(params['documentGuid'])
if ('participantGuid' in params):
queryParams['participant'] = self.apiClient.toPathValue(params['participantGuid'])
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormFieldsResponse')
return responseObject
def PublicGetSignedFormDocuments(self, formGuid, participantGuid, **kwargs):
"""Get signed form documents
Args:
formGuid, str: Form GUID (required)
participantGuid, str: Participant GUID (required)
Returns: stream
"""
if formGuid is None or participantGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['formGuid', 'participantGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetSignedFormDocuments" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/forms/{formGuid}/participant/{participantGuid}/documents/get'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('participantGuid' in params):
replacement = str(self.apiClient.toPathValue(params['participantGuid']))
resourcePath = resourcePath.replace('{' + 'participantGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
return self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams, FileStream)
def GetSignatureFormParticipant(self, formGuid, participantGuid, **kwargs):
"""Get signature form participant
Args:
formGuid, str: Form GUID (required)
participantGuid, str: Participant GUID (required)
Returns: SignatureFormParticipantResponse
"""
if formGuid is None or participantGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['formGuid', 'participantGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureFormParticipant" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/forms/{formGuid}/participants/{participantGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('formGuid' in params):
replacement = str(self.apiClient.toPathValue(params['formGuid']))
resourcePath = resourcePath.replace('{' + 'formGuid' + '}',
replacement)
if ('participantGuid' in params):
replacement = str(self.apiClient.toPathValue(params['participantGuid']))
resourcePath = resourcePath.replace('{' + 'participantGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureFormParticipantResponse')
return responseObject
def PublicSignDocument(self, documentGuid, **kwargs):
"""Sign document
Args:
documentGuid, str: Document GUID (required)
body, PublicSignatureSignDocumentSignerSettingsInfo: Settings of the signing document (optional)
Returns: SignatureSignDocumentResponse
"""
if documentGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['documentGuid', 'body']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicSignDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/documents/{documentGuid}/sign'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
if ('documentGuid' in params):
replacement = str(self.apiClient.toPathValue(params['documentGuid']))
resourcePath = resourcePath.replace('{' + 'documentGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureSignDocumentResponse')
return responseObject
def PublicVerifyDocument(self, body, **kwargs):
"""Verify
Args:
body, stream: Document to verify (required)
Returns: SignatureVerifyDocumentResponse
"""
if body is None:
raise ApiException(400, "missing required parameters")
allParams = ['body']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicVerifyDocument" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/verify'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'POST'
queryParams = {}
headerParams = {}
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureVerifyDocumentResponse')
return responseObject
def PublicGetSignDocumentStatus(self, jobGuid, **kwargs):
"""Get signed document status
Args:
jobGuid, str: Job GUID (required)
Returns: SignatureSignDocumentStatusResponse
"""
if jobGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['jobGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PublicGetSignDocumentStatus" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/documents/{jobGuid}/status'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('jobGuid' in params):
replacement = str(self.apiClient.toPathValue(params['jobGuid']))
resourcePath = resourcePath.replace('{' + 'jobGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
response = self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SignatureSignDocumentStatusResponse')
return responseObject
def GetSignatureEnvelopeFieldData(self, envelopeGuid, recipientGuid, fieldGuid, **kwargs):
"""Get signed envelope field data
Args:
envelopeGuid, str: Envelope GUID (required)
recipientGuid, str: Recipient GUID (required)
fieldGuid, str: Field GUID (required)
Returns: stream
"""
if envelopeGuid is None or recipientGuid is None or fieldGuid is None:
raise ApiException(400, "missing required parameters")
allParams = ['envelopeGuid', 'recipientGuid', 'fieldGuid']
params = locals()
for (key, val) in params['kwargs'].items():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetSignatureEnvelopeFieldData" % key)
params[key] = val
del params['kwargs']
resourcePath = '/signature/public/envelopes/{envelopeGuid}/fields/recipient/{recipientGuid}/field/{fieldGuid}'.replace('*', '')
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
if ('envelopeGuid' in params):
replacement = str(self.apiClient.toPathValue(params['envelopeGuid']))
resourcePath = resourcePath.replace('{' + 'envelopeGuid' + '}',
replacement)
if ('recipientGuid' in params):
replacement = str(self.apiClient.toPathValue(params['recipientGuid']))
resourcePath = resourcePath.replace('{' + 'recipientGuid' + '}',
replacement)
if ('fieldGuid' in params):
replacement = str(self.apiClient.toPathValue(params['fieldGuid']))
resourcePath = resourcePath.replace('{' + 'fieldGuid' + '}',
replacement)
postData = (params['body'] if 'body' in params else None)
return self.apiClient.callAPI(self.basePath, resourcePath, method, queryParams,
postData, headerParams, FileStream)
| 44.21939
| 225
| 0.571243
|
6f8a56a3869c640a3c1ab6bd377831428b9e74af
| 676
|
py
|
Python
|
ws/handler/event/enum/elapsed.py
|
fabaff/automate-ws
|
a9442f287692787e3f253e1ff23758bec8f3902e
|
[
"MIT"
] | null | null | null |
ws/handler/event/enum/elapsed.py
|
fabaff/automate-ws
|
a9442f287692787e3f253e1ff23758bec8f3902e
|
[
"MIT"
] | 1
|
2021-12-21T11:34:47.000Z
|
2021-12-21T11:34:47.000Z
|
ws/handler/event/enum/elapsed.py
|
fabaff/automate-ws
|
a9442f287692787e3f253e1ff23758bec8f3902e
|
[
"MIT"
] | 1
|
2021-12-21T10:10:13.000Z
|
2021-12-21T10:10:13.000Z
|
import home
from ws.handler.event.enum import Handler as Parent
class Handler(Parent):
KLASS = home.event.elapsed.Event
TEMPLATE = "event/enum.html"
LABEL = "Elapsed"
ICON_ELAPSED = "fas fa-stop-circle"
ICON_NOT_ELAPSED = "far fa-stop-circle"
def _get_str(self, e):
if e == home.event.elapsed.Event.On:
return self.YES
elif e == home.event.elapsed.Event.Off:
return self.NO
return e
def get_icon(self, e):
if e == home.event.elapsed.Event.On:
return self.ICON_ELAPSED
elif e == home.event.elapsed.Event.Off:
return self.ICON_NOT_ELAPSED
return e
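A short hedged sketch of the mapping this handler implements, assuming the parent Handler class (not shown here) allows argument-free construction:

import home
handler = Handler()
# On maps to the solid stop-circle icon, Off to the outlined one
print(handler.get_icon(home.event.elapsed.Event.On))   # fas fa-stop-circle
print(handler.get_icon(home.event.elapsed.Event.Off))  # far fa-stop-circle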
| 25.037037
| 51
| 0.616864
|
7f2a3109189ed11ad83de3c324028b35807fcb26
| 1,571
|
py
|
Python
|
features/output.py
|
AngeStan/github-list-commits
|
1c49445374489503e64adf07d3475447fda4b323
|
[
"Apache-2.0"
] | 1
|
2020-02-18T09:53:59.000Z
|
2020-02-18T09:53:59.000Z
|
features/output.py
|
AngeStan/github-list-commits
|
1c49445374489503e64adf07d3475447fda4b323
|
[
"Apache-2.0"
] | null | null | null |
features/output.py
|
AngeStan/github-list-commits
|
1c49445374489503e64adf07d3475447fda4b323
|
[
"Apache-2.0"
] | null | null | null |
import csv, sqlite3, os, datetime
def export_csv(table):
now = datetime.datetime.now().strftime('%Y-%m-%d %H.%M.%S') # string "now" is a formatted current time var
file_name = f"commits {now}.csv"
csv_file = open(file_name, 'w', newline='')
csv_write = csv.DictWriter(csv_file, ["SHA", "Message", "URL"]) # define the header of the CSV
csv_write.writeheader()
for row in table:
csv_write.writerow({"SHA": row[0], "Message": row[1], "URL": row[2]})
csv_file.close()
print(f'CSV file saved: "{os.path.join(os.getcwd(), file_name)}"')
def export_db(table):
now = datetime.datetime.now().strftime('%Y-%m-%d %H.%M.%S') # string "now" is a formatted current time var
connection = sqlite3.connect("commits.db") # establish connection with the database
cursor = connection.cursor() # a cursor object must be applied
# create new table named as up-to-date date & time
sql_command = f'''
CREATE TABLE "{now}" (
SHA VARCHAR(40),
Message VARCHAR(1000),
URL CHAR(100));'''
cursor.execute(sql_command)
for row in table:
# bind values as parameters so quotes in commit messages cannot break the statement
cursor.execute(f'INSERT INTO "{now}" (SHA, Message, URL) VALUES (?, ?, ?);',
(row[0], row[1], row[2]))
connection.commit() # save the changes in the database
connection.close()
print('Table "{}" saved in database "{}":\nyou can open/refresh it with a lightweight database manager.' \
.format(now, os.path.join(os.getcwd(), "commits.db")))
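A quick usage sketch, assuming `table` rows are (SHA, message, URL) triples like the commit tuples the surrounding project collects; the sample commit below is made up:

commits = [
    ("3f786850e387550fdab836ed7e6dc881de23001b", "Initial commit",
     "https://example.com/commit/3f78685"),
]
export_csv(commits)  # writes "commits <timestamp>.csv" in the working directory
export_db(commits)   # appends a timestamped table to commits.db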
| 41.342105
| 111
| 0.629535
|
0ec9e5457408399ae644ebde378afbf34c85c829
| 3,568
|
py
|
Python
|
FastSurferCNN/models/losses.py
|
Jinboasltw/FastSurfer
|
3c0330c459c221b85428d3ec2e95f5196aee3129
|
[
"Apache-2.0"
] | 257
|
2020-06-24T15:34:31.000Z
|
2022-03-30T04:24:18.000Z
|
FastSurferCNN/models/losses.py
|
Jinboasltw/FastSurfer
|
3c0330c459c221b85428d3ec2e95f5196aee3129
|
[
"Apache-2.0"
] | 92
|
2020-06-24T18:44:38.000Z
|
2022-03-31T15:15:31.000Z
|
FastSurferCNN/models/losses.py
|
Jinboasltw/FastSurfer
|
3c0330c459c221b85428d3ec2e95f5196aee3129
|
[
"Apache-2.0"
] | 62
|
2020-06-25T00:59:10.000Z
|
2022-03-10T10:12:09.000Z
|
# Copyright 2019 Image Analysis Lab, German Center for Neurodegenerative Diseases (DZNE), Bonn
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# IMPORTS
import torch
import torch.nn as nn
from torch.nn.modules.loss import _Loss
import torch.nn.functional as F
class DiceLoss(_Loss):
"""
Dice Loss
"""
def forward(self, output, target, weights=None, ignore_index=None):
"""
:param output: N x C x H x W Variable
:param target: N x H x W LongTensor with starting class at 0
:param weights: C FloatTensor with class wise weights
:param int ignore_index: ignore label with index x in the loss calculation
:return:
"""
eps = 0.001
encoded_target = output.detach() * 0
if ignore_index is not None:
mask = target == ignore_index
target = target.clone()
target[mask] = 0
encoded_target.scatter_(1, target.unsqueeze(1), 1)
mask = mask.unsqueeze(1).expand_as(encoded_target)
encoded_target[mask] = 0
else:
encoded_target.scatter_(1, target.unsqueeze(1), 1)
if weights is None:
weights = 1
intersection = output * encoded_target
numerator = 2 * intersection.sum(0).sum(1).sum(1)
denominator = output + encoded_target
if ignore_index is not None:
denominator[mask] = 0
denominator = denominator.sum(0).sum(1).sum(1) + eps
loss_per_channel = weights * (1 - (numerator / denominator)) # Channel-wise weights
return loss_per_channel.sum() / output.size(1)
class CrossEntropy2D(nn.Module):
"""
2D Cross-entropy loss implemented as negative log likelihood
"""
def __init__(self, weight=None, reduction='none'):
super(CrossEntropy2D, self).__init__()
self.nll_loss = nn.CrossEntropyLoss(weight=weight, reduction=reduction)
def forward(self, inputs, targets):
return self.nll_loss(inputs, targets)
class CombinedLoss(nn.Module):
"""
For CrossEntropy the input has to be a long tensor
Args:
-- inputx - N x C x H x W - float
-- target - N x H x W - int type
-- weight - N x H x W - float
"""
def __init__(self, weight_dice=1, weight_ce=1):
super(CombinedLoss, self).__init__()
self.cross_entropy_loss = CrossEntropy2D()
self.dice_loss = DiceLoss()
self.weight_dice = weight_dice
self.weight_ce = weight_ce
def forward(self, inputx, target, weight):
target = target.type(torch.LongTensor) # Typecast to long tensor
if inputx.is_cuda:
target = target.cuda()
input_soft = F.softmax(inputx, dim=1) # Along Class Dimension
dice_val = torch.mean(self.dice_loss(input_soft, target))
ce_val = torch.mean(torch.mul(self.cross_entropy_loss.forward(inputx, target), weight))
total_loss = torch.add(torch.mul(dice_val, self.weight_dice), torch.mul(ce_val, self.weight_ce))
return total_loss, dice_val, ce_val
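A minimal shape sanity check for the combined loss above; the batch, class count, and spatial sizes are arbitrary:

import torch
logits = torch.randn(2, 4, 16, 16)           # N x C x H x W raw network output
labels = torch.randint(0, 4, (2, 16, 16))    # N x H x W integer class map
pixel_w = torch.ones(2, 16, 16)              # N x H x W per-pixel CE weights
criterion = CombinedLoss(weight_dice=1, weight_ce=1)
total, dice_val, ce_val = criterion(logits, labels, pixel_w)
print(total.item(), dice_val.item(), ce_val.item())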
| 33.037037
| 104
| 0.653307
|
53c1b963c5ece49bcaf75e9e12322090fc7dcd3e
| 1,257
|
py
|
Python
|
magic_card_generator/dataset/collection/card_collector.py
|
joaopedromattos/MTGAN
|
9a48123d08650fb06e9a138364f98a675bf95451
|
[
"MIT"
] | 1
|
2020-11-18T18:47:25.000Z
|
2020-11-18T18:47:25.000Z
|
magic_card_generator/dataset/collection/card_collector.py
|
joaopedromattos/MTGan
|
9a48123d08650fb06e9a138364f98a675bf95451
|
[
"MIT"
] | null | null | null |
magic_card_generator/dataset/collection/card_collector.py
|
joaopedromattos/MTGan
|
9a48123d08650fb06e9a138364f98a675bf95451
|
[
"MIT"
] | 1
|
2021-12-22T15:53:42.000Z
|
2021-12-22T15:53:42.000Z
|
# -*- coding: utf-8 -*-
"""dataset_collection
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1lHElNaOJc6KguYAQuFrWGjVqDUUk3an8
"""
import os
from requests import get
import pandas as pd
import numpy as np
from tqdm import tqdm
os.system("wget https://mtgjson.com/api/v5/AllPrintingsCSVFiles.tar.gz && tar xvzf AllPrintingsCSVFiles.tar.gz")
cards = pd.read_csv('./AllPrintingsCSVFiles/cards.csv', low_memory=False)
cards_ids = cards.query('layout == "normal"')['scryfallId'].to_numpy()
API_ENDPOINT = "https://api.scryfall.com/cards/"
DATASET_FOLDER = "../raw/"
for i in tqdm(cards_ids, unit="image", initial=0):
# Get our response from the API_ENDPOINT
response = get(API_ENDPOINT + i)
# From the API_ENDPOINT, we retrieve the image url
# that contains our desired illustration
image_url = response.json()["image_uris"]['art_crop']
# Retrieving an image stream.
image_response = get(image_url)
# image_response will be a blob file stream,
# so we'll write it in a .jpg binary file (image)
file = open(DATASET_FOLDER + i + '.jpg', "wb")
file.write(image_response.content)
file.close()
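The loop above re-downloads everything on every run; a hedged variant that skips files already on disk and inserts a small delay between requests (Scryfall asks clients to rate-limit) could look like this. The function name and delay value are illustrative, not part of the original script:

import os
import time
from requests import get

def fetch_art(card_id, folder="../raw/"):
    # skip cards collected on a previous run so the job can resume cheaply
    target = os.path.join(folder, card_id + ".jpg")
    if os.path.exists(target):
        return
    data = get("https://api.scryfall.com/cards/" + card_id).json()
    art = get(data["image_uris"]["art_crop"]).content
    with open(target, "wb") as fh:
        fh.write(art)
    time.sleep(0.1)  # small pause between requests to stay polite to the API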
| 30.658537
| 112
| 0.692124
|
f67a04c9e56f2d42f6610ca910e69989c875a9ac
| 3,714
|
py
|
Python
|
src/backendServer/requestHandler/views.py
|
JiahongHe/Personal-Facial-Identification-System
|
11ed41a5afda02f46f04f539036fb1c27dc756d6
|
[
"MIT"
] | null | null | null |
src/backendServer/requestHandler/views.py
|
JiahongHe/Personal-Facial-Identification-System
|
11ed41a5afda02f46f04f539036fb1c27dc756d6
|
[
"MIT"
] | null | null | null |
src/backendServer/requestHandler/views.py
|
JiahongHe/Personal-Facial-Identification-System
|
11ed41a5afda02f46f04f539036fb1c27dc756d6
|
[
"MIT"
] | 1
|
2018-11-27T16:59:30.000Z
|
2018-11-27T16:59:30.000Z
|
from django.shortcuts import render
from django.http import HttpResponse
from requestHandler.models import User, Song, SystemSetting
from .forms import updateForm
import json
fields = ['FirstName',
'LastName',
'Email',
'passWord']
def requestInfo(request):
# API that returns all the necessary user information for facial recognition
# security measures might be implemented in the future.
users = User.objects.all()
result = {}
for user in users:
info = {}
info['firstName'] = user.FirstName
info['lastName'] = user.LastName
try:
info['image'] = user.Image.path
except Exception:
info['image'] = 'NULL'
try:
info['FavouriteSongName'] = user.FavouriteSong.SongName
info['FavouriteSongPath'] = user.FavouriteSong.File.path
except Exception:
info['FavouriteSongName'] = 'NULL'
info['FavouriteSongPath'] = 'NULL'
result[user.FirstName + user.LastName] = info
return HttpResponse(json.dumps(result), content_type="application/json")
def requestLoginInfo(request):
# API that returns all the necessary user information for user login
users = User.objects.all()
result = {}
for user in users:
info = {}
info['Email'] = user.Email
info['passWord'] = user.passWord
info['id'] = user.id
result[user.Email] = info
return HttpResponse(json.dumps(result), content_type="application/json")
def requestUpdateUserInfo(request):
if request.method == 'GET':
userId = request.GET["userId"]
user = User.objects.get(id=userId)
initials = {}
for field in fields:
initials[field] = user.__dict__[field]
initials['FavouriteSong'] = user.FavouriteSong
initials['Image'] = user.Image
initials['userId'] = user.id
initials['passWord'] = user.passWord
form = updateForm(initial=initials)
form.fields['Email'].widget.attrs['readonly'] = True
context = {"form": form}
return render(request, "requestHandler/updateInfo.html", context)
elif request.method == 'POST':
form = updateForm(request.POST, request.FILES)
if form.is_valid():
userId = form.cleaned_data['userId']
user = User.objects.get(id=userId)
if user is not None:
for field in fields:
user.__dict__[field] = form.cleaned_data[field]
user.save()
return HttpResponse("update saved!")
else:
return HttpResponse("user not found")
else:
return HttpResponse("invalid information")
def getSettings(request):
if request.method == 'GET':
settingObj = SystemSetting.objects.all()
result = {}
if (len(settingObj) > 0):
setting = settingObj[0]
result["defaultBehavior"] = setting.DefaultBehavior
result["defaultSong"] = setting.DefaultSong.SongName if setting.DefaultSong is not None else "Null"
return HttpResponse(json.dumps(result), content_type="application/json")
else:
result["defaultBehavior"] = "Null"
result["defaultSong"] = "NULL"
return HttpResponse(json.dumps(result), content_type="application/json")
def getSongs(request):
if request.method == 'GET':
result = {}
Songs = Song.objects.all()
for song in Songs:
result[song.SongName] = song.File.path if song.File is not None else "None"
return HttpResponse(json.dumps(result), content_type="application/json")
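A hedged sketch of exercising one of these views directly with Django's RequestFactory; this bypasses URL routing, so the path string is arbitrary, and a configured settings module with a reachable database is assumed:

from django.test import RequestFactory
factory = RequestFactory()
response = getSongs(factory.get("/songs/"))
print(response.content)  # JSON mapping of song names to file paths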
| 35.711538
| 111
| 0.609585
|
d9db9e3bca29f1e0c503c0193b72f2e95adbae9c
| 8,886
|
py
|
Python
|
polyaxon/query/parser.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
polyaxon/query/parser.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
polyaxon/query/parser.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
from collections import defaultdict, namedtuple
from query.exceptions import QueryParserException
class QueryOpSpec(namedtuple("QueryOpSpec", "op negation params")):
def items(self):
return self._asdict().items()
def parse_negation_operation(operation):
"""Parse the negation modifier in an operation."""
_operation = operation.strip()
if not _operation:
raise QueryParserException('Operation is not valid: {}'.format(operation))
negation = False
if _operation[0] == '~':
negation = True
_operation = _operation[1:]
return negation, _operation.strip()
def parse_comparison_operation(operation):
"""Parse the comparision operator in an operation."""
_operation = operation.strip()
if not _operation:
raise QueryParserException('Operation is not valid: {}'.format(operation))
# Check inclusion comparison
if _operation[:2] in ('<=', '=<'):
return '<=', _operation[2:].strip()
if _operation[:2] in ('>=', '=>'):
return '>=', _operation[2:].strip()
# Non inclusive
if _operation[:1] in ('>', '<'):
return _operation[:1], _operation[1:].strip()
return None, _operation
def parse_datetime_operation(operation):
"""Parse datetime operations.
A datetime operation can be one of the following:
* single value: start_date:2014-10-10, start_date:>2014-10-10, start_date:>=2014-10-10
* negation single value: start_date:~2014-10-10
* interval: start_date:2010-10-10 10:10 .. 2012-10-10
* negation interval: start_date:~2010-10-10 10:10 .. 2012-10-10
This parser does not allow `|`
"""
_operation = operation.strip()
if not _operation:
raise QueryParserException('Operation is not valid: {}'.format(operation))
# Check not allowed ops
if '|' in _operation:
raise QueryParserException('`|` is not allowed for datetime operations. '
'Operation: {}'.format(operation))
# Check negation
negation, _operation = parse_negation_operation(_operation)
# Check range operator
if '..' in _operation:
op = '..'
params = _operation.split('..')
params = [param.strip() for param in params if param]
if len(params) != 2:
raise QueryParserException('Expression is not valid, ranges requires only 2 params, '
'Operation: {}'.format(operation))
return QueryOpSpec(op, negation, params)
# Check comparison operators
op, _operation = parse_comparison_operation(_operation)
if not op:
# Now the operation must be an equality on a single param
op = '='
if not _operation:
raise QueryParserException('Expression is not valid, it must be formatted as '
'name:operation, '
'Operation: {}'.format(operation))
return QueryOpSpec(op, negation, _operation)
def parse_scalar_operation(operation):
"""Parse scalar operations.
A scalar operation can be one of the following:
* single value: start_date:12, metric1:>0.9, metric1:>=-0.12
* negation single value: metric1:~1112, metric1:~<1112 equivalent to metric1:>=1112
This parser does not allow `|` and `..`.
"""
_operation = operation.strip()
if not _operation:
raise QueryParserException('Operation is not valid: {}'.format(operation))
# Check not allowed ops
if '|' in _operation:
raise QueryParserException('`|` is not allowed for scalar operations. '
'Operation: {}'.format(operation))
if '..' in _operation:
raise QueryParserException('`..` is not allowed for scalar operations. '
'Operation: {}'.format(operation))
# Check negation
negation, _operation = parse_negation_operation(_operation)
# Check comparison operators
op, _operation = parse_comparison_operation(_operation)
if not op:
# Now the operation must be an equality on a single param
op = '='
# Check that params are scalar (int, float)
try:
_operation = int(_operation)
except (ValueError, TypeError):
try:
_operation = float(_operation)
except (ValueError, TypeError):
raise QueryParserException('Scalar operation requires int or float params, '
'receive {}.'.format(operation))
return QueryOpSpec(op, negation, _operation)
def parse_value_operation(operation):
"""Parse value operations.
A value operation can be one of the following:
* single value: tag1:foo
* negation single value: tag1:~foo
* multiple values: tag1:foo|bar|moo
* negation multiple values: tag1:~foo|bar|moo
This parser does not allow `..`, '>', '<', '>=', and '<='.
"""
_operation = operation.strip()
if not _operation:
raise QueryParserException('Operation is not valid: {}'.format(operation))
# Check range not allowed
if '..' in _operation:
raise QueryParserException('`..` is not allowed for value operations. '
'Operation: {}'.format(operation))
# Check negation
negation, _operation = parse_negation_operation(_operation)
# Check comparison not allowed
op, _operation = parse_comparison_operation(_operation)
if op:
raise QueryParserException('`{}` is not allowed for value operations, '
'Operation: {}'.format(op, operation))
# Check in operator
if '|' in _operation:
op = '|'
params = _operation.split('|')
params = [param.strip() for param in params if param.strip()]
if len(params) <= 1:
raise QueryParserException('`{}` is not allowed for value operations, '
'Operation: {}'.format(op, operation))
return QueryOpSpec(op, negation, params)
if not _operation:
raise QueryParserException('Expression is not valid, it must be formatted as '
'name:operation, '
'Operation: {}'.format(operation))
# Now the operation must be an equality on a single param
return QueryOpSpec('=', negation, _operation)
def parse_expression(expression):
"""Base parsing for expressions.
Every expression must follow a basic format:
`name:[modifier|operator]operation[*[operator]operation]`
So this parser just splits the expression into a field name and an operation.
"""
try:
_expression = expression.strip()
name, operation = _expression.split(':')
name = name.strip()
operation = operation.strip()
if not name or not operation:
raise ValueError
except (ValueError, AttributeError):
raise QueryParserException('Expression is not valid, it must be formatted as '
'name:operation, '
'Expression: {}'.format(expression))
return name, operation
def split_query(query):
"""Split a query into different expressions.
Example:
name:bla, foo:<=1
"""
try:
_query = query.strip()
except (ValueError, AttributeError):
raise QueryParserException('query is not valid, received instead {}'.format(query))
expressions = _query.split(',')
expressions = [exp.strip() for exp in expressions if exp.strip()]
if not expressions:
raise QueryParserException('Query is not valid: {}'.format(query))
return expressions
def tokenize_query(query):
"""Tokenizes a standard search query in name: operations mapping.
Example:
moo:bla, foo:~<=1, foo:ll..ff
{
'moo': ['bla'],
'foo': ['~<=1', 'll..ff']
}
"""
expressions = split_query(query)
name_operation_tuples = [parse_expression(expression) for expression in expressions]
operation_by_name = defaultdict(list)
for name, operation in name_operation_tuples:
operation_by_name[name].append(operation)
return operation_by_name
def parse_field(field):
"""Parses fields with underscores, and return field and suffix.
Example:
foo => foo, None
metric.foo => metric, foo
"""
_field = field.split('.')
_field = [f.strip() for f in _field]
if len(_field) == 1 and _field[0]:
return _field[0], None
elif len(_field) == 2 and _field[0] and _field[1]:
return _field[0], _field[1]
raise QueryParserException('Query field must be either a single value '
'or a dotted prefix.suffix pair. '
'Received `{}`'.format(field))
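A worked example of the grammar the parsers above accept; the field names are hypothetical:

ops = tokenize_query("created_at:2010-10-10 .. 2012-10-10, loss:~<=0.1, tag:foo|bar")
print(parse_datetime_operation(ops['created_at'][0]))
# QueryOpSpec(op='..', negation=False, params=['2010-10-10', '2012-10-10'])
print(parse_scalar_operation(ops['loss'][0]))
# QueryOpSpec(op='<=', negation=True, params=0.1)
print(parse_value_operation(ops['tag'][0]))
# QueryOpSpec(op='|', negation=False, params=['foo', 'bar'])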
| 34.710938
| 97
| 0.613324
|
64b89391e1d04b3916809d21ae58557b246b7448
| 964
|
py
|
Python
|
sdi/sprites/Animatable.py
|
dysfunctionals/sdi-display
|
cd7ce012de73034752a4efb70298200dd6d42a39
|
[
"MIT"
] | null | null | null |
sdi/sprites/Animatable.py
|
dysfunctionals/sdi-display
|
cd7ce012de73034752a4efb70298200dd6d42a39
|
[
"MIT"
] | 1
|
2020-02-23T11:57:04.000Z
|
2020-03-09T11:11:42.000Z
|
sdi/sprites/Animatable.py
|
dysfunctionals/sdi-display
|
cd7ce012de73034752a4efb70298200dd6d42a39
|
[
"MIT"
] | 1
|
2020-02-22T23:58:22.000Z
|
2020-02-22T23:58:22.000Z
|
import pygame, random
class Animatable(pygame.sprite.Sprite):
def __init__(self, files, image_count, delays, scale, init_y, init_x=1920):
super().__init__()
self.images = []
self.delays = delays
self.scale = (random.randint(7, 14) / 10) * scale  # random multiplier of the given scale; also used as the scroll speed
for suffix in range(image_count):
image = pygame.image.load(files + "_{}.png".format(suffix))
self.images.append(pygame.transform.scale(
image,
(
int(32 * self.scale),
int(32 * self.scale),
),
))
self.image = self.images[0]  # pygame.sprite.Group.draw blits each sprite's image
self.rect = self.image.get_rect()
self.rect.y = init_y
self.rect.x = init_x
self.x_pos = init_x
def update(self):
self.x_pos -= self.scale
self.rect.x = int(self.x_pos)
if self.rect.x <= 0:
self.x_pos = 1920
self.rect.y = random.randint(0, 1080)
| 29.212121
| 79
| 0.520747
|
1f9832c3ab921ba545a0efe448f2923b9b22ce06
| 6,392
|
py
|
Python
|
terra/tests/test_executor_celery.py
|
NoahRJohnson/terra
|
131954ee42fb5905ceff35101e34d89c5eb6de6c
|
[
"MIT"
] | null | null | null |
terra/tests/test_executor_celery.py
|
NoahRJohnson/terra
|
131954ee42fb5905ceff35101e34d89c5eb6de6c
|
[
"MIT"
] | null | null | null |
terra/tests/test_executor_celery.py
|
NoahRJohnson/terra
|
131954ee42fb5905ceff35101e34d89c5eb6de6c
|
[
"MIT"
] | null | null | null |
import sys
import os
import time
from unittest import mock, skipUnless
try:
import celery
except: # noqa
celery = None
from .utils import TestCase
@skipUnless(celery, "Celery not installed")
class TestCeleryConfig(TestCase):
def setUp(self):
self.patches.append(mock.patch.dict(os.environ,
TERRA_CWD=self.temp_dir.name))
self.patches.append(mock.patch.dict(os.environ,
TERRA_REDIS_SECRET_FILE='foo'))
with open(os.path.join(self.temp_dir.name, 'foo'), 'w') as fid:
fid.write('hiya')
super().setUp()
def tearDown(self):
super().tearDown()
if 'terra.executor.celery.celeryconfig' in sys.modules:
sys.modules.pop('terra.executor.celery.celeryconfig')
def test_no_redis_passwordfile(self):
os.remove(os.path.join(self.temp_dir.name, 'foo'))
with self.assertRaises(FileNotFoundError), self.assertLogs():
import terra.executor.celery.celeryconfig # noqa
def test_redis_passwordfile(self):
import terra.executor.celery.celeryconfig as cc
self.assertEqual(cc.password, 'hiya')
@mock.patch.dict(os.environ, TERRA_CELERY_INCLUDE='["foo", "bar"]')
def test_include(self):
import terra.executor.celery.celeryconfig as cc
self.assertEqual(cc.include, ['foo', 'bar'])
class MockAsyncResult:
def __init__(self, id, fun):
self.id = id
self.fun = fun
self.forgotten = False
def ready(self):
return True
state = 'SUCCESS'
def revoke(self):
self.state = 'REVOKED'
def get(self, *args, **kwargs):
return self.fun(self)
def forget(self):
self.forgotten = True
def test_factory():
def test(self):
return 17
test.apply_async = lambda args, kwargs: MockAsyncResult(1, test)
return test
@skipUnless(celery, "Celery not installed")
class TestCeleryExecutor(TestCase):
def setUp(self):
super().setUp()
from terra.executor.celery import CeleryExecutor
self.executor = CeleryExecutor(update_delay=0.001)
def tearDown(self):
super().tearDown()
self.executor._monitor_stopping = True
try:
self.executor._monitor.join()
except RuntimeError:
# Thread never started. Cannot join
pass
def wait_for_state(self, future, state):
for x in range(100):
time.sleep(0.001)
if future._state == state:
break
if x == 99:
raise TimeoutError(f'Took longer than 100 ms of 1 ms polls for '
f'{future._state} to become {state}')
def test_simple(self):
test = test_factory()
future = self.executor.submit(test)
future.result()
def test_cancel(self):
test = test_factory()
future = self.executor.submit(test)
future._ar.state = 'RECEIVED'
# Cancels!
self.assertTrue(future.cancel())
self.assertEqual(future._state, 'CANCELLED')
self.assertEqual(future._ar.state, 'REVOKED')
def test_cancel_uncancellable(self):
test = test_factory()
future = self.executor.submit(test)
future._ar.state = 'RECEIVED'
# Make revoking fail
future._ar.revoke = lambda: True
# Fails to cancel
self.assertFalse(future.cancel())
self.assertEqual(future._state, 'PENDING')
self.assertEqual(future._ar.state, 'RECEIVED')
def test_cancel_running(self):
test = test_factory()
future = self.executor.submit(test)
future._ar.state = 'RUNNING'
future._state = 'RUNNING'
# Fails to cancel
self.assertFalse(future.cancel())
self.assertEqual(future._state, 'RUNNING')
self.assertEqual(future._ar.state, 'RUNNING')
def test_update_futures_running(self):
test = test_factory()
future = self.executor.submit(test)
self.assertFalse(future.running())
future._ar.state = 'RUNNING'
self.wait_for_state(future, 'RUNNING')
self.assertTrue(future.running())
def test_update_futures_finish(self):
test = test_factory()
future = self.executor.submit(test)
future._state = 'FINISHED'
self.assertEqual(len(self.executor._futures), 1)
for x in range(100):
time.sleep(0.001)
if not len(self.executor._futures):
break
if x == 99:
raise TimeoutError('Took longer than 100 ms of 1 ms polls')
def test_update_futures_revoked(self):
test = test_factory()
future = self.executor.submit(test)
self.assertFalse(future.cancelled())
future._ar.state = 'REVOKED'
self.wait_for_state(future, 'CANCELLED_AND_NOTIFIED')
self.assertTrue(future.cancelled())
def test_update_futures_success(self):
test = test_factory()
future = self.executor.submit(test)
self.assertIsNone(future._result)
future._ar.state = 'SUCCESS'
self.wait_for_state(future, 'FINISHED')
self.assertEqual(future._result, 17)
def test_update_futures_failure(self):
test = test_factory()
future = self.executor.submit(test)
self.assertIsNone(future._result)
future._ar.state = 'FAILURE'
future._ar.result = TypeError('On no')
self.wait_for_state(future, 'FINISHED')
def test_shutdown(self):
test = test_factory()
self.assertEqual(self.executor.submit(test).result(), 17)
self.executor.shutdown()
with self.assertRaisesRegex(RuntimeError, "cannot .* after shutdown"):
self.executor.submit(test)
# def test_import(self):
# import terra.executor.celery
# from celery._state import _apps
# print([a for a in _apps])
# # This is too hard to test, unittest.discover does something funky, celery
# # Is double registered. And I don't know how to stop it. The point of this
# # test was going to be to to be able to write a test to verify the celery app
# # is registered, but that's out of window now.
# @skipUnless(celery, "Celery not installed")
# class TestUnitTests(TestCase):
# def last_test_celery_apps(self):
# self.assertEqual(
# len(celery._state._apps), 0,
# msg="If you are seeing this, one of the other unit tests has "
# "initialized the celery apps. This side effect should be "
# "prevented by mocking out the celary._state._apps, or "
# "'mock.patch.???'. Otherwise unit tests can "
# "interfere with each other. Add 'import traceback; "
# "traceback.print_stack()' to terra/executor/celery/__init__.py "
# "to see who is importing the app")
| 29.054545
| 79
| 0.673967
|
0804b774727a1f200928d93df073c5ad646f3599
| 1,822
|
py
|
Python
|
src/eventgrid/azext_eventgrid/mgmt/eventgrid/models/storage_queue_event_subscription_destination_py3.py
|
mayank88mahajan/azure-cli-extensions
|
8bd389a1877bffd14052bec5519ce75dc6fc34cf
|
[
"MIT"
] | 1
|
2019-05-10T19:58:09.000Z
|
2019-05-10T19:58:09.000Z
|
src/eventgrid/azext_eventgrid/mgmt/eventgrid/models/storage_queue_event_subscription_destination_py3.py
|
mayank88mahajan/azure-cli-extensions
|
8bd389a1877bffd14052bec5519ce75dc6fc34cf
|
[
"MIT"
] | 2
|
2019-10-02T23:37:38.000Z
|
2020-10-02T01:17:31.000Z
|
src/eventgrid/azext_eventgrid/mgmt/eventgrid/models/storage_queue_event_subscription_destination_py3.py
|
mayank88mahajan/azure-cli-extensions
|
8bd389a1877bffd14052bec5519ce75dc6fc34cf
|
[
"MIT"
] | 1
|
2019-06-17T22:18:23.000Z
|
2019-06-17T22:18:23.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .event_subscription_destination_py3 import EventSubscriptionDestination
class StorageQueueEventSubscriptionDestination(EventSubscriptionDestination):
"""Information about the storage queue destination for an event subscription.
All required parameters must be populated in order to send to Azure.
:param endpoint_type: Required. Constant filled by server.
:type endpoint_type: str
:param resource_id: The Azure Resource ID of the storage account that
contains the queue that is the destination of an event subscription.
:type resource_id: str
:param queue_name: The name of the Storage queue under a storage account
that is the destination of an event subscription.
:type queue_name: str
"""
_validation = {
'endpoint_type': {'required': True},
}
_attribute_map = {
'endpoint_type': {'key': 'endpointType', 'type': 'str'},
'resource_id': {'key': 'properties.resourceId', 'type': 'str'},
'queue_name': {'key': 'properties.queueName', 'type': 'str'},
}
def __init__(self, *, resource_id: str=None, queue_name: str=None, **kwargs) -> None:
super(StorageQueueEventSubscriptionDestination, self).__init__(**kwargs)
self.resource_id = resource_id
self.queue_name = queue_name
self.endpoint_type = 'StorageQueue'
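# Usage sketch (illustrative only; the resource ID below is a hypothetical
# placeholder, not part of the original file):
#   dest = StorageQueueEventSubscriptionDestination(
#       resource_id='<storage-account-resource-id>',
#       queue_name='myqueue')
#   assert dest.endpoint_type == 'StorageQueue'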
| 40.488889
| 89
| 0.655873
|
fb78ee438fb9053d53b21663fbe0b4b37d4389ac
| 5,811
|
py
|
Python
|
gym/envs/toy_text/frozen_lake.py
|
afernandezcanosa/gym
|
11c3b77ea0afd467b51e4f2c73e47e103ef128e9
|
[
"Python-2.0",
"OLDAP-2.7"
] | 6
|
2020-03-25T07:58:24.000Z
|
2021-12-26T05:53:32.000Z
|
gym/envs/toy_text/frozen_lake.py
|
afernandezcanosa/gym
|
11c3b77ea0afd467b51e4f2c73e47e103ef128e9
|
[
"Python-2.0",
"OLDAP-2.7"
] | 2
|
2020-03-20T18:10:49.000Z
|
2021-11-29T12:27:51.000Z
|
gym/envs/toy_text/frozen_lake.py
|
afernandezcanosa/gym
|
11c3b77ea0afd467b51e4f2c73e47e103ef128e9
|
[
"Python-2.0",
"OLDAP-2.7"
] | 3
|
2022-02-03T16:40:08.000Z
|
2022-02-06T17:28:11.000Z
|
import sys
from contextlib import closing
import numpy as np
from six import StringIO, b
from gym import utils
from gym.envs.toy_text import discrete
LEFT = 0
DOWN = 1
RIGHT = 2
UP = 3
MAPS = {
"4x4": [
"SFFF",
"FHFH",
"FFFH",
"HFFG"
],
"8x8": [
"SFFFFFFF",
"FFFFFFFF",
"FFFHFFFF",
"FFFFFHFF",
"FFFHFFFF",
"FHHFFFHF",
"FHFFHFHF",
"FFFHFFFG"
],
}
# Generates a random valid map (one that has a path from start to goal)
# @param size: size of each side of the grid
# @param p: probability that a tile is frozen
def generate_random_map(size=8, p=0.8):
valid = False
    # DFS to check that there's a valid path from start to goal
def is_valid(arr, r=0, c=0):
if arr[r][c] == 'G':
return True
tmp = arr[r][c]
arr[r][c] = "#"
        # Explore the four neighbours (down, right, up, left), skipping
        # visited ('#') and hole ('H') tiles.
        for new_r, new_c in ((r + 1, c), (r, c + 1), (r - 1, c), (r, c - 1)):
            if 0 <= new_r < size and 0 <= new_c < size \
                    and arr[new_r][new_c] not in '#H':
                if is_valid(arr, new_r, new_c):
                    arr[r][c] = tmp
                    return True
        arr[r][c] = tmp
        return False
while not valid:
p = min(1, p)
res = np.random.choice(['F','H'], (size, size), p=[p, 1-p])
res[0][0] = 'S'
res[-1][-1] = 'G'
valid = is_valid(res)
return ["".join(x) for x in res]
class FrozenLakeEnv(discrete.DiscreteEnv):
"""
Winter is here. You and your friends were tossing around a frisbee at the park
when you made a wild throw that left the frisbee out in the middle of the lake.
The water is mostly frozen, but there are a few holes where the ice has melted.
If you step into one of those holes, you'll fall into the freezing water.
At this time, there's an international frisbee shortage, so it's absolutely imperative that
you navigate across the lake and retrieve the disc.
However, the ice is slippery, so you won't always move in the direction you intend.
The surface is described using a grid like the following
SFFF
FHFH
FFFH
HFFG
S : starting point, safe
F : frozen surface, safe
H : hole, fall to your doom
G : goal, where the frisbee is located
The episode ends when you reach the goal or fall in a hole.
You receive a reward of 1 if you reach the goal, and zero otherwise.
"""
metadata = {'render.modes': ['human', 'ansi']}
    def __init__(self, desc=None, map_name="4x4", is_slippery=True):
if desc is None and map_name is None:
desc = generate_random_map()
elif desc is None:
desc = MAPS[map_name]
self.desc = desc = np.asarray(desc,dtype='c')
self.nrow, self.ncol = nrow, ncol = desc.shape
self.reward_range = (0, 1)
nA = 4
nS = nrow * ncol
isd = np.array(desc == b'S').astype('float64').ravel()
isd /= isd.sum()
P = {s : {a : [] for a in range(nA)} for s in range(nS)}
def to_s(row, col):
return row*ncol + col
def inc(row, col, a):
if a == LEFT:
col = max(col-1,0)
elif a == DOWN:
row = min(row+1,nrow-1)
elif a == RIGHT:
col = min(col+1,ncol-1)
elif a == UP:
row = max(row-1,0)
return (row, col)
for row in range(nrow):
for col in range(ncol):
s = to_s(row, col)
for a in range(4):
li = P[s][a]
letter = desc[row, col]
if letter in b'GH':
li.append((1.0, s, 0, True))
else:
if is_slippery:
for b in [(a-1)%4, a, (a+1)%4]:
newrow, newcol = inc(row, col, b)
newstate = to_s(newrow, newcol)
newletter = desc[newrow, newcol]
done = bytes(newletter) in b'GH'
rew = float(newletter == b'G')
li.append((1.0/3.0, newstate, rew, done))
else:
newrow, newcol = inc(row, col, a)
newstate = to_s(newrow, newcol)
newletter = desc[newrow, newcol]
done = bytes(newletter) in b'GH'
rew = float(newletter == b'G')
li.append((1.0, newstate, rew, done))
super(FrozenLakeEnv, self).__init__(nS, nA, P, isd)
def render(self, mode='human'):
outfile = StringIO() if mode == 'ansi' else sys.stdout
row, col = self.s // self.ncol, self.s % self.ncol
desc = self.desc.tolist()
desc = [[c.decode('utf-8') for c in line] for line in desc]
desc[row][col] = utils.colorize(desc[row][col], "red", highlight=True)
if self.lastaction is not None:
outfile.write(" ({})\n".format(["Left","Down","Right","Up"][self.lastaction]))
else:
outfile.write("\n")
outfile.write("\n".join(''.join(line) for line in desc)+"\n")
if mode != 'human':
with closing(outfile):
return outfile.getvalue()
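# Minimal usage sketch (illustrative, not part of the original module),
# assuming the classic gym DiscreteEnv API: reset() returns a state and
# step() returns (state, reward, done, info).
if __name__ == '__main__':
    env = FrozenLakeEnv(map_name="4x4", is_slippery=False)
    state = env.reset()
    state, reward, done, info = env.step(RIGHT)
    env.render()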
| 32.104972
| 95
| 0.487352
|
4fafa8db09c25775d999498635af4b5c3894a651
| 5,043
|
py
|
Python
|
tests/printing/test_configure_printing.py
|
anna-naden/qalgebra
|
e7641ef77a2433caf2f587df27235800b894b631
|
[
"MIT"
] | 2
|
2020-08-17T12:18:19.000Z
|
2020-08-25T11:17:27.000Z
|
tests/printing/test_configure_printing.py
|
anna-naden/qalgebra
|
e7641ef77a2433caf2f587df27235800b894b631
|
[
"MIT"
] | 1
|
2022-01-13T10:29:18.000Z
|
2022-01-13T10:29:18.000Z
|
tests/printing/test_configure_printing.py
|
anna-naden/qalgebra
|
e7641ef77a2433caf2f587df27235800b894b631
|
[
"MIT"
] | null | null | null |
import pytest
from sympy import atan, symbols
from qalgebra.core.operator_algebra import LocalSigma, OperatorSymbol
from qalgebra.core.state_algebra import CoherentStateKet
from qalgebra.printing import (
ascii,
configure_printing,
init_printing,
latex,
unicode,
)
def test_sympy_tex_cached():
"""Test that we can use the cache to change how sub-expressions of sympy
are printed in tex"""
a = symbols('a')
A = OperatorSymbol("A", hs=1)
expr = (a ** 2 / 2) * A
assert latex(expr) == r'\frac{a^{2}}{2} \hat{A}^{(1)}'
cache = {a: r'\alpha'}
assert latex(expr, cache=cache) == r'\frac{\alpha^{2}}{2} \hat{A}^{(1)}'
def test_sympy_setting():
"""Test that we can pass settings to the sympy sub-printer"""
x = symbols('a')
A = OperatorSymbol("A", hs=1)
expr = atan(x) * A
assert latex(expr) == r'\operatorname{atan}{\left(a \right)} \hat{A}^{(1)}'
assert (
latex(expr, inv_trig_style='full')
== r'\arctan{\left(a \right)} \hat{A}^{(1)}'
)
def test_custom_options():
"""Test giving options to print routines or using configure_printing"""
A = OperatorSymbol('A', hs=1)
CNOT = OperatorSymbol('CNOT', hs=1)
sig = LocalSigma(0, 1, hs=1)
ket = CoherentStateKet(symbols('alpha'), hs=1)
assert ascii(A) == r'A^(1)'
assert ascii(A, show_hs_label=False) == 'A'
with pytest.raises(TypeError) as exc_info:
ascii(A, some_bogus_option=False)
assert "not a valid setting" in str(exc_info.value)
assert ascii(sig) == r'|0><1|^(1)'
assert ascii(ket) == r'|alpha=alpha>^(1)'
assert unicode(A) == r'Â⁽¹⁾'
assert unicode(sig) == r'|0⟩⟨1|⁽¹⁾'
assert unicode(ket) == r'|α=α⟩⁽¹⁾'
assert latex(A) == r'\hat{A}^{(1)}'
assert (
latex(sig)
== r'\left\lvert 0 \middle\rangle\!\middle\langle 1 \right\rvert^{(1)}'
)
assert latex(ket) == r'\left\lvert \alpha=\alpha \right\rangle^{(1)}'
with configure_printing(
unicode_op_hats=False, tex_op_macro=r'\Op{{{name}}}'
):
assert unicode(A) == r'A⁽¹⁾'
assert latex(A) == r'\Op{A}^{(1)}'
with configure_printing(show_hs_label=False):
assert ascii(A) == r'A'
assert ascii(sig) == r'|0><1|'
assert ascii(ket) == r'|alpha=alpha>'
assert unicode(A) == r'Â'
assert unicode(sig) == r'|0⟩⟨1|'
assert unicode(ket) == r'|α=α⟩'
assert latex(A) == r'\hat{A}'
assert latex(A, show_hs_label=True) == r'\hat{A}^{(1)}'
assert latex(A) == r'\hat{A}'
assert (
latex(sig)
== r'\left\lvert 0 \middle\rangle\!\middle\langle 1 \right\rvert'
)
assert latex(ket) == r'\left\lvert \alpha=\alpha \right\rangle'
assert latex(CNOT) == r'\text{CNOT}^{(1)}'
with configure_printing(tex_textop_macro=r'\Op{{{name}}}'):
assert latex(CNOT) == r'\Op{CNOT}^{(1)}'
init_printing(show_hs_label=False)
assert unicode(A) == r'Â'
assert latex(A) == r'\hat{A}'
with configure_printing(
unicode_op_hats=False, tex_op_macro=r'\Op{{{name}}}'
):
assert unicode(A) == r'A'
assert latex(A) == r'\Op{A}'
with configure_printing(tex_op_macro=r'\op{{{name}}}'):
assert unicode(A) == r'A'
assert latex(A) == r'\op{A}'
with configure_printing(tex_use_braket=True):
assert latex(sig) == r'\Ket{0}\!\Bra{1}'
assert unicode(A) == r'Â'
assert latex(A) == r'\hat{A}'
init_printing(reset=True)
assert ascii(A) == r'A^(1)'
assert ascii(sig) == r'|0><1|^(1)'
assert ascii(ket) == r'|alpha=alpha>^(1)'
assert unicode(A) == r'Â⁽¹⁾'
assert unicode(sig) == r'|0⟩⟨1|⁽¹⁾'
assert unicode(ket) == r'|α=α⟩⁽¹⁾'
assert latex(A) == r'\hat{A}^{(1)}'
assert (
latex(sig)
== r'\left\lvert 0 \middle\rangle\!\middle\langle 1 \right\rvert^{(1)}'
)
assert latex(ket) == r'\left\lvert \alpha=\alpha \right\rangle^{(1)}'
def test_custom_repr():
A = OperatorSymbol('A', hs=1)
assert repr(A) in ['Â⁽¹⁾', 'A^(1)']
init_printing(repr_format='srepr', reset=True)
assert repr(A) == "OperatorSymbol('A', hs=LocalSpace('1'))"
init_printing(reset=True)
assert repr(A) in ['Â⁽¹⁾', 'A^(1)']
with configure_printing(repr_format='srepr'):
assert repr(A) == "OperatorSymbol('A', hs=LocalSpace('1'))"
assert repr(A) in ['Â⁽¹⁾', 'A^(1)']
def test_exception_teardown():
"""Test that teardown works when breaking out due to an exception"""
class ConfigurePrintingException(Exception):
pass
init_printing(show_hs_label=True, repr_format='ascii')
try:
with configure_printing(show_hs_label=False, repr_format='srepr'):
raise ConfigurePrintingException
except ConfigurePrintingException:
A = OperatorSymbol('A', hs=1)
assert repr(A) == 'A^(1)'
finally:
# Even if this failed we don't want to make a mess for other tests
init_printing(reset=True)
| 33.845638
| 79
| 0.587151
|
a38f371772dfd4865cc1313dd8f650d73e287d83
| 736
|
py
|
Python
|
U9/EJ10U9.py
|
jdmejiav/python-unal-execises
|
4bfe3c6caed8b20bc27eb015e9f74f059355da54
|
[
"MIT"
] | null | null | null |
U9/EJ10U9.py
|
jdmejiav/python-unal-execises
|
4bfe3c6caed8b20bc27eb015e9f74f059355da54
|
[
"MIT"
] | null | null | null |
U9/EJ10U9.py
|
jdmejiav/python-unal-execises
|
4bfe3c6caed8b20bc27eb015e9f74f059355da54
|
[
"MIT"
] | null | null | null |
archivo = open('conversaciones.txt','r')
for renglon in archivo:
renglon = renglon.replace('\n','').lower()
opositivos = 0
causativos = 0
if "sin embargo" in renglon:
opositivos+=1
if "no obstante" in renglon:
opositivos+=1
if "ahora bien" in renglon:
opositivos+=1
if "aun asi" in renglon:
opositivos+=1
if "por tanto" in renglon:
causativos+=1
if "dado que" in renglon:
causativos+=1
if "por consiguiente" in renglon:
causativos+=1
if "asi pues" in renglon:
causativos+=1
if "por ende" in renglon:
causativos+=1
print("Opositivos "+str(opositivos)+" Causativos "+str(causativos))
archivo.close()
| 18.4
| 71
| 0.59375
|
eb3cc27e3a4161247877c6d8a1d06d32f9eaae79
| 449
|
py
|
Python
|
equibel/__init__.py
|
asteroidhouse/equibel
|
476d58e4c8d4b5aabe4b94b650e6d326acb41ec8
|
[
"MIT"
] | 4
|
2015-05-19T04:31:52.000Z
|
2019-08-13T23:57:29.000Z
|
equibel/__init__.py
|
asteroidhouse/equibel
|
476d58e4c8d4b5aabe4b94b650e6d326acb41ec8
|
[
"MIT"
] | null | null | null |
equibel/__init__.py
|
asteroidhouse/equibel
|
476d58e4c8d4b5aabe4b94b650e6d326acb41ec8
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
import equibel.graph
from equibel.graph import *
import equibel.graphgen
from equibel.graphgen import *
import equibel.formulagen
from equibel.formulagen import *
import equibel.formatters
from equibel.formatters import *
import equibel.parsers
from equibel.parsers import *
import equibel.solver
from equibel.solver import *
try:
import equibel.draw
from equibel.draw import *
except ImportError:
    # drawing support is optional; skip it if its dependencies are missing
    pass
| 17.269231
| 38
| 0.797327
|
de7f62792396e0e054c3dcbdd6da37f0d2bbf438
| 5,618
|
py
|
Python
|
Main.py
|
david-c-stein/Python-Multiprocessing
|
f55000ad0e0e0280a28a55247d03a37df8630922
|
[
"MIT"
] | 2
|
2020-05-06T08:55:23.000Z
|
2020-10-13T15:37:59.000Z
|
Main.py
|
david-c-stein/Python-Multiprocessing
|
f55000ad0e0e0280a28a55247d03a37df8630922
|
[
"MIT"
] | null | null | null |
Main.py
|
david-c-stein/Python-Multiprocessing
|
f55000ad0e0e0280a28a55247d03a37df8630922
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import datetime
import getopt
import inspect
import json
import os
import platform
import sys
import time
import threading
from Global import __MULTIPROCESSING__
__version__ = "0.1"
if __MULTIPROCESSING__:
import multiprocessing
from multiprocessing import Queue
from multiprocessing import Array
else:
if sys.version_info[0] < 3:
from Queue import Queue
else:
from queue import Queue
import Logger
starttime = datetime.datetime.now().strftime("%b %d %Y %H:%M:%S")
#-----------------------
class myApp(object):
logger = None
logconfig = None
pTwo = None # Worker Two thread/process
pThr = None # Worker Thr thread/process
def main(self, argv):
self.logger = Logger.logging.getLogger(__name__)
self.logconfig = Logger.logconfig
self.logger.info("Start time: " + starttime)
# parse command line arguments
try:
opts, args = getopt.getopt(argv, "h", ["help"])
except getopt.GetoptError as e:
self.logger.exception(str(e))
self.usage()
return
for opt, arg in opts:
if opt in ("-h", "--help"):
self.usage()
return
else:
self.usage()
return
        # initialize and run
        self.initialize()
self.start()
#-----------------------
    def initialize(self):
try:
# identify platform
self.logger.info("------------------------------")
self.logger.info(" machine: " + platform.machine())
self.logger.info(" version: " + platform.version())
self.logger.info(" platform: " + platform.platform())
self.logger.info(" system: " + platform.system())
self.logger.info("processor: " + platform.processor())
if __MULTIPROCESSING__:
self.logger.info(" cores: " + str(multiprocessing.cpu_count()))
self.logger.info(" nodes: " + platform.node())
self.logger.info("PythonImp: " + platform.python_implementation())
self.logger.info("PythonVer: " + platform.python_version())
self.logger.info("starttime: " + starttime)
self.logger.info("scriptver: " + __version__)
self.logger.info("------------------------------")
# initialize queues
if __MULTIPROCESSING__:
self.qOne = multiprocessing.Queue()
self.qTwo = multiprocessing.Queue()
self.qThr = multiprocessing.Queue()
else:
self.qOne = Queue()
self.qTwo = Queue()
self.qThr = Queue()
# initialize 'two' process
try:
import Two
self.pTwo = Two.Two(self.logger, self.logconfig, self.qOne, self.qTwo, self.qThr)
except Exception as e:
self.logger.exception(e)
print( "Two Initialization Error: " + str(e) )
# initialize 'three' process
try:
import Three
self.pThr = Three.Three(self.logger, self.logconfig, self.qOne, self.qTwo, self.qThr)
except Exception as e:
self.logger.exception(e)
print( "Three Initialization Error: " + str(e) )
# Queue for main process
self.getMsgQue = self.qOne
self.putMsgTwo = self.qTwo.put
self.putMsgThr = self.qThr.put
self.RUNNING = True
except Exception as e:
self.logger.exception(e)
#-----------------------
def start(self):
try:
# start two
self.pTwo.start()
# start three
self.pThr.start()
simpleCnt = 0
while self.RUNNING:
try:
#-----------------------
# process main
if (not self.getMsgQue.empty()):
msg = self.getMsgQue.get()
                        self.logger.debug('Main : ' + str(msg))
                        if msg is not None:
                            event = msg['event']
                            data = msg['data']  # renamed from 'type' to avoid shadowing the builtin
else:
time.sleep(.2)
simpleCnt += 1
if (simpleCnt % 6):
msgOne = { 'event' : 'print',
'data' : ['Hello from Main', 'two', 3, 4, 'V', 'VI', 'VII', 8, 'nine']}
self.putMsgTwo( msgOne )
if (simpleCnt > 30):
simpleCnt = 0
except (KeyboardInterrupt, SystemExit):
self.logger.info("Interrupted")
self.stop()
except Exception as e:
self.logger.exception(str(e))
self.stop()
except Exception as e:
self.logger.exception(str(e))
self.stop()
finally:
self.logger.info("Exiting")
#-----------------------
def stop(self):
# stop processes
if(self.pTwo != None):
self.pTwo.stop()
if(self.pThr != None):
self.pThr.stop()
if(self.pTwo != None):
self.pTwo.join()
if(self.pThr != None):
self.pThr.join()
self.RUNNING = False
if __name__ == '__main__':
myApp().main(sys.argv[1:])
| 29.108808
| 110
| 0.475614
|
df03ae652dbbe5434783b31c79cb37355cd83f86
| 1,065
|
py
|
Python
|
Shishkin_Anatoliy_lesson_10/actions/my_iterators.py
|
amilykh/2074_GB_Python_1-
|
0112ad710154623ad1dafffbdb413caeff424813
|
[
"MIT"
] | 3
|
2022-01-28T15:53:22.000Z
|
2022-03-07T15:58:14.000Z
|
Shishkin_Anatoliy_lesson_10/actions/my_iterators.py
|
amilykh/2074_GB_Python_1-
|
0112ad710154623ad1dafffbdb413caeff424813
|
[
"MIT"
] | 5
|
2022-01-24T11:08:42.000Z
|
2022-03-03T00:15:22.000Z
|
Shishkin_Anatoliy_lesson_10/actions/my_iterators.py
|
amilykh/2074_GB_Python_1-
|
0112ad710154623ad1dafffbdb413caeff424813
|
[
"MIT"
] | 18
|
2022-01-18T05:56:00.000Z
|
2022-02-28T10:30:18.000Z
|
class IterObj: ...  # just a stub class
class IteratorEnergy:
"""Объект-итератор, имитация иссякающей энергии"""
def __init__(self, start=10):
self.i = start + 1
    # An iterator has a __next__ method
def __next__(self):
self.i -= 1
if self.i >= 1:
return self.i
else:
raise StopIteration
class Accumulator(IterObj):
"""Объект Аккумулятор, поддерживающий интерфейс
итерации (итерируемый объект)"""
def __init__(self, energy=6):
self.energy = energy
def __iter__(self):
        # The __iter__ method must return an iterator object
return IteratorEnergy(self.energy)
class Battery(IterObj):
"""Объект Батарейка, независимый итерируемый объект"""
def __init__(self, energy=3):
self.i = energy + 1
    # The __iter__ method must return an iterator object
def __iter__(self):
return self
def __next__(self):
self.i -= 1
if self.i >= 1:
return self.i
else:
raise StopIteration
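# Usage sketch (illustrative, not part of the original module): an
# Accumulator can be iterated repeatedly because __iter__ returns a fresh
# IteratorEnergy each time, while a Battery is exhausted after one pass
# since it acts as its own iterator.
if __name__ == '__main__':
    acc = Accumulator(3)
    print(list(acc), list(acc))  # [3, 2, 1] [3, 2, 1]
    bat = Battery(3)
    print(list(bat), list(bat))  # [3, 2, 1] []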
| 23.666667
| 58
| 0.611268
|
46cf8327d4cfcab196e5153b443a24c1ba6cfd68
| 1,045
|
py
|
Python
|
plaidml/bridge/tensorflow/tests/util.py
|
hfp/plaidml
|
c86852a910e68181781b3045f5a306d2f41a775f
|
[
"Apache-2.0"
] | null | null | null |
plaidml/bridge/tensorflow/tests/util.py
|
hfp/plaidml
|
c86852a910e68181781b3045f5a306d2f41a775f
|
[
"Apache-2.0"
] | null | null | null |
plaidml/bridge/tensorflow/tests/util.py
|
hfp/plaidml
|
c86852a910e68181781b3045f5a306d2f41a775f
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from plaidml.bridge.tensorflow.tests import archive_py_generated as schema
dataTypeMap = {
'int8': (schema.I8DataT, schema.Data.I8Data),
'int16': (schema.I16DataT, schema.Data.I16Data),
'int32': (schema.I32DataT, schema.Data.I32Data),
'int64': (schema.I64DataT, schema.Data.I64Data),
'uint8': (schema.U8DataT, schema.Data.U8Data),
'uint16': (schema.U16DataT, schema.Data.U16Data),
'uint32': (schema.U32DataT, schema.Data.U32Data),
'uint64': (schema.U64DataT, schema.Data.U64Data),
'float16': (schema.F16DataT, schema.Data.F16Data),
'float32': (schema.F32DataT, schema.Data.F32Data),
'float64': (schema.F64DataT, schema.Data.F64Data),
}
def convertBuffer(name, src):
dtype = dataTypeMap.get(str(src.dtype))
if not dtype:
raise Exception('Unknown dtype: {}'.format(src.dtype))
data = dtype[0]()
data.data = np.ndarray.flatten(src)
buffer = schema.BufferT()
buffer.name = name
buffer.dataType = dtype[1]
buffer.data = data
return buffer
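# Usage sketch (illustrative; assumes a float32 numpy array):
#   buf = convertBuffer('weights', np.zeros((2, 3), dtype=np.float32))
#   assert buf.dataType == schema.Data.F32Data and buf.name == 'weights'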
| 33.709677
| 74
| 0.680383
|
1deb15ba03296cdc428e63f9c302e67ff93a5373
| 10,051
|
py
|
Python
|
src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py
|
geffzhang/grpc
|
3ca1872927be87e3acb5230080990291d9393c5a
|
[
"BSD-3-Clause"
] | null | null | null |
src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py
|
geffzhang/grpc
|
3ca1872927be87e3acb5230080990291d9393c5a
|
[
"BSD-3-Clause"
] | null | null | null |
src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py
|
geffzhang/grpc
|
3ca1872927be87e3acb5230080990291d9393c5a
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test a corner-case at the level of the Cython API."""
import threading
import unittest
from grpc._cython import cygrpc
_INFINITE_FUTURE = cygrpc.Timespec(float('+inf'))
_EMPTY_FLAGS = 0
_EMPTY_METADATA = ()
class _ServerDriver(object):
def __init__(self, completion_queue, shutdown_tag):
self._condition = threading.Condition()
self._completion_queue = completion_queue
self._shutdown_tag = shutdown_tag
self._events = []
self._saw_shutdown_tag = False
def start(self):
def in_thread():
while True:
event = self._completion_queue.poll()
with self._condition:
self._events.append(event)
self._condition.notify()
if event.tag is self._shutdown_tag:
self._saw_shutdown_tag = True
break
thread = threading.Thread(target=in_thread)
thread.start()
def done(self):
with self._condition:
return self._saw_shutdown_tag
def first_event(self):
with self._condition:
while not self._events:
self._condition.wait()
return self._events[0]
def events(self):
with self._condition:
while not self._saw_shutdown_tag:
self._condition.wait()
return tuple(self._events)
class _QueueDriver(object):
def __init__(self, condition, completion_queue, due):
self._condition = condition
self._completion_queue = completion_queue
self._due = due
self._events = []
self._returned = False
def start(self):
def in_thread():
while True:
event = self._completion_queue.poll()
with self._condition:
self._events.append(event)
self._due.remove(event.tag)
self._condition.notify_all()
if not self._due:
self._returned = True
return
thread = threading.Thread(target=in_thread)
thread.start()
def done(self):
with self._condition:
return self._returned
def event_with_tag(self, tag):
with self._condition:
while True:
for event in self._events:
if event.tag is tag:
return event
self._condition.wait()
def events(self):
with self._condition:
while not self._returned:
self._condition.wait()
return tuple(self._events)
class ReadSomeButNotAllResponsesTest(unittest.TestCase):
def testReadSomeButNotAllResponses(self):
server_completion_queue = cygrpc.CompletionQueue()
server = cygrpc.Server(cygrpc.ChannelArgs([]))
server.register_completion_queue(server_completion_queue)
port = server.add_http2_port(b'[::]:0')
server.start()
channel = cygrpc.Channel('localhost:{}'.format(port).encode(),
cygrpc.ChannelArgs([]))
server_shutdown_tag = 'server_shutdown_tag'
server_driver = _ServerDriver(server_completion_queue,
server_shutdown_tag)
server_driver.start()
client_condition = threading.Condition()
client_due = set()
client_completion_queue = cygrpc.CompletionQueue()
client_driver = _QueueDriver(client_condition, client_completion_queue,
client_due)
client_driver.start()
server_call_condition = threading.Condition()
server_send_initial_metadata_tag = 'server_send_initial_metadata_tag'
server_send_first_message_tag = 'server_send_first_message_tag'
server_send_second_message_tag = 'server_send_second_message_tag'
server_complete_rpc_tag = 'server_complete_rpc_tag'
server_call_due = set(
(server_send_initial_metadata_tag, server_send_first_message_tag,
server_send_second_message_tag, server_complete_rpc_tag,))
server_call_completion_queue = cygrpc.CompletionQueue()
server_call_driver = _QueueDriver(server_call_condition,
server_call_completion_queue,
server_call_due)
server_call_driver.start()
server_rpc_tag = 'server_rpc_tag'
request_call_result = server.request_call(server_call_completion_queue,
server_completion_queue,
server_rpc_tag)
client_call = channel.create_call(None, _EMPTY_FLAGS,
client_completion_queue, b'/twinkies',
None, _INFINITE_FUTURE)
client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag'
client_complete_rpc_tag = 'client_complete_rpc_tag'
with client_condition:
client_receive_initial_metadata_start_batch_result = (
client_call.start_client_batch([
cygrpc.operation_receive_initial_metadata(_EMPTY_FLAGS),
], client_receive_initial_metadata_tag))
client_due.add(client_receive_initial_metadata_tag)
client_complete_rpc_start_batch_result = (
client_call.start_client_batch([
cygrpc.operation_send_initial_metadata(_EMPTY_METADATA,
_EMPTY_FLAGS),
cygrpc.operation_send_close_from_client(_EMPTY_FLAGS),
cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS),
], client_complete_rpc_tag))
client_due.add(client_complete_rpc_tag)
server_rpc_event = server_driver.first_event()
with server_call_condition:
server_send_initial_metadata_start_batch_result = (
server_rpc_event.operation_call.start_server_batch([
cygrpc.operation_send_initial_metadata(_EMPTY_METADATA,
_EMPTY_FLAGS),
], server_send_initial_metadata_tag))
server_send_first_message_start_batch_result = (
server_rpc_event.operation_call.start_server_batch([
cygrpc.operation_send_message(b'\x07', _EMPTY_FLAGS),
], server_send_first_message_tag))
server_send_initial_metadata_event = server_call_driver.event_with_tag(
server_send_initial_metadata_tag)
server_send_first_message_event = server_call_driver.event_with_tag(
server_send_first_message_tag)
with server_call_condition:
server_send_second_message_start_batch_result = (
server_rpc_event.operation_call.start_server_batch([
cygrpc.operation_send_message(b'\x07', _EMPTY_FLAGS),
], server_send_second_message_tag))
server_complete_rpc_start_batch_result = (
server_rpc_event.operation_call.start_server_batch([
cygrpc.operation_receive_close_on_server(_EMPTY_FLAGS),
cygrpc.operation_send_status_from_server(
(), cygrpc.StatusCode.ok, b'test details',
_EMPTY_FLAGS),
], server_complete_rpc_tag))
server_send_second_message_event = server_call_driver.event_with_tag(
server_send_second_message_tag)
server_complete_rpc_event = server_call_driver.event_with_tag(
server_complete_rpc_tag)
server_call_driver.events()
with client_condition:
client_receive_first_message_tag = 'client_receive_first_message_tag'
client_receive_first_message_start_batch_result = (
client_call.start_client_batch([
cygrpc.operation_receive_message(_EMPTY_FLAGS),
], client_receive_first_message_tag))
client_due.add(client_receive_first_message_tag)
client_receive_first_message_event = client_driver.event_with_tag(
client_receive_first_message_tag)
client_call_cancel_result = client_call.cancel()
client_driver.events()
server.shutdown(server_completion_queue, server_shutdown_tag)
server.cancel_all_calls()
server_driver.events()
self.assertEqual(cygrpc.CallError.ok, request_call_result)
self.assertEqual(cygrpc.CallError.ok,
server_send_initial_metadata_start_batch_result)
self.assertEqual(cygrpc.CallError.ok,
client_receive_initial_metadata_start_batch_result)
self.assertEqual(cygrpc.CallError.ok,
client_complete_rpc_start_batch_result)
self.assertEqual(cygrpc.CallError.ok, client_call_cancel_result)
self.assertIs(server_rpc_tag, server_rpc_event.tag)
self.assertEqual(cygrpc.CompletionType.operation_complete,
server_rpc_event.type)
self.assertIsInstance(server_rpc_event.operation_call, cygrpc.Call)
self.assertEqual(0, len(server_rpc_event.batch_operations))
if __name__ == '__main__':
unittest.main(verbosity=2)
| 41.705394
| 83
| 0.634265
|
f3aec75d462ac45fc9cd058f407ddc1bd9a217c5
| 1,898
|
py
|
Python
|
PyTrinamicMicro/platforms/motionpy2/examples/tmcl_bridge/tmcl_bridge_uart_x.py
|
trinamic/PyTrinamicMicro
|
defcd1aef2ea35e848b3a3faf201ec4d8a649bfd
|
[
"MIT"
] | 4
|
2020-06-25T08:59:59.000Z
|
2021-07-17T09:52:38.000Z
|
PyTrinamicMicro/platforms/motionpy2/examples/tmcl_bridge/tmcl_bridge_uart_x.py
|
trinamic/PyTrinamicMicro
|
defcd1aef2ea35e848b3a3faf201ec4d8a649bfd
|
[
"MIT"
] | 2
|
2020-10-08T15:48:07.000Z
|
2021-05-12T13:28:27.000Z
|
PyTrinamicMicro/platforms/motionpy2/examples/tmcl_bridge/tmcl_bridge_uart_x.py
|
trinamic/PyTrinamicMicro
|
defcd1aef2ea35e848b3a3faf201ec4d8a649bfd
|
[
"MIT"
] | 3
|
2021-01-26T09:24:28.000Z
|
2021-04-27T08:42:38.000Z
|
'''
Bridge from USB host to UART module.
Pitfall:
stdout redirection is impossible in micropython at the moment.
By default, stdout-writing functions will write to VCP and interfere with the connection.
Therefore, do not use stdout-writing functions (print, ...) here or turn them off while using VCP.
Created on 08.10.2020
@author: LK
'''
from PyTrinamicMicro import PyTrinamicMicro
from PyTrinamicMicro.connections.tmcl_host_interface import tmcl_host_interface
from PyTrinamicMicro.platforms.motionpy2.connections.uart_tmcl_interface import uart_tmcl_interface
from PyTrinamicMicro.platforms.motionpy2.connections.can_tmcl_interface import can_tmcl_interface
from PyTrinamicMicro.platforms.motionpy2.connections.rs232_tmcl_interface import rs232_tmcl_interface
from PyTrinamicMicro.platforms.motionpy2.connections.rs485_tmcl_interface import rs485_tmcl_interface
from PyTrinamicMicro.TMCL_Bridge import TMCL_Bridge
from PyTrinamic.TMCL import TMCL
import logging
# Prepare Logger
logger = logging.getLogger(__name__)
logger.info("TMCL Bridge from UART to X")
request_command = 0
def request_callback(request):
global request_command
request_command = request.command
return request
def reply_callback(reply):
if(request_command != TMCL.COMMANDS["GET_FIRMWARE_VERSION"]):
reply.calculate_checksum()
return reply
logger.info("Initializing interfaces ...")
host = uart_tmcl_interface()
modules = [{
"module": can_tmcl_interface(debug=True),
"request_callback": request_callback,
"reply_callback": reply_callback
}, {"module":rs232_tmcl_interface(debug=True)}, {"module":rs485_tmcl_interface(debug=True)}]
bridge = TMCL_Bridge(host, modules)
logger.info("Interfaces initialized.")
while(not(bridge.process())):
pass
logger.info("Closing interfaces ...")
host.close()
for module in modules:
    module["module"].close()
logger.info("Interfaces closed.")
logger.info("Bridge stopped.")
| 32.169492
| 101
| 0.800843
|
631cd426b32b8bc47877d1fcabb30b4a4948e199
| 7,645
|
py
|
Python
|
python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_operators.py
|
a6802739/Paddle
|
c0bda9109f11872432f7d60580b5b6a3367f2577
|
[
"Apache-2.0"
] | 1
|
2020-12-21T09:01:24.000Z
|
2020-12-21T09:01:24.000Z
|
python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_operators.py
|
a6802739/Paddle
|
c0bda9109f11872432f7d60580b5b6a3367f2577
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_operators.py
|
a6802739/Paddle
|
c0bda9109f11872432f7d60580b5b6a3367f2577
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import paddle
import unittest
class CallNotExist(paddle.nn.Layer):
def __call__(self):
        # call a non-existent API to trigger an exception
return paddle.nn.not_exist_api
class ForwardNotExist(paddle.nn.Layer):
def forward(self):
return 0
net = ForwardNotExist()
setattr(net, "forward", "A string so that convert forward will fail")
class TestConvertCall(unittest.TestCase):
def test_class_exception(self):
@paddle.jit.to_static
def call_not_exist():
net = CallNotExist()
return net()
with self.assertRaises(AttributeError):
call_not_exist()
@paddle.jit.to_static
def forward_not_exist():
return net()
with self.assertRaises(TypeError):
forward_not_exist()
class TestConvertShapeCompare(unittest.TestCase):
def test_non_variable(self):
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(1, "<", 2), True)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(1, "<", 2, "<=", 3),
True)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(1, ">", 2, "<=", 3),
False)
def error_func():
"""
Function used to test that comparison doesn't run after first False
"""
raise ValueError("Used for test")
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(
1, ">", 2, "<=", lambda: error_func()), False)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(1, "<", 2, "in",
[1, 2, 3]), True)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(1, "<", 2, "not in",
[1, 2, 3]), False)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(1, "<", 2, "is", 3),
False)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(1, "<", 2, "is not",
[1, 2, 3]), True)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare([1, 2], "==", [1, 2],
"!=", [1, 2, 3]), True)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare([1, 2], "!=", [1, 2, 3],
"==", [1, 2]), False)
def test_variable(self):
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program(),
paddle.static.Program()):
x = paddle.static.data(name='x', shape=[3, 2], dtype='float32')
y = paddle.static.data(name='y', shape=[3, 2], dtype='float32')
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(x, "is", x, "is not",
y), True)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(x, "is not", x,
"is not", y), False)
self.assertEqual(
paddle.jit.dy2static.convert_shape_compare(x, "is", x, "is", y),
False)
eq_out = paddle.jit.dy2static.convert_shape_compare(x, "==", y)
not_eq_out = paddle.jit.dy2static.convert_shape_compare(x, "!=", y)
long_eq_out = paddle.jit.dy2static.convert_shape_compare(x, "==", x,
"!=", y)
place = paddle.CUDAPlace(0) if paddle.is_compiled_with_cuda(
) else paddle.CPUPlace()
exe = paddle.static.Executor(place)
x_y_eq_out = exe.run(feed={
"x": np.ones([3, 2]).astype(np.float32),
"y": np.ones([3, 2]).astype(np.float32)
},
fetch_list=[eq_out, not_eq_out, long_eq_out])
np.testing.assert_array_equal(
np.array(x_y_eq_out), np.array([[True], [False], [False]]))
set_a_zero = np.ones([3, 2]).astype(np.float32)
set_a_zero[0][0] = 0.0
x_y_not_eq_out = exe.run(
feed={
"x": np.ones([3, 2]).astype(np.float32),
"y": set_a_zero
},
fetch_list=[eq_out, not_eq_out, long_eq_out])
np.testing.assert_array_equal(
np.array(x_y_not_eq_out), np.array([[False], [True], [True]]))
paddle.disable_static()
class TestChooseShapeAttrOrApi(unittest.TestCase):
def test_api_shape_is_none(self):
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api([1, 2], None),
[1, 2])
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api([1], None), [1])
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api([2, 3, 7], None, 0),
2)
def test_attr_shape_is_int(self):
x = paddle.zeros([1, 3, 5, 7])
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api(x.shape[0],
paddle.shape(x)[0]),
1)
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api(x.shape[1],
paddle.shape(x)[1]),
3)
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api(-1,
paddle.shape(x)[0]),
paddle.shape(x)[0])
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api(-1,
paddle.shape(x), 0),
paddle.shape(x)[0])
def test_positive_attr_shape(self):
x = paddle.zeros([1, 3, 5, 7])
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api(x.shape,
paddle.shape(x)),
x.shape)
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api(x.shape,
paddle.shape(x), 3),
x.shape[3])
def test_negative_attr_shape(self):
x = paddle.zeros([7])
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api([-1],
paddle.shape(x), 0),
paddle.shape(x)[0])
self.assertEqual(
paddle.jit.dy2static.choose_shape_attr_or_api([-1],
paddle.shape(x)),
paddle.shape(x))
if __name__ == '__main__':
unittest.main()
| 39.407216
| 80
| 0.516678
|
0c24e5f375447159ab4363e995ab710e9c15c369
| 726
|
py
|
Python
|
Python_script/cnt.py
|
hzz0024/BioinfoTools
|
1a2fd50e8dfb0e52f2514720e938702fe255ca7b
|
[
"MIT"
] | 2
|
2019-08-15T15:16:28.000Z
|
2020-10-12T12:27:19.000Z
|
Python_script/cnt.py
|
hzz0024/BioinfoTools
|
1a2fd50e8dfb0e52f2514720e938702fe255ca7b
|
[
"MIT"
] | null | null | null |
Python_script/cnt.py
|
hzz0024/BioinfoTools
|
1a2fd50e8dfb0e52f2514720e938702fe255ca7b
|
[
"MIT"
] | null | null | null |
import numpy as np
import re
import argparse
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument(
'--input',
default='./Flowcell1_1_fastq',
help='Input file.')
args = arg_parser.parse_args()
# replace the fileName with your own fastq file name
fileName = args.input
fileInput = open(fileName, mode='r')
#chars = set('!"#$%&'()*+,-./')
chars = set('\'"!#$%&()*+/,-.012')
cnt = 0
for strLine in fileInput:
#Strip the endline character from each input line
strLine = strLine.rstrip("\n")
if strLine.startswith("EEEE"):
strLine=strLine[0:64]
if any((c in chars) for c in strLine):
cnt += 1
print('Count: ' + str(cnt))
| 25.928571
| 54
| 0.597796
|
e5b85dfbc95660ee9b847b012a6337d4b6b30b70
| 496
|
py
|
Python
|
examples/advanced/hydra_app_example/setup.py
|
Devabdulakeem/hydra
|
7afee0976f7507c3c1b607ebd129d3408b608fa2
|
[
"MIT"
] | 1
|
2021-02-23T00:00:20.000Z
|
2021-02-23T00:00:20.000Z
|
examples/advanced/hydra_app_example/setup.py
|
rolveb/hydra
|
dfbebc34020ad3d4bf1ab061565408740e62eb22
|
[
"MIT"
] | null | null | null |
examples/advanced/hydra_app_example/setup.py
|
rolveb/hydra
|
dfbebc34020ad3d4bf1ab061565408740e62eb22
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from setuptools import find_packages, setup
setup(
name="hydra-app",
version="0.1",
packages=find_packages(include=["hydra_app"]),
entry_points={"console_scripts": ["hydra_app = hydra_app.main:main"]},
author="you!",
author_email="your_email@example.com",
url="http://hydra-app.example.com",
include_package_data=True,
install_requires=["hydra-core~=1.0.0rc3"],
)
| 27.555556
| 74
| 0.693548
|
1de385f2ef105533a657268c8f28ab9e84887666
| 1,201
|
py
|
Python
|
soda/core/tests/unit/test_sodacl_error_user_defined_queries.py
|
duyet/soda-core
|
92a52e0d7c1e88624d0637123cfcb2610af6d112
|
[
"Apache-2.0"
] | 4
|
2022-03-23T02:43:42.000Z
|
2022-03-31T15:20:54.000Z
|
soda/core/tests/unit/test_sodacl_error_user_defined_queries.py
|
duyet/soda-core
|
92a52e0d7c1e88624d0637123cfcb2610af6d112
|
[
"Apache-2.0"
] | 543
|
2022-03-22T09:02:17.000Z
|
2022-03-31T16:29:41.000Z
|
soda/core/tests/unit/test_sodacl_error_user_defined_queries.py
|
duyet/soda-core
|
92a52e0d7c1e88624d0637123cfcb2610af6d112
|
[
"Apache-2.0"
] | 1
|
2022-03-27T03:37:55.000Z
|
2022-03-27T03:37:55.000Z
|
from textwrap import dedent
from soda.scan import Scan
def test_user_defined_metric_query_unsupported_configuration():
scan = Scan()
scan.add_sodacl_yaml_str(
dedent(
"""
checks for CUSTOMERS:
- avg_surface between 1068 and 1069:
typo config: AVG(size * distance)
"""
)
)
scan.assert_has_error("Skipping unsupported check configuration: typo config")
def test_user_defined_metric_query_metric_name_typo():
scan = Scan()
scan.add_sodacl_yaml_str(
dedent(
"""
checks for CUSTOMERS:
- avg_surface between 1068 and 1069:
avg_surfas expression: AVG(size * distance)
"""
)
)
scan.assert_has_error(
'In configuration "avg_surfas expression" the metric name must match exactly the metric name in the check "avg_surface"'
)
def test_typo_in_metric_name():
scan = Scan()
scan.add_sodacl_yaml_str(
dedent(
"""
checks for CUSTOMERS:
- inval id_percent(pct) < 5 %
"""
)
)
scan.assert_has_error('Invalid check "inval id_percent(pct) < 5 %"')
assert len(scan.get_error_logs()) == 1
| 24.510204
| 128
| 0.622814
|
7d6bcc4d564c9332cd2abde37753d3a5c4a89515
| 34,070
|
py
|
Python
|
python/paddle/utils/cpp_extension/extension_utils.py
|
dingsiyu/paddle_adamw
|
868c0e44d7d43d83fd062bfe4536f13b8bf4517f
|
[
"Apache-2.0"
] | 1
|
2021-04-22T03:51:33.000Z
|
2021-04-22T03:51:33.000Z
|
python/paddle/utils/cpp_extension/extension_utils.py
|
dingsiyu/paddle_adamw
|
868c0e44d7d43d83fd062bfe4536f13b8bf4517f
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/utils/cpp_extension/extension_utils.py
|
dingsiyu/paddle_adamw
|
868c0e44d7d43d83fd062bfe4536f13b8bf4517f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import six
import sys
import json
import glob
import hashlib
import logging
import collections
import textwrap
import warnings
import subprocess
from contextlib import contextmanager
from setuptools.command import bdist_egg
from .. import load_op_library
from ...fluid import core
from ...fluid.framework import OpProtoHolder
from ...sysconfig import get_include, get_lib
logging.basicConfig(
format='%(asctime)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger("utils.cpp_extension")
OS_NAME = sys.platform
IS_WINDOWS = OS_NAME.startswith('win')
MSVC_COMPILE_FLAGS = [
'/MT', '/wd4819', '/wd4251', '/wd4244', '/wd4267', '/wd4275', '/wd4018',
'/wd4190', '/EHsc', '/w', '/DGOOGLE_GLOG_DLL_DECL',
'/DBOOST_HAS_STATIC_ASSERT', '/DNDEBUG', '/DPADDLE_USE_DSO'
]
MSVC_LINK_FLAGS = ['/MACHINE:X64', 'paddle_custom_op.lib']
COMMON_NVCC_FLAGS = ['-DPADDLE_WITH_CUDA', '-DEIGEN_USE_GPU']
GCC_MINI_VERSION = (5, 4, 0)
MSVC_MINI_VERSION = (19, 0, 24215)
# Give warning if using wrong compiler
WRONG_COMPILER_WARNING = '''
*************************************
* Compiler Compatibility WARNING *
*************************************
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
Found that your compiler ({user_compiler}) is not compatible with the compiler
that built Paddle for this platform, which is {paddle_compiler} on {platform}.
Please use {paddle_compiler} to compile your custom op, or compile Paddle from
source using {user_compiler} and then use it to compile your custom op as well.
See https://www.paddlepaddle.org.cn/documentation/docs/zh/install/compile/fromsource.html
for help with compiling Paddle from source.
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
'''
# Give warning if used compiler version is incompatible
ABI_INCOMPATIBILITY_WARNING = '''
**********************************
* ABI Compatibility WARNING *
**********************************
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
Found that your compiler ({user_compiler} == {version}) may be ABI-incompatible with pre-installed Paddle!
Please use compiler that is ABI-compatible with GCC >= 5.4 (Recommended 8.2).
See https://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html for ABI Compatibility
information
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
'''
USING_NEW_CUSTOM_OP_LOAD_METHOD = True
DEFAULT_OP_ATTR_NAMES = [
core.op_proto_and_checker_maker.kOpRoleAttrName(),
core.op_proto_and_checker_maker.kOpRoleVarAttrName(),
core.op_proto_and_checker_maker.kOpNameScopeAttrName(),
core.op_proto_and_checker_maker.kOpCreationCallstackAttrName(),
core.op_proto_and_checker_maker.kOpDeviceAttrName()
]
# NOTE(chenweihang): These exist to stay compatible with the two custom op
# definition methods; once the old method is removed, they can be removed
# together.
def use_new_custom_op_load_method(*args):
global USING_NEW_CUSTOM_OP_LOAD_METHOD
if len(args) == 0:
return USING_NEW_CUSTOM_OP_LOAD_METHOD
else:
assert len(args) == 1 and isinstance(args[0], bool)
USING_NEW_CUSTOM_OP_LOAD_METHOD = args[0]
@contextmanager
def bootstrap_context():
"""
Context to manage how to write `__bootstrap__` code in .egg
"""
origin_write_stub = bdist_egg.write_stub
bdist_egg.write_stub = custom_write_stub
yield
bdist_egg.write_stub = origin_write_stub
def load_op_meta_info_and_register_op(lib_filename):
if USING_NEW_CUSTOM_OP_LOAD_METHOD:
core.load_op_meta_info_and_register_op(lib_filename)
else:
core.load_op_library(lib_filename)
return OpProtoHolder.instance().update_op_proto()
def custom_write_stub(resource, pyfile):
"""
    Customized write_stub function that allows us to inject the generated
    Python API code into the egg's Python file.
"""
_stub_template = textwrap.dedent("""
import os
import sys
import types
import paddle
def inject_ext_module(module_name, api_names):
if module_name in sys.modules:
return sys.modules[module_name]
new_module = types.ModuleType(module_name)
for api_name in api_names:
setattr(new_module, api_name, eval(api_name))
return new_module
def __bootstrap__():
cur_dir = os.path.dirname(os.path.abspath(__file__))
so_path = os.path.join(cur_dir, "{resource}")
assert os.path.exists(so_path)
# load custom op shared library with abs path
new_custom_ops = paddle.utils.cpp_extension.load_op_meta_info_and_register_op(so_path)
m = inject_ext_module(__name__, new_custom_ops)
__bootstrap__()
{custom_api}
""").lstrip()
    # Parse registered op information
_, op_info = CustomOpInfo.instance().last()
so_path = op_info.so_path
new_custom_ops = load_op_meta_info_and_register_op(so_path)
assert len(
new_custom_ops
) > 0, "Required at least one custom operators, but received len(custom_op) = %d" % len(
new_custom_ops)
    # NOTE: To avoid importing the .so file instead of the python file (both
    # have the same name), we rename the .so shared library; see EasyInstallCommand.
filename, ext = os.path.splitext(resource)
resource = filename + "_pd_" + ext
api_content = []
for op_name in new_custom_ops:
api_content.append(_custom_api_content(op_name))
with open(pyfile, 'w') as f:
f.write(
_stub_template.format(
resource=resource, custom_api='\n\n'.join(api_content)))
OpInfo = collections.namedtuple('OpInfo', ['so_name', 'so_path'])
class CustomOpInfo:
"""
A global Singleton map to record all compiled custom ops information.
"""
@classmethod
def instance(cls):
if not hasattr(cls, '_instance'):
cls._instance = cls()
return cls._instance
def __init__(self):
assert not hasattr(
self.__class__,
'_instance'), 'Please use `instance()` to get CustomOpInfo object!'
        # NOTE(Aurelius84): Use OrderedDict to preserve insertion order
self.op_info_map = collections.OrderedDict()
def add(self, op_name, so_name, so_path=None):
self.op_info_map[op_name] = OpInfo(so_name, so_path)
def last(self):
"""
        Return the latest inserted custom op info.
"""
assert len(self.op_info_map) > 0
return next(reversed(self.op_info_map.items()))
VersionFields = collections.namedtuple('VersionFields', [
'sources',
'extra_compile_args',
'extra_link_args',
'library_dirs',
'runtime_library_dirs',
'include_dirs',
'define_macros',
'undef_macros',
])
class VersionManager:
def __init__(self, version_field):
self.version_field = version_field
self.version = self.hasher(version_field)
def hasher(self, version_field):
from paddle.fluid.layers.utils import flatten
md5 = hashlib.md5()
for field in version_field._fields:
elem = getattr(version_field, field)
if not elem: continue
if isinstance(elem, (list, tuple, dict)):
flat_elem = flatten(elem)
md5 = combine_hash(md5, tuple(flat_elem))
else:
raise RuntimeError(
"Support types with list, tuple and dict, but received {} with {}.".
format(type(elem), elem))
return md5.hexdigest()
@property
def details(self):
return self.version_field._asdict()
def combine_hash(md5, value):
"""
Return new hash value.
    DO NOT use `hash()` because it doesn't generate stable values between different processes.
See https://stackoverflow.com/questions/27522626/hash-function-in-python-3-3-returns-different-results-between-sessions
"""
md5.update(repr(value).encode())
return md5
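# Illustrative check (not part of the original module): repr-based hashing
# keeps digests stable across interpreter sessions, unlike built-in hash().
#   d1 = combine_hash(hashlib.md5(), ('-O2', '-DNDEBUG')).hexdigest()
#   d2 = combine_hash(hashlib.md5(), ('-O2', '-DNDEBUG')).hexdigest()
#   assert d1 == d2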
def clean_object_if_change_cflags(so_path, extension):
"""
    If the source was compiled before, we should check whether the cflags
    have changed and delete the built object to re-compile the source, even
    though the source file content is unchanged.
"""
def serialize(path, version_info):
assert isinstance(version_info, dict)
with open(path, 'w') as f:
f.write(json.dumps(version_info, indent=4, sort_keys=True))
def deserialize(path):
assert os.path.exists(path)
with open(path, 'r') as f:
content = f.read()
return json.loads(content)
# version file
VERSION_FILE = "version.txt"
base_dir = os.path.dirname(so_path)
so_name = os.path.basename(so_path)
version_file = os.path.join(base_dir, VERSION_FILE)
# version info
args = [getattr(extension, field, None) for field in VersionFields._fields]
version_field = VersionFields._make(args)
versioner = VersionManager(version_field)
if os.path.exists(so_path) and os.path.exists(version_file):
old_version_info = deserialize(version_file)
so_version = old_version_info.get(so_name, None)
        # delete the shared library file if the version has changed, to re-compile it.
if so_version is not None and so_version != versioner.version:
log_v(
"Re-Compiling {}, because specified cflags have been changed. New signature {} has been saved into {}.".
format(so_name, versioner.version, version_file))
os.remove(so_path)
        # update new version information
new_version_info = versioner.details
new_version_info[so_name] = versioner.version
serialize(version_file, new_version_info)
else:
        # If compiling for the first time, save detailed compilation information for debugging.
if not os.path.exists(base_dir):
os.makedirs(base_dir)
details = versioner.details
details[so_name] = versioner.version
serialize(version_file, details)
def prepare_unix_cudaflags(cflags):
"""
    Prepare all necessary compile flags for nvcc to compile CUDA files.
"""
cflags = COMMON_NVCC_FLAGS + [
'-ccbin', 'cc', '-Xcompiler', '-fPIC', '--expt-relaxed-constexpr',
'-DNVCC'
] + cflags + get_cuda_arch_flags(cflags)
return cflags
def prepare_win_cudaflags(cflags):
"""
    Prepare all necessary compile flags for nvcc to compile CUDA files.
"""
cflags = COMMON_NVCC_FLAGS + ['-w'] + cflags + get_cuda_arch_flags(cflags)
return cflags
def add_std_without_repeat(cflags, compiler_type, use_std14=False):
"""
    Append -std=c++11/14 to cflags if it was not specified before.
"""
cpp_flag_prefix = '/std:' if compiler_type == 'msvc' else '-std='
if not any(cpp_flag_prefix in flag for flag in cflags):
suffix = 'c++14' if use_std14 else 'c++11'
cpp_flag = cpp_flag_prefix + suffix
cflags.append(cpp_flag)
def get_cuda_arch_flags(cflags):
"""
For an arch, say "6.1", the added compile flag will be
``-gencode=arch=compute_61,code=sm_61``.
For an added "+PTX", an additional
``-gencode=arch=compute_xx,code=compute_xx`` is added.
"""
# TODO(Aurelius84):
return []
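# Illustrative sketch only (the real implementation above is left as a
# TODO): how arch strings such as "6.1" or "6.1+PTX" could be mapped to
# the flags described in the docstring. '_arch_flags_sketch' is a
# hypothetical helper, not part of the original module.
def _arch_flags_sketch(arch_list):
    flags = []
    for arch in arch_list:
        num = arch.split('+')[0].replace('.', '')
        # e.g. "6.1" -> -gencode=arch=compute_61,code=sm_61
        flags.append('-gencode=arch=compute_{0},code=sm_{0}'.format(num))
        if arch.endswith('+PTX'):
            # "+PTX" additionally embeds forward-compatible PTX code
            flags.append(
                '-gencode=arch=compute_{0},code=compute_{0}'.format(num))
    return flags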
def normalize_extension_kwargs(kwargs, use_cuda=False):
"""
Normalize include_dirs, library_dir and other attributes in kwargs.
"""
assert isinstance(kwargs, dict)
# append necessary include dir path of paddle
include_dirs = kwargs.get('include_dirs', [])
include_dirs.extend(find_paddle_includes(use_cuda))
kwargs['include_dirs'] = include_dirs
# append necessary lib path of paddle
library_dirs = kwargs.get('library_dirs', [])
library_dirs.extend(find_paddle_libraries(use_cuda))
kwargs['library_dirs'] = library_dirs
# append compile flags and check settings of compiler
extra_compile_args = kwargs.get('extra_compile_args', [])
if isinstance(extra_compile_args, dict):
for compiler in ['cxx', 'nvcc']:
if compiler not in extra_compile_args:
extra_compile_args[compiler] = []
if IS_WINDOWS:
# TODO(zhouwei): may append compile flags in future
pass
# append link flags
extra_link_args = kwargs.get('extra_link_args', [])
extra_link_args.extend(MSVC_LINK_FLAGS)
if use_cuda:
extra_link_args.extend(['cudadevrt.lib', 'cudart_static.lib'])
kwargs['extra_link_args'] = extra_link_args
else:
add_compile_flag(extra_compile_args, ['-w']) # disable warning
        # Note(Aurelius84): This macro will impact the memory layout of `Tensor`.
        # We align it automatically with the pre-installed Paddle.
if core.is_compiled_with_mkldnn():
add_compile_flag(extra_compile_args, ['-DPADDLE_WITH_MKLDNN'])
# append link flags
extra_link_args = kwargs.get('extra_link_args', [])
if use_new_custom_op_load_method():
extra_link_args.append('-lpaddle_custom_op')
else:
extra_link_args.append('-lpaddle_framework')
if use_cuda:
extra_link_args.append('-lcudart')
kwargs['extra_link_args'] = extra_link_args
# add runtime library dirs
runtime_library_dirs = kwargs.get('runtime_library_dirs', [])
runtime_library_dirs.extend(find_paddle_libraries(use_cuda))
kwargs['runtime_library_dirs'] = runtime_library_dirs
kwargs['extra_compile_args'] = extra_compile_args
kwargs['language'] = 'c++'
return kwargs
def find_cuda_home():
"""
    Use a heuristic to find the CUDA home path
"""
# step 1. find in $CUDA_HOME or $CUDA_PATH
cuda_home = os.environ.get('CUDA_HOME') or os.environ.get('CUDA_PATH')
# step 2. find path by `which nvcc`
if cuda_home is None:
which_cmd = 'where' if IS_WINDOWS else 'which'
try:
with open(os.devnull, 'w') as devnull:
nvcc_path = subprocess.check_output(
[which_cmd, 'nvcc'], stderr=devnull)
if six.PY3:
nvcc_path = nvcc_path.decode()
                # Multiple CUDA installs may be found; select the first
nvcc_path = nvcc_path.split('\r\n')[0]
# for example: /usr/local/cuda/bin/nvcc
cuda_home = os.path.dirname(os.path.dirname(nvcc_path))
except:
if IS_WINDOWS:
# search from default NVIDIA GPU path
candidate_paths = glob.glob(
'C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v*.*'
)
if len(candidate_paths) > 0:
cuda_home = candidate_paths[0]
else:
cuda_home = "/usr/local/cuda"
# step 3. check whether path is valid
if cuda_home and not os.path.exists(
cuda_home) and core.is_compiled_with_cuda():
cuda_home = None
return cuda_home
def find_rocm_home():
"""
    Use a heuristic to find the ROCm home path
"""
# step 1. find in $ROCM_HOME or $ROCM_PATH
rocm_home = os.environ.get('ROCM_HOME') or os.environ.get('ROCM_PATH')
    # step 2. find path by `which hipcc`
if rocm_home is None:
which_cmd = 'where' if IS_WINDOWS else 'which'
try:
with open(os.devnull, 'w') as devnull:
hipcc_path = subprocess.check_output(
[which_cmd, 'hipcc'], stderr=devnull)
if six.PY3:
hipcc_path = hipcc_path.decode()
hipcc_path = hipcc_path.rstrip('\r\n')
# for example: /opt/rocm/bin/hipcc
rocm_home = os.path.dirname(os.path.dirname(hipcc_path))
except:
rocm_home = "/opt/rocm"
# step 3. check whether path is valid
if rocm_home and not os.path.exists(
rocm_home) and core.is_compiled_with_rocm():
rocm_home = None
return rocm_home
def find_cuda_includes():
"""
    Use a heuristic to find the CUDA include path
"""
cuda_home = find_cuda_home()
if cuda_home is None:
raise ValueError(
"Not found CUDA runtime, please use `export CUDA_HOME=XXX` to specific it."
)
return [os.path.join(cuda_home, 'include')]
def find_rocm_includes():
"""
    Use a heuristic to find the ROCm include path
"""
rocm_home = find_rocm_home()
if rocm_home is None:
raise ValueError(
"Not found ROCM runtime, please use `export ROCM_PATH= XXX` to specific it."
)
return [os.path.join(rocm_home, 'include')]
def find_paddle_includes(use_cuda=False):
"""
Return Paddle necessary include dir path.
"""
# pythonXX/site-packages/paddle/include
paddle_include_dir = get_include()
third_party_dir = os.path.join(paddle_include_dir, 'third_party')
include_dirs = [paddle_include_dir, third_party_dir]
if use_cuda:
if core.is_compiled_with_rocm():
rocm_include_dir = find_rocm_includes()
include_dirs.extend(rocm_include_dir)
else:
cuda_include_dir = find_cuda_includes()
include_dirs.extend(cuda_include_dir)
return include_dirs
def find_cuda_libraries():
"""
    Use a heuristic to find the CUDA static lib path
"""
cuda_home = find_cuda_home()
if cuda_home is None:
raise ValueError(
"Not found CUDA runtime, please use `export CUDA_HOME=XXX` to specific it."
)
if IS_WINDOWS:
cuda_lib_dir = [os.path.join(cuda_home, 'lib', 'x64')]
else:
cuda_lib_dir = [os.path.join(cuda_home, 'lib64')]
return cuda_lib_dir
def find_rocm_libraries():
"""
Use heuristics to find the ROCm library directory
"""
rocm_home = find_rocm_home()
if rocm_home is None:
raise ValueError(
"Not found ROCM runtime, please use `export ROCM_PATH=XXX` to specific it."
)
rocm_lib_dir = [os.path.join(rocm_home, 'lib')]
return rocm_lib_dir
def find_paddle_libraries(use_cuda=False):
"""
Return the library directories required by Paddle.
"""
# pythonXX/site-packages/paddle/libs
paddle_lib_dirs = [get_lib()]
if use_cuda:
if core.is_compiled_with_rocm():
rocm_lib_dir = find_rocm_libraries()
paddle_lib_dirs.extend(rocm_lib_dir)
else:
cuda_lib_dir = find_cuda_libraries()
paddle_lib_dirs.extend(cuda_lib_dir)
return paddle_lib_dirs
def add_compile_flag(extra_compile_args, flags):
assert isinstance(flags, list)
if isinstance(extra_compile_args, dict):
for args in extra_compile_args.values():
args.extend(flags)
else:
extra_compile_args.extend(flags)
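# A quick standalone sketch of add_compile_flag with both accepted shapes
# (a plain list vs. a per-compiler dict); the flag values are hypothetical:
#
#   args_list = ['-O3']
#   add_compile_flag(args_list, ['-DPADDLE_WITH_MKLDNN'])
#   # args_list -> ['-O3', '-DPADDLE_WITH_MKLDNN']
#
#   args_dict = {'cxx': ['-O3'], 'nvcc': []}
#   add_compile_flag(args_dict, ['-g'])
#   # -> {'cxx': ['-O3', '-g'], 'nvcc': ['-g']}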
def is_cuda_file(path):
cuda_suffix = set(['.cu'])
items = os.path.splitext(path)
assert len(items) > 1
return items[-1] in cuda_suffix
def get_build_directory(verbose=False):
"""
Return the paddle extension root directory used to store compiled shared libraries. It can be
specified via ``export PADDLE_EXTENSION_DIR=XXX``. If not set, ``~/.cache/paddle_extensions``
is used by default.
Returns:
The root directory of compiling customized operators.
Examples:
.. code-block:: python
from paddle.utils.cpp_extension import get_build_directory
build_dir = get_build_directory()
print(build_dir)
"""
root_extensions_directory = os.environ.get('PADDLE_EXTENSION_DIR')
if root_extensions_directory is None:
dir_name = "paddle_extensions"
root_extensions_directory = os.path.join(
os.path.expanduser('~/.cache'), dir_name)
if IS_WINDOWS:
root_extensions_directory = os.path.normpath(
root_extensions_directory)
elif OS_NAME.startswith('darwin'):
# TODO(Aurelius84): consider macOS
raise NotImplementedError("macOS is not supported yet.")
log_v("$PADDLE_EXTENSION_DIR is not set, using path: {} by default.".
format(root_extensions_directory), verbose)
if not os.path.exists(root_extensions_directory):
os.makedirs(root_extensions_directory)
return root_extensions_directory
def parse_op_info(op_name):
"""
Parse input names, output names, and attribute names for a custom op
registered in OpInfoMap.
"""
from paddle.fluid.framework import OpProtoHolder
if op_name not in OpProtoHolder.instance().op_proto_map:
raise ValueError(
"Please load {} shared library file firstly by `paddle.utils.cpp_extension.load_op_meta_info_and_register_op(...)`".
format(op_name))
op_proto = OpProtoHolder.instance().get_op_proto(op_name)
in_names = [x.name for x in op_proto.inputs]
out_names = [x.name for x in op_proto.outputs]
attr_names = [
x.name for x in op_proto.attrs if x.name not in DEFAULT_OP_ATTR_NAMES
]
return in_names, out_names, attr_names
def _import_module_from_library(module_name, build_directory, verbose=False):
"""
Load shared library and import it as callable python module.
"""
if IS_WINDOWS:
dynamic_suffix = '.pyd'
else:
dynamic_suffix = '.so'
ext_path = os.path.join(build_directory, module_name + dynamic_suffix)
if not os.path.exists(ext_path):
raise FileNotFoundError("Extension path: {} does not exist.".format(
ext_path))
# load custom op_info and kernels from .so shared library
log_v('loading shared library from: {}'.format(ext_path), verbose)
op_names = load_op_meta_info_and_register_op(ext_path)
# generate Python api in ext_path
return _generate_python_module(module_name, op_names, build_directory,
verbose)
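# Usage sketch (assumes a module named 'custom_relu' was previously compiled
# into the build directory; the module name here is hypothetical):
#
#   module = _import_module_from_library('custom_relu', get_build_directory())
#   # each registered op is now exposed as a regular Python function on module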
def _generate_python_module(module_name,
op_names,
build_directory,
verbose=False):
"""
Automatically generate a Python file so the compiled ops can be imported or loaded as a module
"""
api_file = os.path.join(build_directory, module_name + '.py')
log_v("generate api file: {}".format(api_file), verbose)
# write into .py file
api_content = [_custom_api_content(op_name) for op_name in op_names]
with open(api_file, 'w') as f:
f.write('\n\n'.join(api_content))
# load module
custom_module = _load_module_from_file(api_file, verbose)
return custom_module
def _custom_api_content(op_name):
params_str, ins_str, attrs_str, outs_str = _get_api_inputs_str(op_name)
API_TEMPLATE = textwrap.dedent("""
from paddle.fluid.layer_helper import LayerHelper
def {op_name}({inputs}):
helper = LayerHelper("{op_name}", **locals())
# prepare inputs and outputs
ins = {ins}
attrs = {attrs}
outs = {{}}
out_names = {out_names}
for out_name in out_names:
# Set 'float32' temporarily; the actual dtype of the output variable will be inferred
# at runtime.
outs[out_name] = helper.create_variable(dtype='float32')
helper.append_op(type="{op_name}", inputs=ins, outputs=outs, attrs=attrs)
res = [outs[out_name] for out_name in out_names]
return res[0] if len(res)==1 else res
""").lstrip()
# generate python api file
api_content = API_TEMPLATE.format(
op_name=op_name,
inputs=params_str,
ins=ins_str,
attrs=attrs_str,
out_names=outs_str)
return api_content
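# For a hypothetical op 'relu2' with a single input 'X', no attributes, and a
# single output 'Out', the template above renders roughly to:
#
#   def relu2(x):
#       helper = LayerHelper("relu2", **locals())
#       ins = {'X' : x}
#       attrs = {}
#       outs = {}
#       out_names = ['Out']
#       ...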
def _load_module_from_file(api_file_path, verbose=False):
"""
Load module from python file.
"""
if not os.path.exists(api_file_path):
raise FileNotFoundError("File : {} does not exist.".format(
api_file_path))
# Unique readable module name to place custom api.
log_v('import module from file: {}'.format(api_file_path), verbose)
ext_name = "_paddle_cpp_extension_"
if six.PY2:
import imp
module = imp.load_source(ext_name, api_file_path)
else:
from importlib import machinery
loader = machinery.SourceFileLoader(ext_name, api_file_path)
module = loader.load_module()
return module
def _get_api_inputs_str(op_name):
"""
Return strings for the API parameters, the inputs dict, the attrs dict, and the output names.
"""
in_names, out_names, attr_names = parse_op_info(op_name)
# e.g: x, y, z
param_names = in_names + attr_names
# NOTE(chenweihang): we add the suffix `@VECTOR` for std::vector<Tensor> inputs,
# but a string containing `@` cannot be used as an argument name, so we split
# the input name on `@` and only use the first substring as the argument
params_str = ','.join([p.split("@")[0].lower() for p in param_names])
# e.g: {'X': x, 'Y': y, 'Z': z}
ins_str = "{%s}" % ','.join([
"'{}' : {}".format(in_name, in_name.split("@")[0].lower())
for in_name in in_names
])
# e.g: {'num': n}
attrs_str = "{%s}" % ",".join([
"'{}' : {}".format(attr_name, attr_name.split("@")[0].lower())
for attr_name in attr_names
])
# e.g: ['Out', 'Index']
outs_str = "[%s]" % ','.join(["'{}'".format(name) for name in out_names])
return params_str, ins_str, attrs_str, outs_str
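# The '@VECTOR' suffix handling above, in isolation (standalone sketch):
#
#   param_names = ['X', 'Ys@VECTOR']
#   ','.join(p.split('@')[0].lower() for p in param_names)  # -> 'x,ys'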
def _write_setup_file(name,
sources,
file_path,
build_dir,
include_dirs,
extra_cxx_cflags,
extra_cuda_cflags,
link_args,
verbose=False):
"""
Automatically generate setup.py and write it into the build directory.
"""
template = textwrap.dedent("""
import os
from paddle.utils.cpp_extension import CppExtension, CUDAExtension, BuildExtension, setup
from paddle.utils.cpp_extension import get_build_directory
from paddle.utils.cpp_extension.extension_utils import use_new_custom_op_load_method
use_new_custom_op_load_method({use_new_method})
setup(
name='{name}',
ext_modules=[
{prefix}Extension(
sources={sources},
include_dirs={include_dirs},
extra_compile_args={{'cxx':{extra_cxx_cflags}, 'nvcc':{extra_cuda_cflags}}},
extra_link_args={extra_link_args})],
cmdclass={{"build_ext" : BuildExtension.with_options(
output_dir=r'{build_dir}',
no_python_abi_suffix=True)
}})""").lstrip()
with_cuda = False
if any([is_cuda_file(source) for source in sources]):
with_cuda = True
log_v("with_cuda: {}".format(with_cuda), verbose)
content = template.format(
name=name,
prefix='CUDA' if with_cuda else 'Cpp',
sources=list2str(sources),
include_dirs=list2str(include_dirs),
extra_cxx_cflags=list2str(extra_cxx_cflags),
extra_cuda_cflags=list2str(extra_cuda_cflags),
extra_link_args=list2str(link_args),
build_dir=build_dir,
use_new_method=use_new_custom_op_load_method())
log_v('write setup.py into {}'.format(file_path), verbose)
with open(file_path, 'w') as f:
f.write(content)
def list2str(args):
"""
Convert list[str] into string. For example: ['x', 'y'] -> "['x', 'y']"
"""
if args is None: return '[]'
assert isinstance(args, (list, tuple))
args = ["{}".format(arg) for arg in args]
return repr(args)
def _jit_compile(file_path, verbose=False):
"""
Build the shared library in a subprocess
"""
ext_dir = os.path.dirname(file_path)
setup_file = os.path.basename(file_path)
# Using interpreter same with current process.
interpreter = sys.executable
try:
py_version = subprocess.check_output([interpreter, '-V'])
if six.PY3:
py_version = py_version.decode()
log_v("Using Python interpreter: {}, version: {}".format(
interpreter, py_version.strip()), verbose)
except Exception:
_, error, _ = sys.exc_info()
raise RuntimeError(
'Failed to check Python interpreter with `{}`, errors: {}'.format(
interpreter, error))
if IS_WINDOWS:
compile_cmd = 'cd /d {} && {} {} build'.format(ext_dir, interpreter,
setup_file)
else:
compile_cmd = 'cd {} && {} {} build'.format(ext_dir, interpreter,
setup_file)
print("Compiling user custom op, it will cost a few seconds.....")
run_cmd(compile_cmd, verbose)
def parse_op_name_from(sources):
"""
Parse registered custom op names from source files.
"""
def regex(content):
if USING_NEW_CUSTOM_OP_LOAD_METHOD:
pattern = re.compile(r'PD_BUILD_OP\(([^,\)]+)\)')
else:
pattern = re.compile(r'REGISTER_OPERATOR\(([^,]+),')
content = re.sub(r'\s|\t|\n', '', content)
op_name = pattern.findall(content)
op_name = set([re.sub('_grad', '', name) for name in op_name])
return op_name
op_names = set()
for source in sources:
with open(source, 'r') as f:
content = f.read()
op_names |= regex(content)
return list(op_names)
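# Standalone sketch of the extraction for the new load method: for a source
# containing 'PD_BUILD_OP(relu2)' and 'PD_BUILD_OP(relu2_grad)', the pattern
# r'PD_BUILD_OP\(([^,\)]+)\)' yields ['relu2', 'relu2_grad'], and stripping
# the '_grad' suffix collapses both entries to {'relu2'}.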
def run_cmd(command, verbose=False):
"""
Execute command with subprocess.
"""
# logging
log_v("execute command: {}".format(command), verbose)
try:
from subprocess import DEVNULL # py3
except ImportError:
DEVNULL = open(os.devnull, 'wb')
# execute command
try:
if verbose:
return subprocess.check_call(
command, shell=True, stderr=subprocess.STDOUT)
else:
return subprocess.check_call(command, shell=True, stdout=DEVNULL)
except Exception:
_, error, _ = sys.exc_info()
raise RuntimeError("Failed to run command: {}, errors: {}".format(
command, error))
def check_abi_compatibility(compiler, verbose=False):
"""
Check whether the compiler version on the user's local machine is compatible with the
Paddle installed in site-packages.
"""
if os.environ.get('PADDLE_SKIP_CHECK_ABI') in ['True', 'true', '1']:
return True
which = 'where' if IS_WINDOWS else 'which'
cmd_out = subprocess.check_output(
[which, compiler], stderr=subprocess.STDOUT)
compiler_path = os.path.realpath(cmd_out.decode()
if six.PY3 else cmd_out).strip()
# step 1. if not found any suitable compiler, raise error
if not any(name in compiler_path
for name in _expected_compiler_current_platform()):
warnings.warn(
WRONG_COMPILER_WARNING.format(
user_compiler=compiler,
paddle_compiler=_expected_compiler_current_platform()[0],
platform=OS_NAME))
return False
version = (0, 0, 0)
# clang++ has no ABI compatibility problem
if OS_NAME.startswith('darwin'):
return True
try:
if OS_NAME.startswith('linux'):
mini_required_version = GCC_MINI_VERSION
version_info = subprocess.check_output(
[compiler, '-dumpfullversion', '-dumpversion'])
if six.PY3:
version_info = version_info.decode()
version = version_info.strip().split('.')
elif IS_WINDOWS:
mini_required_version = MSVC_MINI_VERSION
compiler_info = subprocess.check_output(
compiler, stderr=subprocess.STDOUT)
if six.PY3:
try:
compiler_info = compiler_info.decode('UTF-8')
except UnicodeDecodeError:
compiler_info = compiler_info.decode('gbk')
match = re.search(r'(\d+)\.(\d+)\.(\d+)', compiler_info.strip())
if match is not None:
version = match.groups()
except Exception:
# check compiler version failed
_, error, _ = sys.exc_info()
warnings.warn('Failed to check compiler version for {}: {}'.format(
compiler, error))
return False
# check version compatibility
assert len(version) == 3
if tuple(map(int, version)) >= mini_required_version:
return True
warnings.warn(
ABI_INCOMPATIBILITY_WARNING.format(
user_compiler=compiler, version='.'.join(version)))
return False
def _expected_compiler_current_platform():
"""
Return the supported compiler names for the current platform
"""
if OS_NAME.startswith('darwin'):
expect_compilers = ['clang', 'clang++']
elif OS_NAME.startswith('linux'):
expect_compilers = ['gcc', 'g++', 'gnu-c++', 'gnu-cc']
elif IS_WINDOWS:
expect_compilers = ['cl']
return expect_compilers
def log_v(info, verbose=True):
"""
Log verbose information via the logging module.
"""
if verbose:
logging.info(info)
| 33.142023
| 128
| 0.629087
|
99f8c648b3a236c026b3aeb2f1a82b8047ca5c4d
| 20
|
py
|
Python
|
scratch/py-eval/py/numpy-array.py
|
mdorier/Supervisor
|
f1e43b2b33fb2cf9e03ea3ac49378aba37bd9839
|
[
"MIT"
] | 10
|
2017-03-14T14:36:19.000Z
|
2021-01-21T00:39:36.000Z
|
scratch/py-eval/py/numpy-array.py
|
mdorier/Supervisor
|
f1e43b2b33fb2cf9e03ea3ac49378aba37bd9839
|
[
"MIT"
] | 58
|
2017-03-03T21:07:53.000Z
|
2021-07-19T18:51:03.000Z
|
scratch/py-eval/py/numpy-array.py
|
ORNL-BSEC/Supervisor
|
14a73ad19b10cebab0d7d2d48e52692485957ad2
|
[
"MIT"
] | 21
|
2017-03-08T16:07:47.000Z
|
2020-11-24T04:23:00.000Z
|
import numpy
A = numpy.array(3)
| 6.666667
| 18
| 0.6
|
49cfcbfd245b21bf56aabce05e645d8b37bf0e5d
| 992
|
py
|
Python
|
test/test_inline_response2002.py
|
latourette359/modern_logic_client
|
16d415e1b07a66a975dc08a67465c0d70c90cbac
|
[
"MIT"
] | null | null | null |
test/test_inline_response2002.py
|
latourette359/modern_logic_client
|
16d415e1b07a66a975dc08a67465c0d70c90cbac
|
[
"MIT"
] | null | null | null |
test/test_inline_response2002.py
|
latourette359/modern_logic_client
|
16d415e1b07a66a975dc08a67465c0d70c90cbac
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Modern Logic Api
Manage and version your customer decision logic outside of your codebase # noqa: E501
OpenAPI spec version: 1.0.0
Contact: info@usemodernlogic.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import modern_logic_client
from modern_logic_client.models.inline_response2002 import InlineResponse2002 # noqa: E501
from modern_logic_client.rest import ApiException
class TestInlineResponse2002(unittest.TestCase):
"""InlineResponse2002 unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testInlineResponse2002(self):
"""Test InlineResponse2002"""
# FIXME: construct object with mandatory attributes with example values
# model = modern_logic_client.models.inline_response2002.InlineResponse2002() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 24.8
| 99
| 0.728831
|
f3e6b6166cb8ed68437f31096f216c9a76000d00
| 273
|
py
|
Python
|
modules/loss_functions/__init__.py
|
df424/ml
|
e12232ca4b90f983bfb14718afd314d3d6cc1bf9
|
[
"MIT"
] | null | null | null |
modules/loss_functions/__init__.py
|
df424/ml
|
e12232ca4b90f983bfb14718afd314d3d6cc1bf9
|
[
"MIT"
] | null | null | null |
modules/loss_functions/__init__.py
|
df424/ml
|
e12232ca4b90f983bfb14718afd314d3d6cc1bf9
|
[
"MIT"
] | null | null | null |
from ml.modules.loss_functions.loss_function import LossFunction
from ml.modules.loss_functions.cross_entropy import CrossEntropyLoss
from ml.modules.loss_functions.log_likelihood import SigmoidLogLikelihood
from ml.modules.loss_functions.squared_error import SquaredError
| 54.6
| 73
| 0.897436
|
78c5820665af4380870cd71167a5c30c7ab4657f
| 1,854
|
py
|
Python
|
prepare/migrations/0007_auto_20210808_1231.py
|
engelsmann/bedom
|
eeb7eea5c5e908b8a0c744e36a4f9806105c3eb2
|
[
"CC0-1.0"
] | 1
|
2020-10-01T06:40:32.000Z
|
2020-10-01T06:40:32.000Z
|
prepare/migrations/0007_auto_20210808_1231.py
|
engelsmann/bedom
|
eeb7eea5c5e908b8a0c744e36a4f9806105c3eb2
|
[
"CC0-1.0"
] | 17
|
2020-10-01T11:49:30.000Z
|
2022-03-04T16:07:45.000Z
|
prepare/migrations/0007_auto_20210808_1231.py
|
engelsmann/bedom
|
eeb7eea5c5e908b8a0c744e36a4f9806105c3eb2
|
[
"CC0-1.0"
] | null | null | null |
# Generated by Django 3.2.5 on 2021-08-08 10:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('prepare', '0006_auto_20210801_1550'),
]
operations = [
migrations.AlterField(
model_name='fokusgruppe',
name='bedømt',
field=models.BooleanField(default='', null=True),
),
migrations.AlterField(
model_name='fokusgruppe',
name='faglig',
field=models.IntegerField(help_text='Score for elevens evne til at bidrage til en faglig samtale', null=True),
),
migrations.AlterField(
model_name='fokusgruppe',
name='hjælp',
field=models.IntegerField(help_text='Score for elevens evne til at yde hjælp til faglig problemløsning', null=True),
),
migrations.AlterField(
model_name='fokusgruppe',
name='modul',
field=models.ManyToManyField(default='', to='prepare.Modul'),
),
migrations.AlterField(
model_name='fokusgruppe',
name='spørg',
field=models.IntegerField(help_text='Score for elevens evne til at søge hjælp på fagligt spørgsmål', null=True),
),
migrations.AlterField(
model_name='fokusgruppe',
name='stikord',
field=models.CharField(help_text='Lærerens observationer i "tre" ord', max_length=30, null=True),
),
migrations.AlterField(
model_name='fokusgruppe',
name='tilstede',
field=models.BooleanField(default='', null=True),
),
migrations.AlterField(
model_name='modul',
name='afholdt',
field=models.DateField(help_text='Planlagt / faktisk dato for modulet'),
),
]
| 34.333333
| 128
| 0.587379
|
b3dcba7dd1ed0622baeddd8be80c5f162ee33699
| 2,656
|
py
|
Python
|
sushirank/datasets.py
|
Datatouille/sushirank
|
fe77509c6220ae269b9cb3003045b973e34f8661
|
[
"Apache-2.0"
] | 19
|
2020-07-19T07:14:57.000Z
|
2022-01-29T02:42:40.000Z
|
sushirank/datasets.py
|
Datatouille/sushirank
|
fe77509c6220ae269b9cb3003045b973e34f8661
|
[
"Apache-2.0"
] | null | null | null |
sushirank/datasets.py
|
Datatouille/sushirank
|
fe77509c6220ae269b9cb3003045b973e34f8661
|
[
"Apache-2.0"
] | 10
|
2020-07-19T07:18:49.000Z
|
2020-12-16T13:39:34.000Z
|
import numpy as np
import torch
from torch.utils.data import Dataset
from tqdm.auto import tqdm
device = 'cuda' if torch.cuda.is_available() else 'cpu'
class PointwiseDataset(Dataset):
def __init__(
self,
df,
label_col,
cat_cols,
num_cols,
):
self.label_col = label_col
self.cat_cols = cat_cols
self.num_cols = num_cols
self.df = df
self.cat_dims = [self.df[i].nunique() for i in self.cat_cols]
self.features = []
self._build()
def __len__(self):
return len(self.features)
def __getitem__(self, i):
return self.features[i]
def _build(self):
cat_features = torch.tensor(self.df[self.cat_cols].values,dtype=torch.long).to(device)
num_features = torch.tensor(self.df[self.num_cols].values,dtype=torch.float).to(device)
label = torch.tensor(self.df[self.label_col].values,dtype=torch.float).to(device)
for i in tqdm(range(self.df.shape[0])):
feat = {
'cat_feature': cat_features[i],
'num_feature': num_features[i],
'label': label[i]
}
self.features.append(feat)
class PairwiseDataset(Dataset):
def __init__(
self,
df,
label_col,
cat_cols,
num_cols,
):
self.label_col = label_col
self.cat_cols = cat_cols
self.num_cols = num_cols
self.df = df
self.cat_dims = [self.df[i].nunique() for i in self.cat_cols]
self.features = []
self._build()
def __len__(self):
return len(self.features)
def __getitem__(self, i):
feat_i = self.features[i]
feat_j = self.features[np.random.randint(len(self.features))]
return {
'cat_feature_i': feat_i['cat_feature'],
'num_feature_i': feat_i['num_feature'],
'cat_feature_j': feat_j['cat_feature'],
'num_feature_j': feat_j['num_feature'],
'label': torch.tensor([int(feat_i['label'] > feat_j['label'])],dtype=torch.float).to(device)
}
def _build(self):
cat_features = torch.tensor(self.df[self.cat_cols].values,dtype=torch.long).to(device)
num_features = torch.tensor(self.df[self.num_cols].values,dtype=torch.float).to(device)
label = self.df[self.label_col].values
for i in tqdm(range(self.df.shape[0])):
feat = {
'cat_feature': cat_features[i],
'num_feature': num_features[i],
'label': label[i]
}
self.features.append(feat)
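# A minimal usage sketch for PointwiseDataset (the column names are
# hypothetical; assumes pandas is installed alongside torch):
if __name__ == '__main__':
    import pandas as pd
    df = pd.DataFrame({
        'shop_id': [0, 1, 0, 2],        # categorical feature
        'price': [1.0, 2.5, 0.5, 3.0],  # numerical feature
        'clicked': [1, 0, 0, 1],        # relevance label
    })
    ds = PointwiseDataset(df, label_col='clicked',
                          cat_cols=['shop_id'], num_cols=['price'])
    print(len(ds), ds[0]['label'])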
| 32.790123
| 104
| 0.578313
|
984ca1d8b16729b1f46e8e48abfa388a669ab8bd
| 3,738
|
py
|
Python
|
qa/rpc-tests/maxblocksinflight.py
|
CircuitBreaker88/BTCMonster
|
d9598ed7c389c64743534e047787ba7195bdf24a
|
[
"MIT"
] | 3
|
2019-05-25T11:07:10.000Z
|
2019-08-17T01:29:06.000Z
|
qa/rpc-tests/maxblocksinflight.py
|
CircuitBreaker88/BTCMonster
|
d9598ed7c389c64743534e047787ba7195bdf24a
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/maxblocksinflight.py
|
CircuitBreaker88/BTCMonster
|
d9598ed7c389c64743534e047787ba7195bdf24a
|
[
"MIT"
] | 1
|
2019-03-27T20:37:32.000Z
|
2019-03-27T20:37:32.000Z
|
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import logging
'''
In this test we connect to one node over p2p, send it numerous inv's, and
compare the resulting number of getdata requests to a max allowed value. We
test for exceeding 128 blocks in flight, which was the limit a 0.9 client would
reach. [0.10 clients shouldn't request more than 16 from a single peer.]
'''
MAX_REQUESTS = 128
class TestManager(NodeConnCB):
# set up NodeConnCB callbacks, overriding base class
def on_getdata(self, conn, message):
self.log.debug("got getdata %s" % repr(message))
# Log the requests
for inv in message.inv:
if inv.hash not in self.blockReqCounts:
self.blockReqCounts[inv.hash] = 0
self.blockReqCounts[inv.hash] += 1
def on_close(self, conn):
if not self.disconnectOkay:
raise EarlyDisconnectError(0)
def __init__(self):
NodeConnCB.__init__(self)
self.log = logging.getLogger("BlockRelayTest")
def add_new_connection(self, connection):
self.connection = connection
self.blockReqCounts = {}
self.disconnectOkay = False
def run(self):
self.connection.rpc.generate(1) # Leave IBD
numBlocksToGenerate = [8, 16, 128, 1024]
for count in range(len(numBlocksToGenerate)):
current_invs = []
for i in range(numBlocksToGenerate[count]):
current_invs.append(CInv(2, random.randrange(0, 1 << 256)))
if len(current_invs) >= 50000:
self.connection.send_message(msg_inv(current_invs))
current_invs = []
if len(current_invs) > 0:
self.connection.send_message(msg_inv(current_invs))
# Wait and see how many blocks were requested
time.sleep(2)
total_requests = 0
with mininode_lock:
for key in self.blockReqCounts:
total_requests += self.blockReqCounts[key]
if self.blockReqCounts[key] > 1:
raise AssertionError("Error, test failed: block %064x requested more than once" % key)
if total_requests > MAX_REQUESTS:
raise AssertionError("Error, too many blocks (%d) requested" % total_requests)
print "Round %d: success (total requests: %d)" % (count, total_requests)
self.disconnectOkay = True
self.connection.disconnect_node()
class MaxBlocksInFlightTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
default=os.getenv("MOND", "bitcoinmonsterd"),
help="Binary to test max block requests behavior")
def setup_chain(self):
print "Initializing test directory "+self.options.tmpdir
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self):
self.nodes = start_nodes(1, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1']],
binary=[self.options.testbinary])
def run_test(self):
test = TestManager()
test.add_new_connection(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test))
NetworkThread().start() # Start up network handling in another thread
test.run()
if __name__ == '__main__':
MaxBlocksInFlightTest().main()
| 38.536082
| 110
| 0.631354
|
b2744d063020a5427dc23ef3f33c603849c8bb50
| 1,451
|
py
|
Python
|
steembase/operationids.py
|
VoxChain/vox-python
|
74147edcdeaa76e18faa752e5feb9b2174db9847
|
[
"MIT"
] | 24
|
2017-05-01T20:31:32.000Z
|
2020-04-09T02:32:02.000Z
|
steembase/operationids.py
|
VoxChain/vox-python
|
74147edcdeaa76e18faa752e5feb9b2174db9847
|
[
"MIT"
] | 7
|
2017-05-31T22:47:57.000Z
|
2018-02-10T22:21:25.000Z
|
steembase/operationids.py
|
VoxChain/vox-python
|
74147edcdeaa76e18faa752e5feb9b2174db9847
|
[
"MIT"
] | 25
|
2017-05-31T22:48:11.000Z
|
2020-07-19T11:23:34.000Z
|
op_names = [
'vote',
'comment',
'transfer',
'transfer_to_vesting',
'withdraw_vesting',
'limit_order_create',
'limit_order_cancel',
'feed_publish',
'convert',
'account_create',
'account_update',
'witness_update',
'account_witness_vote',
'account_witness_proxy',
'pow',
'custom',
'report_over_production',
'delete_comment',
'custom_json',
'comment_options',
'set_withdraw_vesting_route',
'limit_order_create2',
'challenge_authority',
'prove_authority',
'request_account_recovery',
'recover_account',
'change_recovery_account',
'escrow_transfer',
'escrow_dispute',
'escrow_release',
'pow2',
'escrow_approve',
'transfer_to_savings',
'transfer_from_savings',
'cancel_transfer_from_savings',
'custom_binary',
'decline_voting_rights',
'reset_account',
'set_reset_account',
'claim_reward_balance',
'delegate_vesting_shares',
'account_create_with_delegation',
'fill_convert_request',
'author_reward',
'curation_reward',
'comment_reward',
'liquidity_reward',
'interest',
'fill_vesting_withdraw',
'fill_order',
'shutdown_witness',
'fill_transfer_from_savings',
'hardfork',
'comment_payout_update',
'return_vesting_delegation',
'comment_benefactor_reward',
]
#: assign operation ids
operations = dict(zip(op_names, range(len(op_names))))
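#: example lookups (ids follow list order):
#:   operations['vote'] -> 0
#:   operations['comment'] -> 1
#:   operations['custom_json'] -> 18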
| 23.403226
| 54
| 0.67195
|
159c3e22f62d5fe68c2b54bfb5649693ddbd806b
| 15,372
|
py
|
Python
|
source/lib/blueprints/byom/model_monitor.py
|
Salah856/aws-mlops-framework
|
59cc38dd74b2715e104e7247422aae6e3bb59b37
|
[
"Apache-2.0"
] | null | null | null |
source/lib/blueprints/byom/model_monitor.py
|
Salah856/aws-mlops-framework
|
59cc38dd74b2715e104e7247422aae6e3bb59b37
|
[
"Apache-2.0"
] | null | null | null |
source/lib/blueprints/byom/model_monitor.py
|
Salah856/aws-mlops-framework
|
59cc38dd74b2715e104e7247422aae6e3bb59b37
|
[
"Apache-2.0"
] | null | null | null |
# #####################################################################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
# #####################################################################################################################
from aws_cdk import (
aws_s3 as s3,
core,
)
from lib.blueprints.byom.pipeline_definitions.deploy_actions import (
create_baseline_job_lambda,
sagemaker_layer,
create_invoke_lambda_custom_resource,
)
from lib.blueprints.byom.pipeline_definitions.templates_parameters import (
ParameteresFactory as pf,
ConditionsFactory as cf,
)
from lib.blueprints.byom.pipeline_definitions.sagemaker_monitor_role import create_sagemaker_monitor_role
from lib.blueprints.byom.pipeline_definitions.sagemaker_model_monitor_construct import SageMakerModelMonitor
class ModelMonitorStack(core.Stack):
def __init__(self, scope: core.Construct, id: str, monitoring_type: str, **kwargs) -> None:
super().__init__(scope, id, **kwargs)
# validate the provided monitoring_type
if monitoring_type not in ["DataQuality", "ModelQuality"]:
raise ValueError(
(
f"The {monitoring_type} is not valid. Currently supported Monitoring Types are: "
f"['DataQuality'|'ModelQuality']"
)
)
# Baseline/Monitor attributes, this will be updated based on the monitoring_type
self.baseline_attributes = dict()
self.monitor_attributes = dict()
# Parameters #
blueprint_bucket_name = pf.create_blueprint_bucket_name_parameter(self)
assets_bucket_name = pf.create_assets_bucket_name_parameter(self)
endpoint_name = pf.create_endpoint_name_parameter(self)
baseline_job_output_location = pf.create_baseline_job_output_location_parameter(self)
baseline_data = pf.create_baseline_data_parameter(self)
instance_type = pf.create_instance_type_parameter(self)
instance_count = pf.create_instance_count_parameter(self)
instance_volume_size = pf.create_instance_volume_size_parameter(self)
baseline_max_runtime_seconds = pf.create_baseline_max_runtime_seconds_parameter(self)
monitor_max_runtime_seconds = pf.create_monitor_max_runtime_seconds_parameter(self, "ModelQuality")
kms_key_arn = pf.create_kms_key_arn_parameter(self)
baseline_job_name = pf.create_baseline_job_name_parameter(self)
monitoring_schedule_name = pf.create_monitoring_schedule_name_parameter(self)
data_capture_bucket = pf.create_data_capture_bucket_name_parameter(self)
baseline_output_bucket = pf.create_baseline_output_bucket_name_parameter(self)
data_capture_s3_location = pf.create_data_capture_location_parameter(self)
monitoring_output_location = pf.create_monitoring_output_location_parameter(self)
schedule_expression = pf.create_schedule_expression_parameter(self)
image_uri = pf.create_algorithm_image_uri_parameter(self)
# add ModelQuality specific parameters/conditions, and update self.baseline_attributes/self.monitor_attributes
if monitoring_type == "ModelQuality":
self._add_model_quality_resources()
# conditions
kms_key_arn_provided = cf.create_kms_key_arn_provided_condition(self, kms_key_arn)
# Resources #
assets_bucket = s3.Bucket.from_bucket_name(self, "ImportedAssetsBucket", assets_bucket_name.value_as_string)
# getting blueprint bucket object from its name - will be used later in the stack
blueprint_bucket = s3.Bucket.from_bucket_name(
self, "ImportedBlueprintBucket", blueprint_bucket_name.value_as_string
)
# create sagemaker layer
sm_layer = sagemaker_layer(self, blueprint_bucket)
# update Baseline attributes
self.baseline_attributes.update(
dict(
monitoring_type=monitoring_type,
baseline_job_name=baseline_job_name.value_as_string,
baseline_data_location=baseline_data.value_as_string,
baseline_job_output_location=baseline_job_output_location.value_as_string,
endpoint_name=endpoint_name.value_as_string,
instance_type=instance_type.value_as_string,
instance_volume_size=instance_volume_size.value_as_string,
max_runtime_seconds=baseline_max_runtime_seconds.value_as_string,
kms_key_arn=core.Fn.condition_if(
kms_key_arn_provided.logical_id, kms_key_arn.value_as_string, core.Aws.NO_VALUE
).to_string(),
kms_key_arn_provided_condition=kms_key_arn_provided,
stack_name=core.Aws.STACK_NAME,
)
)
# create baseline job lambda action
baseline_job_lambda = create_baseline_job_lambda(
self,
blueprint_bucket=blueprint_bucket,
assets_bucket=assets_bucket,
sm_layer=sm_layer,
**self.baseline_attributes,
)
# create custom resource to invoke the baseline job lambda
# remove the condition from the custom resource properties. Otherwise, CFN will give an error
del self.baseline_attributes["kms_key_arn_provided_condition"]
invoke_lambda_custom_resource = create_invoke_lambda_custom_resource(
scope=self,
id="InvokeBaselineLambda",
lambda_function_arn=baseline_job_lambda.function_arn,
lambda_function_name=baseline_job_lambda.function_name,
blueprint_bucket=blueprint_bucket,
# add baseline attributes to the invoke lambda custom resource, so any change to these attributes
# (via template update) will re-invoke the baseline lambda and re-calculate the baseline
custom_resource_properties={
"Resource": "InvokeLambda",
"function_name": baseline_job_lambda.function_name,
"assets_bucket_name": assets_bucket_name.value_as_string,
**self.baseline_attributes,
},
)
# add dependency on baseline lambda
invoke_lambda_custom_resource.node.add_dependency(baseline_job_lambda)
# creating monitoring schedule
sagemaker_role = create_sagemaker_monitor_role(
scope=self,
id="MLOpsSagemakerMonitorRole",
kms_key_arn=kms_key_arn.value_as_string,
assets_bucket_name=assets_bucket_name.value_as_string,
data_capture_bucket=data_capture_bucket.value_as_string,
data_capture_s3_location=data_capture_s3_location.value_as_string,
baseline_output_bucket=baseline_output_bucket.value_as_string,
baseline_job_output_location=baseline_job_output_location.value_as_string,
output_s3_location=monitoring_output_location.value_as_string,
kms_key_arn_provided_condition=kms_key_arn_provided,
baseline_job_name=baseline_job_name.value_as_string,
monitoring_schedule_name=monitoring_schedule_name.value_as_string,
endpoint_name=endpoint_name.value_as_string,
model_monitor_ground_truth_input=None
if monitoring_type == "DataQuality"
else self.monitor_attributes["ground_truth_s3_uri"],
)
# resource tags
resource_tags = [{"key": "stack-name", "value": core.Aws.STACK_NAME}]
# update attributes
self.monitor_attributes.update(
dict(
monitoring_schedule_name=monitoring_schedule_name.value_as_string,
endpoint_name=endpoint_name.value_as_string,
baseline_job_name=baseline_job_name.value_as_string,
baseline_job_output_location=baseline_job_output_location.value_as_string,
schedule_expression=schedule_expression.value_as_string,
monitoring_output_location=monitoring_output_location.value_as_string,
instance_type=instance_type.value_as_string,
instance_count=instance_count.value_as_string,
instance_volume_size=instance_volume_size.value_as_string,
max_runtime_seconds=monitor_max_runtime_seconds.value_as_string,
kms_key_arn=core.Fn.condition_if(
kms_key_arn_provided.logical_id, kms_key_arn.value_as_string, core.Aws.NO_VALUE
).to_string(),
role_arn=sagemaker_role.role_arn,
image_uri=image_uri.value_as_string,
monitoring_type=monitoring_type,
tags=resource_tags,
)
)
# create Sagemaker monitoring Schedule
sagemaker_monitor = SageMakerModelMonitor(self, f"{monitoring_type}Monitor", **self.monitor_attributes)
# add job definition dependency on sagemaker role and invoke_lambda_custom_resource (so, the baseline job is created)
sagemaker_monitor.job_definition.node.add_dependency(sagemaker_role)
sagemaker_monitor.job_definition.node.add_dependency(invoke_lambda_custom_resource)
# Outputs #
core.CfnOutput(
self,
id="BaselineName",
value=baseline_job_name.value_as_string,
)
core.CfnOutput(
self,
id="MonitoringScheduleJobName",
value=monitoring_schedule_name.value_as_string,
)
core.CfnOutput(
self,
id="MonitoringScheduleType",
value=monitoring_type,
)
core.CfnOutput(
self,
id="BaselineJobOutput",
value=f"https://s3.console.aws.amazon.com/s3/buckets/{baseline_job_output_location.value_as_string}/",
)
core.CfnOutput(
self,
id="MonitoringScheduleOutput",
value=(
f"https://s3.console.aws.amazon.com/s3/buckets/{monitoring_output_location.value_as_string}/"
f"{endpoint_name.value_as_string}/{monitoring_schedule_name.value_as_string}/"
),
)
core.CfnOutput(
self,
id="MonitoredSagemakerEndpoint",
value=endpoint_name.value_as_string,
)
core.CfnOutput(
self,
id="DataCaptureS3Location",
value=(
f"https://s3.console.aws.amazon.com/s3/buckets/{data_capture_s3_location.value_as_string}"
f"/{endpoint_name.value_as_string}/"
),
)
def _add_model_quality_resources(self):
"""
Adds ModelQuality specific parameters/conditions and updates self.baseline_attributes/self.monitor_attributes
"""
# add baseline job attributes (they are different from Monitor attributes)
baseline_inference_attribute = pf.create_inference_attribute_parameter(self, "Baseline")
baseline_probability_attribute = pf.create_probability_attribute_parameter(self, "Baseline")
ground_truth_attribute = pf.create_ground_truth_attribute_parameter(self)
# add monitor attributes
monitor_inference_attribute = pf.create_inference_attribute_parameter(self, "Monitor")
monitor_probability_attribute = pf.create_probability_attribute_parameter(self, "Monitor")
ground_truth_s3_uri = pf.create_ground_truth_s3_uri_parameter(self)
# problem_type and probability_threshold_attribute are the same for both
problem_type = pf.create_problem_type_parameter(self)
probability_threshold_attribute = pf.create_probability_threshold_attribute_parameter(self)
# add conditions (used by monitor)
is_regression_or_multiclass_classification_problem = (
cf.create_problem_type_regression_or_multiclass_classification_condition(self, problem_type)
)
is_binary_classification_problem = cf.create_problem_type_binary_classification_condition(self, problem_type)
# add ModelQuality Baseline attributes
self.baseline_attributes.update(
dict(
problem_type=problem_type.value_as_string,
ground_truth_attribute=ground_truth_attribute.value_as_string,
inference_attribute=baseline_inference_attribute.value_as_string,
probability_attribute=baseline_probability_attribute.value_as_string,
probability_threshold_attribute=probability_threshold_attribute.value_as_string,
)
)
# add ModelQuality Monitor attributes
self.monitor_attributes.update(
dict(
problem_type=problem_type.value_as_string,
ground_truth_s3_uri=ground_truth_s3_uri.value_as_string,
# inference_attribute is required for Regression/Multiclass Classification problems
# probability_attribute/probability_threshold_attribute are not used
inference_attribute=core.Fn.condition_if(
is_regression_or_multiclass_classification_problem.logical_id,
monitor_inference_attribute.value_as_string,
core.Aws.NO_VALUE,
).to_string(),
# for a Binary Classification problem, we use probability_attribute and probability_threshold_attribute.
# note: probability_attribute is the index of the predicted probability in the captured data by the
# SageMaker endpoint. Typically, probability_attribute="0" and probability_threshold_attribute="0.5"
probability_attribute=core.Fn.condition_if(
is_binary_classification_problem.logical_id,
monitor_probability_attribute.value_as_string,
core.Aws.NO_VALUE,
).to_string(),
probability_threshold_attribute=core.Fn.condition_if(
is_binary_classification_problem.logical_id,
probability_threshold_attribute.value_as_string,
core.Aws.NO_VALUE,
).to_string(),
)
)
| 53.006897
| 125
| 0.654567
|
6c9fea53f9379258f0d3690d16300eb490d12f66
| 918
|
py
|
Python
|
tests/unit/tsi/api_object_test.py
|
jdgwartney/tsi
|
d0e098ce40e0743eb51af8a65b7589b549019542
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/tsi/api_object_test.py
|
jdgwartney/tsi
|
d0e098ce40e0743eb51af8a65b7589b549019542
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/tsi/api_object_test.py
|
jdgwartney/tsi
|
d0e098ce40e0743eb51af8a65b7589b549019542
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from tsi import ApiObject
class ApiObjectTest(unittest.TestCase):
def setUp(self):
self.api_object = ApiObject()
def test_init(self):
m = ApiObject()
self.assertIsNotNone(m)
def test_get_meta(self):
# meta = self.api.meta.get()
pass
| 26.228571
| 74
| 0.71024
|
aa1d1145381d2ac071008866a8aec6b198c1f460
| 356
|
py
|
Python
|
qtrio/_tests/test_cli.py
|
nodeselector/qtrio
|
4bc25ef97d7e6e01a9751de9c84a4214e637e9d4
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
qtrio/_tests/test_cli.py
|
nodeselector/qtrio
|
4bc25ef97d7e6e01a9751de9c84a4214e637e9d4
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-03-30T21:14:20.000Z
|
2021-03-30T21:14:20.000Z
|
qtrio/_tests/test_cli.py
|
nodeselector/qtrio
|
4bc25ef97d7e6e01a9751de9c84a4214e637e9d4
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
import os
import pathlib
import subprocess
import sys
import sysconfig
def test_emissions_help_dash_m():
"""The CLI run via ``python -m qtrio`` doesn't fail when asked for --help for the
emissions example.
"""
subprocess.run(
[sys.executable, "-m", "qtrio", "examples", "emissions", "--help"], check=True
)
| 23.733333
| 86
| 0.671348
|
8335b72902270d31619ee9c6ec3bc3e2a35b3089
| 2,525
|
py
|
Python
|
pirates/minigame/RepairMousePicker.py
|
ksmit799/POTCO-PS
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 8
|
2017-01-24T04:33:29.000Z
|
2020-11-01T08:36:24.000Z
|
pirates/minigame/RepairMousePicker.py
|
ksmit799/Pirates-Online-Remake
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 1
|
2017-03-02T18:05:17.000Z
|
2017-03-14T06:47:10.000Z
|
pirates/minigame/RepairMousePicker.py
|
ksmit799/Pirates-Online-Remake
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 11
|
2017-03-02T18:46:07.000Z
|
2020-11-01T08:36:26.000Z
|
# File: R (Python 2.4)
import math
from pandac.PandaModules import BitMask32
from pandac.PandaModules import NodePath, Point3
from pandac.PandaModules import CollisionNode, CollisionSphere, CollisionRay, GeomNode
from pandac.PandaModules import CollisionTraverser, CollisionHandlerQueue
from direct.interval.IntervalGlobal import Sequence, Parallel, LerpPosInterval, LerpFunc, Func
from direct.gui.DirectGui import DirectButton, DGG
from direct.task import Task
from RepairMincroGame import RepairMincroGame
class RepairMousePicker:
def __init__(self):
self.pickerNode = CollisionNode('RepairMousePicker.pickerNode')
self.pickerNP = base.cam2d.attachNewNode(self.pickerNode)
self.pickerRay = CollisionRay()
self.pickerNode.addSolid(self.pickerRay)
self.collisionTraverser = CollisionTraverser()
self.collisionHandler = CollisionHandlerQueue()
self.collisionTraverser.addCollider(self.pickerNP, self.collisionHandler)
self.clearCollisionMask()
self.orthographic = True
def destroy(self):
del self.pickerNode
self.pickerNP.removeNode()
del self.pickerNP
del self.pickerRay
del self.collisionTraverser
del self.collisionHandler
def setOrthographic(self, ortho):
self.orthographic = ortho
def setCollisionMask(self, mask):
self.pickerNode.setFromCollideMask(mask)
def clearCollisionMask(self):
self.pickerNode.setFromCollideMask(BitMask32.allOff())
def getCollisions(self, traverseRoot, useIntoNodePaths = False):
if not base.mouseWatcherNode.hasMouse():
return []
mpos = base.mouseWatcherNode.getMouse()
if self.orthographic:
self.pickerRay.setFromLens(base.cam2d.node(), 0, 0)
self.pickerNP.setPos(mpos.getX(), 0.0, mpos.getY())
else:
self.pickerRay.setFromLens(base.cam2d.node(), mpos.getX(), mpos.getY())
self.pickerNP.setPos(0.0, 0.0, 0.0)
self.collisionTraverser.traverse(traverseRoot)
pickedObjects = []
if useIntoNodePaths:
for i in range(self.collisionHandler.getNumEntries()):
pickedObjects.append(self.collisionHandler.getEntry(i).getIntoNodePath())
else:
for i in range(self.collisionHandler.getNumEntries()):
pickedObjects.append(self.collisionHandler.getEntry(i))
return pickedObjects
| 35.069444
| 94
| 0.685149
|
127b52dc86dba829f0bac1effcaac2a123f62c00
| 11,677
|
py
|
Python
|
lib/python3.8/site-packages/ansible_collections/junipernetworks/junos/plugins/modules/junos_l2_interface.py
|
cjsteel/python3-venv-ansible-2.10.5
|
c95395c4cae844dc66fddde9b4343966f4b2ecd5
|
[
"Apache-1.1"
] | null | null | null |
lib/python3.8/site-packages/ansible_collections/junipernetworks/junos/plugins/modules/junos_l2_interface.py
|
cjsteel/python3-venv-ansible-2.10.5
|
c95395c4cae844dc66fddde9b4343966f4b2ecd5
|
[
"Apache-1.1"
] | null | null | null |
lib/python3.8/site-packages/ansible_collections/junipernetworks/junos/plugins/modules/junos_l2_interface.py
|
cjsteel/python3-venv-ansible-2.10.5
|
c95395c4cae844dc66fddde9b4343966f4b2ecd5
|
[
"Apache-1.1"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
module: junos_l2_interface
author: Ganesh Nalawade (@ganeshrn)
short_description: (deprecated, removed after 2022-06-01) Manage L2 Interface on Juniper
JUNOS network devices
description:
- This module provides declarative management of Layer-2 interface on Juniper JUNOS
network devices.
version_added: 1.0.0
deprecated:
why: Updated modules released with more functionality
alternative: Use M(junipernetworks.junos.junos_l2_interfaces) instead.
removed_at_date: '2022-06-01'
options:
name:
description:
- Name of the interface excluding any logical unit number.
type: str
description:
description:
- Description of Interface.
type: str
aggregate:
description:
- List of Layer-2 interface definitions.
type: list
elements: dict
suboptions:
name:
description:
- Name of the interface excluding any logical unit number.
type: str
required: true
description:
description:
- Description of Interface.
type: str
mode:
description:
- Mode in which interface needs to be configured.
choices:
- access
- trunk
type: str
access_vlan:
description:
- Configure given VLAN in access port. The value of C(access_vlan) should be vlan
name.
type: str
trunk_vlans:
description:
- List of VLAN names to be configured in trunk port. The value of C(trunk_vlans)
should be list of vlan names.
type: list
elements: str
native_vlan:
description:
- Native VLAN to be configured in trunk port. The value of C(native_vlan) should
be vlan id.
type: int
enhanced_layer:
description:
- True if your device has Enhanced Layer 2 Software (ELS).
type: bool
unit:
description:
- Logical interface number. Value of C(unit) should be of type integer.
type: int
filter_input:
description:
- The name of input filter of ethernet-switching.
type: str
filter_output:
description:
- The name of output filter of ethernet-switching.
type: str
state:
description:
- State of the Layer-2 Interface configuration.
type: str
choices:
- present
- absent
active:
description:
- Specifies whether or not the configuration is active or deactivated
type: bool
mode:
description:
- Mode in which interface needs to be configured.
choices:
- access
- trunk
type: str
access_vlan:
description:
- Configure given VLAN in access port. The value of C(access_vlan) should be vlan
name.
type: str
trunk_vlans:
description:
- List of VLAN names to be configured in trunk port. The value of C(trunk_vlans)
should be list of vlan names.
type: list
elements: str
native_vlan:
description:
- Native VLAN to be configured in trunk port. The value of C(native_vlan) should
be vlan id.
type: int
enhanced_layer:
description:
- True if your device has Enhanced Layer 2 Software (ELS).
default: true
type: bool
unit:
description:
- Logical interface number. Value of C(unit) should be of type integer.
default: 0
type: int
filter_input:
description:
- The name of input filter of ethernet-switching.
type: str
filter_output:
description:
- The name of output filter of ethernet-switching.
type: str
state:
description:
- State of the Layer-2 Interface configuration.
type: str
default: present
choices:
- present
- absent
active:
description:
- Specifies whether or not the configuration is active or deactivated
default: true
type: bool
requirements:
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on the remote device
being managed.
- Tested against vqfx-10000 JUNOS Version 15.1X53-D60.4.
- Recommended connection is C(netconf). See L(the Junos OS Platform Options,../network/user_guide/platform_junos.html).
- This module also works with C(local) connections for legacy playbooks.
extends_documentation_fragment:
- junipernetworks.junos.junos
"""
EXAMPLES = """
- name: Configure interface in access mode
junipernetworks.junos.junos_l2_interface:
name: ge-0/0/1
description: interface-access
mode: access
access_vlan: red
active: true
state: present
- name: Configure interface in trunk mode
junipernetworks.junos.junos_l2_interface:
name: ge-0/0/1
description: interface-trunk
mode: trunk
trunk_vlans:
- blue
- green
native_vlan: 100
active: true
state: present
- name: Configure interface in access and trunk mode using aggregate
junipernetworks.junos.junos_l2_interface:
aggregate:
- name: ge-0/0/1
description: test-interface-access
mode: access
access_vlan: red
- name: ge-0/0/2
description: test-interface-trunk
mode: trunk
trunk_vlans:
- blue
- green
native_vlan: 100
active: true
state: present
"""
RETURN = """
diff:
description: Configuration difference before and after applying change.
returned: when configuration is changed and diff option is enabled.
type: str
sample: >
[edit interfaces]
+ ge-0/0/1 {
+ description "l2 interface configured by Ansible";
+ unit 0 {
+ family ethernet-switching {
+ interface-mode access;
+ vlan {
+ members red;
+ }
+ }
+ }
+ }
"""
import collections
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import (
remove_default_spec,
)
from ansible_collections.junipernetworks.junos.plugins.module_utils.network.junos.junos import (
junos_argument_spec,
tostring,
)
from ansible_collections.junipernetworks.junos.plugins.module_utils.network.junos.junos import (
load_config,
map_params_to_obj,
map_obj_to_ele,
)
from ansible_collections.junipernetworks.junos.plugins.module_utils.network.junos.junos import (
commit_configuration,
discard_changes,
locked_config,
to_param_list,
)
USE_PERSISTENT_CONNECTION = True
def validate_vlan_id(value, module):
if value and not 1 <= value <= 4094:
module.fail_json(msg="vlan_id must be between 1 and 4094")
def validate_param_values(module, obj, param=None):
if not param:
param = module.params
for key in obj:
# validate the param value (if validator func exists)
validator = globals().get("validate_%s" % key)
if callable(validator):
validator(param.get(key), module)
def main():
""" main entry point for module execution
"""
element_spec = dict(
name=dict(),
mode=dict(choices=["access", "trunk"]),
access_vlan=dict(),
native_vlan=dict(type="int"),
trunk_vlans=dict(type="list", elements="str"),
unit=dict(default=0, type="int"),
filter_input=dict(),
filter_output=dict(),
description=dict(),
enhanced_layer=dict(default=True, type="bool"),
state=dict(default="present", choices=["present", "absent"]),
active=dict(default=True, type="bool"),
)
aggregate_spec = deepcopy(element_spec)
aggregate_spec["name"] = dict(required=True)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
required_one_of = [["name", "aggregate"]]
mutually_exclusive = [
["name", "aggregate"],
["access_vlan", "trunk_vlans"],
["access_vlan", "native_vlan"],
]
required_if = [
("mode", "access", ("access_vlan",)),
("mode", "trunk", ("trunk_vlans",)),
]
argument_spec = dict(
aggregate=dict(
type="list",
elements="dict",
options=aggregate_spec,
mutually_exclusive=mutually_exclusive,
required_if=required_if,
)
)
argument_spec.update(element_spec)
argument_spec.update(junos_argument_spec)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=mutually_exclusive,
required_one_of=required_one_of,
required_if=required_if,
)
warnings = list()
result = {"changed": False}
if warnings:
result["warnings"] = warnings
top = "interfaces/interface"
param_to_xpath_map = collections.OrderedDict()
param_to_xpath_map.update(
[
("name", {"xpath": "name", "is_key": True}),
("unit", {"xpath": "name", "top": "unit", "is_key": True}),
(
"mode",
{
"xpath": "interface-mode",
"top": "unit/family/ethernet-switching",
},
),
(
"access_vlan",
{
"xpath": "members",
"top": "unit/family/ethernet-switching/vlan",
},
),
(
"trunk_vlans",
{
"xpath": "members",
"top": "unit/family/ethernet-switching/vlan",
},
),
(
"filter_input",
{
"xpath": "input",
"top": "unit/family/ethernet-switching/filter",
},
),
(
"filter_output",
{
"xpath": "output",
"top": "unit/family/ethernet-switching/filter",
},
),
("native_vlan", {"xpath": "native-vlan-id"}),
("description", "description"),
]
)
params = to_param_list(module)
requests = list()
for param in params:
# if key doesn't exist in the item, get it from module.params
for key in param:
if param.get(key) is None:
param[key] = module.params[key]
item = param.copy()
validate_param_values(module, param_to_xpath_map, param=item)
param_to_xpath_map["mode"]["xpath"] = (
"interface-mode" if param["enhanced_layer"] else "port-mode"
)
want = map_params_to_obj(module, param_to_xpath_map, param=item)
requests.append(map_obj_to_ele(module, want, top, param=item))
diff = None
with locked_config(module):
for req in requests:
diff = load_config(
module, tostring(req), warnings, action="replace"
)
commit = not module.check_mode
if diff:
if commit:
commit_configuration(module)
else:
discard_changes(module)
result["changed"] = True
if module._diff:
result["diff"] = {"prepared": diff}
module.exit_json(**result)
if __name__ == "__main__":
main()
| 27.868735
| 119
| 0.602895
|
de9c8eb2450c695fbd90f343015d25178ebee290
| 1,075
|
py
|
Python
|
tests/test_slice_into_chunks.py
|
thekevinscott/VGGishAudioData
|
2bdf358147695411485163a7009c3b76ead8270b
|
[
"MIT"
] | 1
|
2022-01-19T07:30:17.000Z
|
2022-01-19T07:30:17.000Z
|
tests/test_slice_into_chunks.py
|
thekevinscott/AudioData
|
2bdf358147695411485163a7009c3b76ead8270b
|
[
"MIT"
] | null | null | null |
tests/test_slice_into_chunks.py
|
thekevinscott/AudioData
|
2bdf358147695411485163a7009c3b76ead8270b
|
[
"MIT"
] | null | null | null |
import pytest
import random
from ..AudioData.AudioData import AudioData
from pydub.generators import Sine
audioData = AudioData()
def test_it_returns_chunks():
audio = Sine(440).to_audio_segment()
files = [{
'audio': audio[0:1000],
'file': 'foo',
'label': 'foo',
'start_index': 0,
}]
chunks = audioData.slice_into_single_sample_chunks(files)
print(len(chunks))
assert len(chunks) == 1
def test_it_returns_one_chunk_for_excess_audio():
audio = Sine(440).to_audio_segment() * 2
files = [{
'audio': audio[0:1500],
'file': 'foo',
'label': 'foo',
'start_index': 0,
}]
chunks = audioData.slice_into_single_sample_chunks(files)
assert len(chunks) == 1
def test_it_returns_multiple_chunks():
audio = Sine(440).to_audio_segment() * 5
files = [{
'audio': audio,
'file': 'foo',
'label': 'foo',
'start_index': 0,
'samples': [],
}]
chunks = audioData.slice_into_single_sample_chunks(files)
assert len(chunks) == 5
| 24.431818
| 61
| 0.610233
|
c0539bd10f6bb025181c6b73d398c0cd6097e120
| 2,901
|
py
|
Python
|
src/openfermion/chem/pubchem.py
|
Emieeel/OpenFermion
|
c19d9667c5970473893f9bc0183556c4cd354dd7
|
[
"Apache-2.0"
] | 1,291
|
2017-09-27T22:00:26.000Z
|
2022-03-25T14:34:50.000Z
|
src/openfermion/chem/pubchem.py
|
SamarthVadia/OpenFermion
|
865d8591cad9b0681f6dd25a391a5292ed2de1d4
|
[
"Apache-2.0"
] | 521
|
2017-09-27T21:36:17.000Z
|
2022-03-02T12:45:56.000Z
|
src/openfermion/chem/pubchem.py
|
SamarthVadia/OpenFermion
|
865d8591cad9b0681f6dd25a391a5292ed2de1d4
|
[
"Apache-2.0"
] | 365
|
2017-09-27T21:25:38.000Z
|
2022-03-29T19:28:46.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def geometry_from_pubchem(name: str, structure: str = None):
"""Function to extract geometry using the molecule's name from the PubChem
database. The 'structure' argument can be used to specify which structure
info to use to extract the geometry. If structure=None, the geometry will
be constructed based on 3D info, if available, otherwise on 2D (to keep
backwards compatibility with the times when the argument 'structure'
was not implemented).
Args:
name: a string giving the molecule's name as required by the PubChem
database.
structure: a string '2d' or '3d', to specify a specific structure
information to be retrieved from pubchem. The default is None.
Recommended value is '3d'.
Returns:
geometry: a list of tuples giving the coordinates of each atom with
distances in Angstrom.
"""
import pubchempy
if structure in ['2d', '3d']:
pubchempy_molecule = pubchempy.get_compounds(name,
'name',
record_type=structure)
elif structure is None:
# Ideally get the 3-D geometry if available.
pubchempy_molecule = pubchempy.get_compounds(name,
'name',
record_type='3d')
# If the 3-D geometry isn't available, get the 2-D geometry instead.
if not pubchempy_molecule:
pubchempy_molecule = pubchempy.get_compounds(name,
'name',
record_type='2d')
else:
raise ValueError('Incorrect value for the argument structure=%s' %
structure)
# Check if pubchempy_molecule is an empty list or None
if not pubchempy_molecule:
print("Unable to find structure info in the PubChem database"
"for the specified molecule %s." % name)
return None
pubchempy_geometry = \
pubchempy_molecule[0].to_dict(properties=['atoms'])['atoms']
geometry = [(atom['element'], (atom['x'], atom['y'], atom.get('z', 0)))
for atom in pubchempy_geometry]
return geometry
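# A minimal usage sketch; it assumes the optional pubchempy dependency is
# installed and that network access to PubChem is available. The exact
# coordinates returned will vary with the database record.
if __name__ == '__main__':
    water = geometry_from_pubchem('water', structure='3d')
    # Expected shape: [('O', (x, y, z)), ('H', (x, y, z)), ('H', (x, y, z))]
    print(water)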
| 43.954545
| 78
| 0.604619
|
83771757befee8cd2af784ec0026d0001c945b10
| 649
|
py
|
Python
|
welcome/views.py
|
Gaoshengyue/openshfitdemo
|
2d57cf07d4dee3675af2665edbbd8cd1c0d4cb90
|
[
"CC0-1.0"
] | null | null | null |
welcome/views.py
|
Gaoshengyue/openshfitdemo
|
2d57cf07d4dee3675af2665edbbd8cd1c0d4cb90
|
[
"CC0-1.0"
] | null | null | null |
welcome/views.py
|
Gaoshengyue/openshfitdemo
|
2d57cf07d4dee3675af2665edbbd8cd1c0d4cb90
|
[
"CC0-1.0"
] | null | null | null |
import os
from django.shortcuts import render
from django.conf import settings
from django.http import HttpResponse
from . import database
from .models import PageView
# Create your views here.
def index(request):
if request.method == 'GET':
        return render(request, 'index.html')
# hostname = os.getenv('HOSTNAME', 'unknown')
# PageView.objects.create(hostname=hostname)
#
# return render(request, 'welcome/index.html', {
# 'hostname': hostname,
# 'database': database.info(),
# 'count': PageView.objects.count()
# })
def health(request):
return HttpResponse(PageView.objects.count())
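# For reference, a hypothetical urls.py wiring these views; the route
# paths and names are assumptions, since the project's actual URL
# configuration is not part of this file:
#
#     from django.urls import path
#     from welcome import views
#
#     urlpatterns = [
#         path('', views.index, name='index'),
#         path('health', views.health, name='health'),
#     ]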
| 24.037037
| 52
| 0.676425
|
180391b637a2adcddff9ff6494b193fa3170ca06
| 9,214
|
py
|
Python
|
python/ray/serve/http_proxy.py
|
heyong4725/ray
|
b73080c85f25e5037e27321661e599c706c42f42
|
[
"Apache-2.0"
] | 2
|
2020-12-22T00:55:48.000Z
|
2021-04-08T22:02:33.000Z
|
python/ray/serve/http_proxy.py
|
mehrdadn/ray
|
3506910c5da257215d38d02f424acc4f419ddbaf
|
[
"Apache-2.0"
] | 8
|
2020-11-13T19:02:47.000Z
|
2022-03-12T00:44:51.000Z
|
python/ray/serve/http_proxy.py
|
mehrdadn/ray
|
3506910c5da257215d38d02f424acc4f419ddbaf
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
from urllib.parse import parse_qs
import socket
import uvicorn
import ray
from ray.exceptions import RayTaskError
from ray import serve
from ray.serve.context import TaskContext
from ray.serve.metric import MetricClient
from ray.serve.request_params import RequestMetadata
from ray.serve.http_util import Response
from ray.serve.router import Router
# The maximum number of times to retry a request due to actor failure.
# TODO(edoakes): this should probably be configurable.
MAX_ACTOR_DEAD_RETRIES = 10
class HTTPProxy:
"""
    This class should be instantiated and run by an ASGI server.
    >>> import uvicorn
    >>> uvicorn.run(HTTPProxy())  # blocks forever
"""
async def fetch_config_from_controller(self, instance_name=None):
assert ray.is_initialized()
controller = serve.api._get_controller()
self.route_table = await controller.get_router_config.remote()
        # The exporter is required to return results for the /-/metrics
        # endpoint.
[self.metric_exporter] = await controller.get_metric_exporter.remote()
self.metric_client = MetricClient(self.metric_exporter)
self.request_counter = self.metric_client.new_counter(
"num_http_requests",
description="The number of requests processed",
label_names=("route", ))
self.router = Router()
await self.router.setup(instance_name)
def set_route_table(self, route_table):
self.route_table = route_table
async def receive_http_body(self, scope, receive, send):
body_buffer = []
more_body = True
while more_body:
message = await receive()
assert message["type"] == "http.request"
            # Both keys are optional per the ASGI spec, so default them.
            more_body = message.get("more_body", False)
            body_buffer.append(message.get("body", b""))
return b"".join(body_buffer)
def _parse_latency_slo(self, scope):
query_string = scope["query_string"].decode("ascii")
query_kwargs = parse_qs(query_string)
relative_slo_ms = query_kwargs.pop("relative_slo_ms", None)
absolute_slo_ms = query_kwargs.pop("absolute_slo_ms", None)
relative_slo_ms = self._validate_slo_ms(relative_slo_ms)
absolute_slo_ms = self._validate_slo_ms(absolute_slo_ms)
if relative_slo_ms is not None and absolute_slo_ms is not None:
raise ValueError("Both relative and absolute slo's"
"cannot be specified.")
return relative_slo_ms, absolute_slo_ms
def _validate_slo_ms(self, request_slo_ms):
if request_slo_ms is None:
return None
if len(request_slo_ms) != 1:
            raise ValueError(
                "Multiple SLO values specified; please specify only one.")
request_slo_ms = request_slo_ms[0]
request_slo_ms = float(request_slo_ms)
if request_slo_ms < 0:
raise ValueError("Request SLO must be positive, it is {}".format(
request_slo_ms))
return request_slo_ms
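    # To illustrate the parsing above with hypothetical values: a request
    # to /endpoint?relative_slo_ms=100 yields relative_slo_ms=100.0 and
    # absolute_slo_ms=None, while supplying both parameters, or repeating
    # either one, raises ValueError.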
def _make_error_sender(self, scope, receive, send):
async def sender(error_message, status_code):
response = Response(error_message, status_code=status_code)
await response.send(scope, receive, send)
return sender
async def _handle_system_request(self, scope, receive, send):
current_path = scope["path"]
if current_path == "/-/routes":
await Response(self.route_table).send(scope, receive, send)
elif current_path == "/-/metrics":
metric_info = await self.metric_exporter.inspect_metrics.remote()
await Response(metric_info).send(scope, receive, send)
else:
await Response(
"System path {} not found".format(current_path),
status_code=404).send(scope, receive, send)
async def __call__(self, scope, receive, send):
# NOTE: This implements ASGI protocol specified in
# https://asgi.readthedocs.io/en/latest/specs/index.html
error_sender = self._make_error_sender(scope, receive, send)
assert self.route_table is not None, (
"Route table must be set via set_route_table.")
assert scope["type"] == "http"
current_path = scope["path"]
self.request_counter.labels(route=current_path).add()
if current_path.startswith("/-/"):
await self._handle_system_request(scope, receive, send)
return
try:
endpoint_name, methods_allowed = self.route_table[current_path]
except KeyError:
error_message = (
"Path {} not found. "
"Please ping http://.../-/routes for routing table"
).format(current_path)
await error_sender(error_message, 404)
return
if scope["method"] not in methods_allowed:
error_message = ("Methods {} not allowed. "
"Available HTTP methods are {}.").format(
scope["method"], methods_allowed)
await error_sender(error_message, 405)
return
http_body_bytes = await self.receive_http_body(scope, receive, send)
# get slo_ms before enqueuing the query
try:
relative_slo_ms, absolute_slo_ms = self._parse_latency_slo(scope)
except ValueError as e:
await error_sender(str(e), 400)
return
headers = {k.decode(): v.decode() for k, v in scope["headers"]}
request_metadata = RequestMetadata(
endpoint_name,
TaskContext.Web,
relative_slo_ms=relative_slo_ms,
absolute_slo_ms=absolute_slo_ms,
call_method=headers.get("X-SERVE-CALL-METHOD".lower(), "__call__"),
shard_key=headers.get("X-SERVE-SHARD-KEY".lower(), None),
)
result = await self.router.enqueue_request(request_metadata, scope,
http_body_bytes)
if isinstance(result, RayTaskError):
error_message = "Task Error. Traceback: {}.".format(result)
await error_sender(error_message, 500)
else:
await Response(result).send(scope, receive, send)
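    # In short, __call__ answers system /-/ paths locally; every other
    # path is looked up in the route table, validated against its allowed
    # HTTP methods, tagged with optional SLO metadata, and enqueued on the
    # router, whose result (or RayTaskError) is rendered as the response.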
@ray.remote
class HTTPProxyActor:
async def __init__(self, host, port, instance_name=None):
serve.init(name=instance_name)
self.app = HTTPProxy()
await self.app.fetch_config_from_controller(instance_name)
self.host = host
self.port = port
# Start running the HTTP server on the event loop.
asyncio.get_event_loop().create_task(self.run())
def ready(self):
return True
async def run(self):
sock = socket.socket()
        # These two socket options allow multiple processes to bind to the
        # same port. The kernel will evenly load-balance among the port
        # listeners. Note: this will only work on Linux.
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, "SO_REUSEPORT"):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
sock.bind((self.host, self.port))
        # Note(simon): we have to use the lower-level uvicorn Config and
        # Server classes because we want to run the server as a coroutine.
        # The only alternative is to call uvicorn.run, which is blocking.
config = uvicorn.Config(
self.app,
host=self.host,
port=self.port,
lifespan="off",
access_log=False)
server = uvicorn.Server(config=config)
# TODO(edoakes): we need to override install_signal_handlers here
# because the existing implementation fails if it isn't running in
# the main thread and uvicorn doesn't expose a way to configure it.
server.install_signal_handlers = lambda: None
await server.serve(sockets=[sock])
async def set_route_table(self, route_table):
self.app.set_route_table(route_table)
# ------ Proxy router logic ------ #
async def add_new_worker(self, backend_tag, replica_tag, worker_handle):
return await self.app.router.add_new_worker(backend_tag, replica_tag,
worker_handle)
async def set_traffic(self, endpoint, traffic_policy):
return await self.app.router.set_traffic(endpoint, traffic_policy)
async def set_backend_config(self, backend, config):
return await self.app.router.set_backend_config(backend, config)
async def remove_backend(self, backend):
return await self.app.router.remove_backend(backend)
async def remove_endpoint(self, endpoint):
return await self.app.router.remove_endpoint(endpoint)
async def remove_worker(self, backend_tag, replica_tag):
return await self.app.router.remove_worker(backend_tag, replica_tag)
async def enqueue_request(self, request_meta, *request_args,
**request_kwargs):
return await self.app.router.enqueue_request(
request_meta, *request_args, **request_kwargs)
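# A minimal launch sketch, assuming a Ray cluster with a running Serve
# controller is available (the actor's __init__ calls serve.init and
# fetches config from the controller); "0.0.0.0" and 8000 are placeholder
# values, not defaults taken from this module.
if __name__ == "__main__":
    ray.init()
    proxy = HTTPProxyActor.remote("0.0.0.0", 8000)
    ray.get(proxy.ready.remote())  # resolves once the proxy has started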
| 38.552301
| 79
| 0.645648
|