Dataset schema (one record per source file):
hexsha: string (len 40) | size: int64 (4 to 1.02M) | ext: string (8 classes) | lang: string (1 value)
max_stars_repo_path: string (len 4 to 209) | max_stars_repo_name: string (len 5 to 121) | max_stars_repo_head_hexsha: string (len 40) | max_stars_repo_licenses: list (len 1 to 10) | max_stars_count: int64 (1 to 191k, nullable) | max_stars_repo_stars_event_min_datetime: string (len 24, nullable) | max_stars_repo_stars_event_max_datetime: string (len 24, nullable)
max_issues_repo_path: string (len 4 to 209) | max_issues_repo_name: string (len 5 to 121) | max_issues_repo_head_hexsha: string (len 40) | max_issues_repo_licenses: list (len 1 to 10) | max_issues_count: int64 (1 to 67k, nullable) | max_issues_repo_issues_event_min_datetime: string (len 24, nullable) | max_issues_repo_issues_event_max_datetime: string (len 24, nullable)
max_forks_repo_path: string (len 4 to 209) | max_forks_repo_name: string (len 5 to 121) | max_forks_repo_head_hexsha: string (len 40) | max_forks_repo_licenses: list (len 1 to 10) | max_forks_count: int64 (1 to 105k, nullable) | max_forks_repo_forks_event_min_datetime: string (len 24, nullable) | max_forks_repo_forks_event_max_datetime: string (len 24, nullable)
content: string (len 4 to 1.02M) | avg_line_length: float64 (1.07 to 66.1k) | max_line_length: int64 (4 to 266k) | alphanum_fraction: float64 (0.01 to 1)
path: rekallpy/rekall/interval.py | hexsha: f471298626453a2e9d886c99a481fa1c50afd395 | size: 4,361 | ext: py | lang: Python
repo: pierrerappolt/rekall @ f63659c6b12c68f235bca8df11e418daa2f82995 | licenses: ["Apache-2.0"]
stars: 58 (2019-04-06T08:22:07.000Z to 2021-09-24T17:06:48.000Z) | issues: 17 (2019-03-31T18:00:57.000Z to 2021-03-08T23:09:07.000Z) | forks: 12 (2018-12-09T05:04:08.000Z to 2021-01-28T05:22:18.000Z)
"""An Interval is a wrapper around a Bounds instance with a payload.
"""
class Interval:
"""A single Interval.
An Interval is a wrapper around a Bounds and a payload. The payload can be
referenced with the 'payload' key - i.e. ``interval['payload']``, as can
the fields of the Bounds. The bounds field itself can also be referenced
with the 'bounds' key.
Attributes:
bounds: Bounds object.
payload: payload object.
"""
def __init__(self, bounds, payload=None):
"""Initializes an interval with certain bounds and payload.
Args:
bounds: Bounds for this Interval.
payload (optional): Metadata of arbitrary type. Defaults to None.
"""
self.bounds = bounds
self.payload = payload
def __getitem__(self, arg):
"""Access bounds, payload, or a co-ordinate of bounds using key access.
Strings 'bounds' and 'payload' are hard-coded to return the bounds or
payload attributes, respectively.
"""
if arg == 'bounds':
return self.bounds
if arg == 'payload':
return self.payload
return self.bounds[arg]
def __setitem__(self, key, item):
"""Set bounds, payload, or a co-ordinate of bounds using key access.
Strings 'bounds' and 'payload' are hard-coded to reference the bounds
or payload attributes, respectively.
"""
if key == 'bounds':
self.bounds = item
elif key == 'payload':
self.payload = item
else:
self.bounds[key] = item
def __repr__(self):
"""String representation is ``<Interval {bounds} payload:{payload}>``."""
return "<Interval {} payload:{}>".format(self.bounds, self.payload)
def __lt__(self, other):
return self['bounds'] < other['bounds']
def copy(self):
"""Returns a copy of the Interval."""
return Interval(self.bounds.copy(), self.payload)
def combine(self,
other,
bounds_combiner,
payload_combiner=lambda p1, p2: p1):
"""Combines two Intervals into one by separately combining the bounds
and the payload.
Args:
other: The other Interval to combine with.
bounds_combiner: The function to combine the bounds. Takes two
Bounds objects as input and returns one Bounds object.
payload_combiner: The function to combine the two payloads. Takes
two payload objects as input and returns one payload object.
Returns:
A new Interval combined using ``bounds_combiner`` and
``payload_combiner``.
"""
return Interval(bounds_combiner(self.bounds, other.bounds),
payload_combiner(self.payload, other.payload))
def P(pred):
"""This wraps a predicate so it is applied to the payload of Intervals
instead of the Intervals themselves.
The output function expects one or more Intervals as input (depending
on how many arguments ``pred`` expects) and applies the predicate to
the payloads of the Intervals instead of the Intervals themselves.
Args:
pred: The predicate to wrap.
Returns:
An output function that applies ``pred`` to payloads.
"""
def new_pred(*interval_args):
return pred(*[i.payload for i in interval_args])
return new_pred
def size(self, axis=None):
"""Get the size of the bounds along some axis.
Args:
axis (optional): The axis to compute size on. Represented as a pair
of co-ordinates, such as ``('t1', 't2')``. Defaults to ``None``,
which uses the primary axis of ``self``'s Bounds.
Returns:
The size of the bounds across some axis.
"""
return self.bounds.size(axis)
def to_json(self, payload_to_json):
"""Converts the interval to a JSON object.
Args:
payload_to_json: Function that converts the payload to a JSON object.
Returns:
JSON object for the interval
"""
return {
'bounds': self.bounds.to_json(),
'payload': payload_to_json(self.payload)
}
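# --- Usage sketch (not part of the original file) ---
# Interval only assumes that its bounds support key access, ``copy()`` and ``size()``.
# ``SimpleBounds`` below is a hypothetical stand-in used purely for illustration; in the
# real library a rekall Bounds subclass would fill this role.
class SimpleBounds:
    def __init__(self, t1, t2):
        self.coords = {'t1': t1, 't2': t2}

    def __getitem__(self, key):
        return self.coords[key]

    def __setitem__(self, key, value):
        self.coords[key] = value

    def copy(self):
        return SimpleBounds(self.coords['t1'], self.coords['t2'])

    def size(self, axis=None):
        axis = axis or ('t1', 't2')
        return self.coords[axis[1]] - self.coords[axis[0]]

    def __repr__(self):
        return repr(self.coords)


i1 = Interval(SimpleBounds(0, 10), payload={'label': 'a'})
i2 = Interval(SimpleBounds(5, 20), payload={'label': 'b'})
print(i1['t1'], i1['payload'])   # 0 {'label': 'a'}
span = i1.combine(
    i2,
    bounds_combiner=lambda b1, b2: SimpleBounds(min(b1['t1'], b2['t1']),
                                                max(b1['t2'], b2['t2'])),
)
print(span.size())               # 20 (payload defaults to i1's payload)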
avg_line_length: 33.290076 | max_line_length: 81 | alphanum_fraction: 0.598487

path: tests/model/__init__.py | hexsha: 99c15e1189083d38f77d6b66ad7f263f79284215 | size: 73 | ext: py | lang: Python
repo: ElPapi42/DeepDeblurring @ 8649f607ddf70a14c067cf902fbba341f99635af | licenses: ["MIT"]
stars: 2 (2020-06-23T21:56:53.000Z to 2021-02-02T10:21:35.000Z) | issues: 6 (2020-05-27T15:07:07.000Z to 2021-02-11T12:24:46.000Z) | forks: 2 (2020-09-28T21:04:31.000Z to 2021-02-11T12:26:56.000Z)
#!/usr/bin/python
# coding=utf-8
"""Test suit for Model sub-package."""
avg_line_length: 14.6 | max_line_length: 38 | alphanum_fraction: 0.657534

path: qiskit/circuit/library/arithmetic/adders/adder.py | hexsha: 2e1a814d92cc3c0c8ecb2d351152bf517a08ad7d | size: 1,821 | ext: py | lang: Python
repo: Roshan-Thomas/qiskit-terra @ 77219b5c7b7146b1545c5e5190739b36f4064b2f | licenses: ["Apache-2.0"]
stars: 1,599 (2018-07-10T10:59:12.000Z to 2022-03-31T23:56:25.000Z) | issues: 5,244 (2018-07-10T06:20:13.000Z to 2022-03-31T22:18:48.000Z) | forks: 1,409 (2018-07-10T02:16:12.000Z to 2022-03-31T09:01:32.000Z)
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Compute the sum of two equally sized qubit registers."""
from qiskit.circuit import QuantumCircuit
class Adder(QuantumCircuit):
r"""Compute the sum of two equally sized qubit registers.
For two registers :math:`|a\rangle_n` and :math:`|b\rangle_n` with :math:`n` qubits each, an
adder performs the following operation
.. math::
|a\rangle_n |b\rangle_n \mapsto |a\rangle_n |a + b\rangle_{n + 1}.
The quantum register :math:`|a\rangle_n` (and analogously :math:`|b\rangle_n`)
.. math::
|a\rangle_n = |a_0\rangle \otimes \cdots \otimes |a_{n - 1}\rangle,
for :math:`a_i \in \{0, 1\}`, is associated with the integer value
.. math::
a = 2^{0}a_{0} + 2^{1}a_{1} + \cdots + 2^{n - 1}a_{n - 1}.
"""
def __init__(self, num_state_qubits: int, name: str = "Adder") -> None:
"""
Args:
num_state_qubits: The number of qubits in each of the registers.
name: The name of the circuit.
"""
super().__init__(name=name)
self._num_state_qubits = num_state_qubits
@property
def num_state_qubits(self) -> int:
"""The number of state qubits, i.e. the number of bits in each input register.
Returns:
The number of state qubits.
"""
return self._num_state_qubits
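# Usage sketch (not part of the original file). ``Adder`` above is only a thin base class
# that records the register size; concrete adders elsewhere in Qiskit's circuit library
# (e.g. ``CDKMRippleCarryAdder``) build the actual circuit. The import path and defaults
# below are assumptions about that wider library, not something defined in this file.
def _adder_example():
    from qiskit.circuit.library import CDKMRippleCarryAdder

    adder = CDKMRippleCarryAdder(num_state_qubits=3)  # sums two 3-qubit registers
    print(adder.num_state_qubits)  # 3, via the num_state_qubits property above
    print(adder.num_qubits)        # total circuit width, including carry/helper qubits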
avg_line_length: 30.864407 | max_line_length: 95 | alphanum_fraction: 0.646897

path: test/util/bitcoin-util-test.py | hexsha: 16371a6234afd4feefbab8c564a042aafa120394 | size: 6,725 | ext: py | lang: Python
repo: bumbacoin/cream @ f3e72b58a3b5ae108e2e9c1675f95aacb2599711 | licenses: ["MIT"]
stars: 4,424 (2015-10-19T19:04:02.000Z to 2022-03-21T12:11:29.000Z) | issues: 364 (2017-09-17T09:08:57.000Z to 2022-03-28T19:22:50.000Z) | forks: 459 (2017-07-26T09:09:27.000Z to 2022-03-06T01:25:17.000Z)
#!/usr/bin/env python3
# Copyright 2014 BitPay Inc.
# Copyright 2016-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test framework for bitcoin utils.
Runs automatically during `make check`.
Can also be run manually."""
from __future__ import division, print_function, unicode_literals
import argparse
import binascii
try:
import configparser
except ImportError:
import ConfigParser as configparser
import difflib
import json
import logging
import os
import pprint
import subprocess
import sys
def main():
config = configparser.ConfigParser()
config.optionxform = str
config.readfp(open(os.path.join(os.path.dirname(__file__), "../config.ini"), encoding="utf8"))
env_conf = dict(config.items('environment'))
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-v', '--verbose', action='store_true')
args = parser.parse_args()
verbose = args.verbose
if verbose:
level = logging.DEBUG
else:
level = logging.ERROR
formatter = '%(asctime)s - %(levelname)s - %(message)s'
# Add the format/level to the logger
logging.basicConfig(format=formatter, level=level)
bctester(os.path.join(env_conf["SRCDIR"], "test", "util", "data"), "bitcoin-util-test.json", env_conf)
def bctester(testDir, input_basename, buildenv):
""" Loads and parses the input file, runs all tests and reports results"""
input_filename = os.path.join(testDir, input_basename)
raw_data = open(input_filename, encoding="utf8").read()
input_data = json.loads(raw_data)
failed_testcases = []
for testObj in input_data:
try:
bctest(testDir, testObj, buildenv)
logging.info("PASSED: " + testObj["description"])
except:
logging.info("FAILED: " + testObj["description"])
failed_testcases.append(testObj["description"])
if failed_testcases:
error_message = "FAILED_TESTCASES:\n"
error_message += pprint.pformat(failed_testcases, width=400)
logging.error(error_message)
sys.exit(1)
else:
sys.exit(0)
def bctest(testDir, testObj, buildenv):
"""Runs a single test, comparing output and RC to expected output and RC.
Raises an error if input can't be read, executable fails, or output/RC
are not as expected. Error is caught by bctester() and reported.
"""
# Get the exec names and arguments
execprog = os.path.join(buildenv["BUILDDIR"], "src", testObj["exec"] + buildenv["EXEEXT"])
execargs = testObj['args']
execrun = [execprog] + execargs
# Read the input data (if there is any)
stdinCfg = None
inputData = None
if "input" in testObj:
filename = os.path.join(testDir, testObj["input"])
inputData = open(filename, encoding="utf8").read()
stdinCfg = subprocess.PIPE
# Read the expected output data (if there is any)
outputFn = None
outputData = None
outputType = None
if "output_cmp" in testObj:
outputFn = testObj['output_cmp']
outputType = os.path.splitext(outputFn)[1][1:] # output type from file extension (determines how to compare)
try:
outputData = open(os.path.join(testDir, outputFn), encoding="utf8").read()
except:
logging.error("Output file " + outputFn + " can not be opened")
raise
if not outputData:
logging.error("Output data missing for " + outputFn)
raise Exception
if not outputType:
logging.error("Output file %s does not have a file extension" % outputFn)
raise Exception
# Run the test
proc = subprocess.Popen(execrun, stdin=stdinCfg, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
try:
outs = proc.communicate(input=inputData)
except OSError:
logging.error("OSError, Failed to execute " + execprog)
raise
if outputData:
data_mismatch, formatting_mismatch = False, False
# Parse command output and expected output
try:
a_parsed = parse_output(outs[0], outputType)
except Exception as e:
logging.error('Error parsing command output as %s: %s' % (outputType, e))
raise
try:
b_parsed = parse_output(outputData, outputType)
except Exception as e:
logging.error('Error parsing expected output %s as %s: %s' % (outputFn, outputType, e))
raise
# Compare data
if a_parsed != b_parsed:
logging.error("Output data mismatch for " + outputFn + " (format " + outputType + ")")
data_mismatch = True
# Compare formatting
if outs[0] != outputData:
error_message = "Output formatting mismatch for " + outputFn + ":\n"
error_message += "".join(difflib.context_diff(outputData.splitlines(True),
outs[0].splitlines(True),
fromfile=outputFn,
tofile="returned"))
logging.error(error_message)
formatting_mismatch = True
assert not data_mismatch and not formatting_mismatch
# Compare the return code to the expected return code
wantRC = 0
if "return_code" in testObj:
wantRC = testObj['return_code']
if proc.returncode != wantRC:
logging.error("Return code mismatch for " + outputFn)
raise Exception
if "error_txt" in testObj:
want_error = testObj["error_txt"]
# Compare error text
# TODO: ideally, we'd compare the strings exactly and also assert
# that stderr is empty if no errors are expected. However, bitcoin-tx
# emits DISPLAY errors when running as a windows application on
# linux through wine. Just assert that the expected error text appears
# somewhere in stderr.
if want_error not in outs[1]:
logging.error("Error mismatch:\n" + "Expected: " + want_error + "\nReceived: " + outs[1].rstrip())
raise Exception
def parse_output(a, fmt):
"""Parse the output according to specified format.
Raise an error if the output can't be parsed."""
if fmt == 'json': # json: compare parsed data
return json.loads(a)
elif fmt == 'hex': # hex: parse and compare binary data
return binascii.a2b_hex(a.strip())
else:
raise NotImplementedError("Don't know how to compare %s" % fmt)
if __name__ == '__main__':
main()
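# Illustrative sketch (not part of the original file): each entry in
# bitcoin-util-test.json is a JSON object with the keys that bctest() reads above.
# The concrete file names and arguments below are made up for illustration.
example_testobj = {
    "description": "Creates a blank v1 transaction",
    "exec": "./bitcoin-tx",          # joined with BUILDDIR/src and EXEEXT
    "args": ["-json", "-create"],    # command-line arguments passed to the binary
    "input": "blanktx.hex",          # optional: file whose contents are piped to stdin
    "output_cmp": "blanktx.json",    # optional: expected output, compared by extension
    "return_code": 0,                # optional: expected exit status (defaults to 0)
    # "error_txt": "error: ...",     # optional: substring expected somewhere on stderr
}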
avg_line_length: 37.154696 | max_line_length: 125 | alphanum_fraction: 0.637026

path: pyfiction/agents/agent.py | hexsha: 4503be63b3e50a6f6828c0f24f0c1ee87047c3f9 | size: 205 | ext: py | lang: Python
stars: 32 (2016-05-28T06:12:38.000Z to 2021-09-03T23:10:18.000Z) in FPreta/pyfiction @ a8af76c6badb11aa442122b1f2c4fbda1cf2ac53, licenses ["MIT"]
issues: 4 (2019-12-16T20:18:25.000Z to 2022-03-01T11:23:10.000Z) in KailashDN/pyfiction @ dc126d48578c53a3d2f95723c94da0afdd3282d0, licenses ["MIT"]
forks: 13 (2017-08-15T13:14:00.000Z to 2022-03-01T01:42:37.000Z) in KailashDN/pyfiction @ dc126d48578c53a3d2f95723c94da0afdd3282d0, licenses ["MIT"]
class Agent(object):
def __init__(self):
raise NotImplementedError("Agent is an abstract class.")
def act(self, **kwargs):
raise NotImplementedError("Agent is an abstract class.")
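# Usage sketch (not part of the original file): Agent only raises NotImplementedError, so
# a concrete agent overrides both methods and must not call super().__init__().
# ``RandomAgent`` and its ``actions`` keyword argument are hypothetical.
import random

class RandomAgent(Agent):
    def __init__(self):
        pass  # intentionally skips Agent.__init__, which always raises

    def act(self, **kwargs):
        # pick one of the supplied candidate actions at random
        return random.choice(kwargs.get('actions', [None]))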
avg_line_length: 29.285714 | max_line_length: 64 | alphanum_fraction: 0.678049

path: python_jsonschema_objects/wrapper_types.py | hexsha: c57eede68084be249ec2692da39387d11894140b | size: 11,302 | ext: py | lang: Python
repo: edwardcwang/python-jsonschema-objects @ 8ba1f8670b4ffbd5979d465038a36ec3dd265f63 | licenses: ["MIT"]
stars: null | issues: null | forks: 1 (2020-02-26T06:58:36.000Z to 2020-02-26T06:58:36.000Z)
import collections
import logging
import six
from python_jsonschema_objects import util
from python_jsonschema_objects.validators import registry, ValidationError
logger = logging.getLogger(__name__)
class ArrayWrapper(collections.abc.MutableSequence):
""" A wrapper for array-like structures.
This implements all of the array like behavior that one would want,
with a dirty-tracking mechanism to avoid constant validation costs.
"""
def __len__(self):
return len(self.data)
def __delitem__(self, index):
del self.data[index]  # delete by index, not by value
self._dirty = True
def insert(self, index, value):
self.data.insert(index, value)
self._dirty = True
def __setitem__(self, index, value):
self.data[index] = value
self._dirty = True
def __getitem__(self, idx):
return self.typed_elems[idx]
def __eq__(self, other):
if isinstance(other, ArrayWrapper):
return self.for_json() == other.for_json()
else:
return self.for_json() == other
def __init__(self, ary):
if isinstance(ary, (list, tuple, collections.abc.Sequence)):
self.data = ary
self._dirty = True
self._typed = None
elif isinstance(ary, ArrayWrapper):
self.data = ary.data
self._dirty = True
self._typed = None
else:
raise TypeError("Invalid value given to array validator: {0}"
.format(ary))
@property
def typed_elems(self):
if self._typed is None or self._dirty is True:
self._typed = self.validate_items()
self._dirty = False
return self._typed
def __repr__(self):
return "<%s=%s>" % (
self.__class__.__name__,
str(self.data)
)
@classmethod
def from_json(cls, jsonmsg):
import json
msg = json.loads(jsonmsg)
obj = cls(msg)
obj.validate()
return obj
def serialize(self):
d = self.validate_items()
enc = util.ProtocolJSONEncoder()
return enc.encode(d)
def for_json(self):
from python_jsonschema_objects import classbuilder
out = []
for item in self.typed_elems:
if isinstance(item, (
classbuilder.ProtocolBase,
classbuilder.LiteralValue,
ArrayWrapper)):
out.append(item.for_json())
else:
out.append(item)
return out
def validate(self):
self.validate_items()
self.validate_length()
self.validate_uniqueness()
return True
def validate_uniqueness(self):
from python_jsonschema_objects import classbuilder
if getattr(self, 'uniqueItems', None) is not None:
testset = set(self.data)
if len(testset) != len(self.data):
raise ValidationError(
"{0} has duplicate elements, but uniqueness required"
.format(self.data))
def validate_length(self):
from python_jsonschema_objects import classbuilder
if getattr(self, 'minItems', None) is not None:
if len(self.data) < self.minItems:
raise ValidationError(
"{1} has too few elements. Wanted {0}."
.format(self.minItems, self.data))
if getattr(self, 'maxItems', None) is not None:
if len(self.data) > self.maxItems:
raise ValidationError(
"{1} has too few elements. Wanted {0}."
.format(self.maxItems, self.data))
def validate_items(self):
from python_jsonschema_objects import classbuilder
if self.__itemtype__ is None:
return
type_checks = self.__itemtype__
if not isinstance(type_checks, (tuple, list)):
# we were given items = {'type': 'blah'} ; thus ensure the type for all data.
type_checks = [type_checks] * len(self.data)
elif len(type_checks) > len(self.data):
raise ValidationError(
"{1} does not have sufficient elements to validate against {0}"
.format(self.__itemtype__, self.data))
typed_elems = []
for elem, typ in zip(self.data, type_checks):
if isinstance(typ, dict):
for param, paramval in six.iteritems(typ):
validator = registry(param)
if validator is not None:
validator(paramval, elem, typ)
typed_elems.append(elem)
elif util.safe_issubclass(typ, classbuilder.LiteralValue):
val = typ(elem)
val.validate()
typed_elems.append(val)
elif util.safe_issubclass(typ, classbuilder.ProtocolBase):
if not isinstance(elem, typ):
try:
if isinstance(elem, (six.string_types, six.integer_types, float)):
val = typ(elem)
else:
val = typ(**util.coerce_for_expansion(elem))
except TypeError as e:
raise ValidationError("'{0}' is not a valid value for '{1}': {2}"
.format(elem, typ, e))
else:
val = elem
val.validate()
typed_elems.append(val)
elif util.safe_issubclass(typ, ArrayWrapper):
val = typ(elem)
val.validate()
typed_elems.append(val)
elif isinstance(typ, classbuilder.TypeProxy):
try:
if isinstance(elem, (six.string_types, six.integer_types, float)):
val = typ(elem)
else:
val = typ(**util.coerce_for_expansion(elem))
except TypeError as e:
raise ValidationError("'{0}' is not a valid value for '{1}': {2}"
.format(elem, typ, e))
else:
val.validate()
typed_elems.append(val)
return typed_elems
@staticmethod
def create(name, item_constraint=None, **addl_constraints):
""" Create an array validator based on the passed in constraints.
If item_constraint is a tuple, it is assumed that tuple validation
is being performed. If it is a class or dictionary, list validation
will be performed. Classes are assumed to be subclasses of ProtocolBase,
while dictionaries are expected to be basic types ('string', 'number', ...).
addl_constraints is expected to be key-value pairs of any of the other
constraints permitted by JSON Schema v4.
"""
from python_jsonschema_objects import classbuilder
klassbuilder = addl_constraints.pop("classbuilder", None)
props = {}
if item_constraint is not None:
if isinstance(item_constraint, (tuple, list)):
for i, elem in enumerate(item_constraint):
isdict = isinstance(elem, (dict,))
isklass = isinstance( elem, type) and util.safe_issubclass(
elem, (classbuilder.ProtocolBase, classbuilder.LiteralValue))
if not any([isdict, isklass]):
raise TypeError(
"Item constraint (position {0}) is not a schema".format(i))
elif isinstance(item_constraint, classbuilder.TypeProxy):
pass
elif util.safe_issubclass(item_constraint, ArrayWrapper):
pass
else:
isdict = isinstance(item_constraint, (dict,))
isklass = isinstance( item_constraint, type) and util.safe_issubclass(
item_constraint, (classbuilder.ProtocolBase, classbuilder.LiteralValue))
if not any([isdict, isklass]):
raise TypeError("Item constraint is not a schema")
if isdict and '$ref' in item_constraint:
if klassbuilder is None:
raise TypeError("Cannot resolve {0} without classbuilder"
.format(item_constraint['$ref']))
uri = item_constraint['$ref']
if uri in klassbuilder.resolved:
logger.debug(util.lazy_format(
"Using previously resolved object for {0}", uri))
else:
logger.debug(util.lazy_format("Resolving object for {0}", uri))
with klassbuilder.resolver.resolving(uri) as resolved:
# Set incase there is a circular reference in schema definition
klassbuilder.resolved[uri] = None
klassbuilder.resolved[uri] = klassbuilder.construct(
uri,
resolved,
(classbuilder.ProtocolBase,))
item_constraint = klassbuilder.resolved[uri]
elif isdict and item_constraint.get('type') == 'array':
# We need to create a sub-array validator.
item_constraint = ArrayWrapper.create(name + "#sub",
item_constraint=item_constraint[
'items'],
addl_constraints=item_constraint)
elif isdict and 'oneOf' in item_constraint:
# We need to create a TypeProxy validator
uri = "{0}_{1}".format(name, "<anonymous_list_type>")
type_array = []
for i, item_detail in enumerate(item_constraint['oneOf']):
if '$ref' in item_detail:
subtype = klassbuilder.construct(
util.resolve_ref_uri(
klassbuilder.resolver.resolution_scope,
item_detail['$ref']),
item_detail)
else:
subtype = klassbuilder.construct(
uri + "_%s" % i, item_detail)
type_array.append(subtype)
item_constraint = classbuilder.TypeProxy(type_array)
elif isdict and item_constraint.get('type') == 'object':
""" We need to create a ProtocolBase object for this anonymous definition"""
uri = "{0}_{1}".format(name, "<anonymous_list_type>")
item_constraint = klassbuilder.construct(
uri, item_constraint)
props['__itemtype__'] = item_constraint
props.update(addl_constraints)
validator = type(str(name), (ArrayWrapper,), props)
return validator
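# Rough usage sketch (not part of the original file), based only on the code above.
# create() returns a new ArrayWrapper subclass; __itemtype__ drives validate_items() and
# the extra keyword becomes a class attribute read by validate_length(). Whether a bare
# {'type': 'integer'} item schema is enforced as-is depends on the validator registry,
# so treat the exact constraint below as an assumption.
IntegerArray = ArrayWrapper.create(
    "IntegerArray",
    item_constraint={"type": "integer"},  # list validation: one schema for every element
    minItems=1,
)

arr = IntegerArray([1, 2, 3])
arr.validate()           # runs validate_items, validate_length and validate_uniqueness
print(arr.for_json())    # [1, 2, 3]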
avg_line_length: 38.838488 | max_line_length: 96 | alphanum_fraction: 0.530437

path: src/aioquic/h3/connection.py | hexsha: f446fac257fd6ba7c7b68643719edbb9fbbe1c8c | size: 39,002 | ext: py | lang: Python
repo: sanketpandia/aioquic-docker-server @ e823aaed138049db089f6cb6db7f9c1f568133a1 | licenses: ["BSD-3-Clause"]
stars: null | issues: null | forks: null
import logging
import re
from enum import Enum, IntEnum
from typing import Dict, FrozenSet, List, Optional, Set
import pylsqpack
from aioquic.buffer import UINT_VAR_MAX_SIZE, Buffer, BufferReadError, encode_uint_var
from aioquic.h3.events import (
DatagramReceived,
DataReceived,
H3Event,
Headers,
HeadersReceived,
PushPromiseReceived,
WebTransportStreamDataReceived,
)
from aioquic.h3.exceptions import NoAvailablePushIDError
from aioquic.quic.connection import QuicConnection, stream_is_unidirectional
from aioquic.quic.events import DatagramFrameReceived, QuicEvent, StreamDataReceived
from aioquic.quic.logger import QuicLoggerTrace
logger = logging.getLogger("http3")
H3_ALPN = ["h3", "h3-32", "h3-31", "h3-30", "h3-29"]
RESERVED_SETTINGS = (0x0, 0x2, 0x3, 0x4, 0x5)
UPPERCASE = re.compile(b"[A-Z]")
class ErrorCode(IntEnum):
H3_NO_ERROR = 0x100
H3_GENERAL_PROTOCOL_ERROR = 0x101
H3_INTERNAL_ERROR = 0x102
H3_STREAM_CREATION_ERROR = 0x103
H3_CLOSED_CRITICAL_STREAM = 0x104
H3_FRAME_UNEXPECTED = 0x105
H3_FRAME_ERROR = 0x106
H3_EXCESSIVE_LOAD = 0x107
H3_ID_ERROR = 0x108
H3_SETTINGS_ERROR = 0x109
H3_MISSING_SETTINGS = 0x10A
H3_REQUEST_REJECTED = 0x10B
H3_REQUEST_CANCELLED = 0x10C
H3_REQUEST_INCOMPLETE = 0x10D
H3_MESSAGE_ERROR = 0x10E
H3_CONNECT_ERROR = 0x10F
H3_VERSION_FALLBACK = 0x110
QPACK_DECOMPRESSION_FAILED = 0x200
QPACK_ENCODER_STREAM_ERROR = 0x201
QPACK_DECODER_STREAM_ERROR = 0x202
class FrameType(IntEnum):
DATA = 0x0
HEADERS = 0x1
PRIORITY = 0x2
CANCEL_PUSH = 0x3
SETTINGS = 0x4
PUSH_PROMISE = 0x5
GOAWAY = 0x7
MAX_PUSH_ID = 0xD
DUPLICATE_PUSH = 0xE
WEBTRANSPORT_STREAM = 0x41
class HeadersState(Enum):
INITIAL = 0
AFTER_HEADERS = 1
AFTER_TRAILERS = 2
class Setting(IntEnum):
QPACK_MAX_TABLE_CAPACITY = 0x1
MAX_HEADER_LIST_SIZE = 0x6
QPACK_BLOCKED_STREAMS = 0x7
NUM_PLACEHOLDERS = 0x9
H3_DATAGRAM = 0xFFD277
ENABLE_WEBTRANSPORT = 0x2B603742
# Dummy setting to check it is correctly ignored by the peer.
# https://tools.ietf.org/html/draft-ietf-quic-http-34#section-7.2.4.1
DUMMY = 0x21
class StreamType(IntEnum):
CONTROL = 0
PUSH = 1
QPACK_ENCODER = 2
QPACK_DECODER = 3
WEBTRANSPORT = 0x54
class ProtocolError(Exception):
"""
Base class for protocol errors.
These errors are not exposed to the API user, they are handled
in :meth:`H3Connection.handle_event`.
"""
error_code = ErrorCode.H3_GENERAL_PROTOCOL_ERROR
def __init__(self, reason_phrase: str = ""):
self.reason_phrase = reason_phrase
class QpackDecompressionFailed(ProtocolError):
error_code = ErrorCode.QPACK_DECOMPRESSION_FAILED
class QpackDecoderStreamError(ProtocolError):
error_code = ErrorCode.QPACK_DECODER_STREAM_ERROR
class QpackEncoderStreamError(ProtocolError):
error_code = ErrorCode.QPACK_ENCODER_STREAM_ERROR
class ClosedCriticalStream(ProtocolError):
error_code = ErrorCode.H3_CLOSED_CRITICAL_STREAM
class FrameUnexpected(ProtocolError):
error_code = ErrorCode.H3_FRAME_UNEXPECTED
class MessageError(ProtocolError):
error_code = ErrorCode.H3_MESSAGE_ERROR
class MissingSettingsError(ProtocolError):
error_code = ErrorCode.H3_MISSING_SETTINGS
class SettingsError(ProtocolError):
error_code = ErrorCode.H3_SETTINGS_ERROR
class StreamCreationError(ProtocolError):
error_code = ErrorCode.H3_STREAM_CREATION_ERROR
def encode_frame(frame_type: int, frame_data: bytes) -> bytes:
frame_length = len(frame_data)
buf = Buffer(capacity=frame_length + 2 * UINT_VAR_MAX_SIZE)
buf.push_uint_var(frame_type)
buf.push_uint_var(frame_length)
buf.push_bytes(frame_data)
return buf.data
def encode_settings(settings: Dict[int, int]) -> bytes:
buf = Buffer(capacity=1024)
for setting, value in settings.items():
buf.push_uint_var(setting)
buf.push_uint_var(value)
return buf.data
def parse_max_push_id(data: bytes) -> int:
buf = Buffer(data=data)
max_push_id = buf.pull_uint_var()
assert buf.eof()
return max_push_id
def parse_settings(data: bytes) -> Dict[int, int]:
buf = Buffer(data=data)
settings: Dict[int, int] = {}
while not buf.eof():
setting = buf.pull_uint_var()
value = buf.pull_uint_var()
if setting in RESERVED_SETTINGS:
raise SettingsError("Setting identifier 0x%x is reserved" % setting)
if setting in settings:
raise SettingsError("Setting identifier 0x%x is included twice" % setting)
settings[setting] = value
return dict(settings)
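# Brief round-trip sketch (not part of the original file): for non-reserved,
# non-duplicated settings, encode_settings() and parse_settings() are inverses.
def _settings_round_trip_example() -> None:
    wire = encode_settings({Setting.QPACK_BLOCKED_STREAMS: 16, Setting.H3_DATAGRAM: 1})
    assert parse_settings(wire) == {0x7: 16, 0xFFD277: 1}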
def validate_headers(
headers: Headers,
allowed_pseudo_headers: FrozenSet[bytes],
required_pseudo_headers: FrozenSet[bytes],
) -> None:
after_pseudo_headers = False
authority: Optional[bytes] = None
path: Optional[bytes] = None
scheme: Optional[bytes] = None
seen_pseudo_headers: Set[bytes] = set()
for key, value in headers:
if UPPERCASE.search(key):
raise MessageError("Header %r contains uppercase letters" % key)
if key.startswith(b":"):
# pseudo-headers
if after_pseudo_headers:
raise MessageError(
"Pseudo-header %r is not allowed after regular headers" % key
)
if key not in allowed_pseudo_headers:
raise MessageError("Pseudo-header %r is not valid" % key)
if key in seen_pseudo_headers:
raise MessageError("Pseudo-header %r is included twice" % key)
seen_pseudo_headers.add(key)
# store value
if key == b":authority":
authority = value
elif key == b":path":
path = value
elif key == b":scheme":
scheme = value
else:
# regular headers
after_pseudo_headers = True
# check required pseudo-headers are present
missing = required_pseudo_headers.difference(seen_pseudo_headers)
if missing:
raise MessageError("Pseudo-headers %s are missing" % sorted(missing))
if scheme in (b"http", b"https"):
if not authority:
raise MessageError("Pseudo-header b':authority' cannot be empty")
if not path:
raise MessageError("Pseudo-header b':path' cannot be empty")
def validate_push_promise_headers(headers: Headers) -> None:
validate_headers(
headers,
allowed_pseudo_headers=frozenset(
(b":method", b":scheme", b":authority", b":path")
),
required_pseudo_headers=frozenset(
(b":method", b":scheme", b":authority", b":path")
),
)
def validate_request_headers(headers: Headers) -> None:
validate_headers(
headers,
allowed_pseudo_headers=frozenset(
# FIXME: The pseudo-header :protocol is not actually defined, but
# we use it for the WebSocket demo.
(b":method", b":scheme", b":authority", b":path", b":protocol")
),
required_pseudo_headers=frozenset((b":method", b":authority")),
)
def validate_response_headers(headers: Headers) -> None:
validate_headers(
headers,
allowed_pseudo_headers=frozenset((b":status",)),
required_pseudo_headers=frozenset((b":status",)),
)
def validate_trailers(headers: Headers) -> None:
validate_headers(
headers,
allowed_pseudo_headers=frozenset(),
required_pseudo_headers=frozenset(),
)
class H3Stream:
def __init__(self, stream_id: int) -> None:
self.blocked = False
self.blocked_frame_size: Optional[int] = None
self.buffer = b""
self.ended = False
self.frame_size: Optional[int] = None
self.frame_type: Optional[int] = None
self.headers_recv_state: HeadersState = HeadersState.INITIAL
self.headers_send_state: HeadersState = HeadersState.INITIAL
self.push_id: Optional[int] = None
self.session_id: Optional[int] = None
self.stream_id = stream_id
self.stream_type: Optional[int] = None
class H3Connection:
"""
A low-level HTTP/3 connection object.
:param quic: A :class:`~aioquic.connection.QuicConnection` instance.
"""
def __init__(self, quic: QuicConnection, enable_webtransport: bool = False) -> None:
# settings
self._max_table_capacity = 4096
self._blocked_streams = 16
self._enable_webtransport = enable_webtransport
self._is_client = quic.configuration.is_client
self._is_done = False
self._quic = quic
self._quic_logger: Optional[QuicLoggerTrace] = quic._quic_logger
self._decoder = pylsqpack.Decoder(
self._max_table_capacity, self._blocked_streams
)
self._decoder_bytes_received = 0
self._decoder_bytes_sent = 0
self._encoder = pylsqpack.Encoder()
self._encoder_bytes_received = 0
self._encoder_bytes_sent = 0
self._settings_received = False
self._stream: Dict[int, H3Stream] = {}
self._max_push_id: Optional[int] = 8 if self._is_client else None
self._next_push_id: int = 0
self._local_control_stream_id: Optional[int] = None
self._local_decoder_stream_id: Optional[int] = None
self._local_encoder_stream_id: Optional[int] = None
self._peer_control_stream_id: Optional[int] = None
self._peer_decoder_stream_id: Optional[int] = None
self._peer_encoder_stream_id: Optional[int] = None
self._received_settings: Optional[Dict[int, int]] = None
self._sent_settings: Optional[Dict[int, int]] = None
self._init_connection()
def create_webtransport_stream(
self, session_id: int, is_unidirectional: bool = False
) -> int:
"""
Create a WebTransport stream and return the stream ID.
:param session_id: The WebTransport session identifier.
:param is_unidirectional: Whether to create a unidirectional stream.
"""
if is_unidirectional:
stream_id = self._create_uni_stream(StreamType.WEBTRANSPORT)
self._quic.send_stream_data(stream_id, encode_uint_var(session_id))
else:
stream_id = self._quic.get_next_available_stream_id()
self._log_stream_type(
stream_id=stream_id, stream_type=StreamType.WEBTRANSPORT
)
self._quic.send_stream_data(
stream_id,
encode_uint_var(FrameType.WEBTRANSPORT_STREAM)
+ encode_uint_var(session_id),
)
return stream_id
def handle_event(self, event: QuicEvent) -> List[H3Event]:
"""
Handle a QUIC event and return a list of HTTP events.
:param event: The QUIC event to handle.
"""
if not self._is_done:
try:
if isinstance(event, StreamDataReceived):
stream_id = event.stream_id
stream = self._get_or_create_stream(stream_id)
if stream_is_unidirectional(stream_id):
return self._receive_stream_data_uni(
stream, event.data, event.end_stream
)
else:
return self._receive_request_or_push_data(
stream, event.data, event.end_stream
)
elif isinstance(event, DatagramFrameReceived):
return self._receive_datagram(event.data)
except ProtocolError as exc:
self._is_done = True
self._quic.close(
error_code=exc.error_code, reason_phrase=exc.reason_phrase
)
return []
def send_datagram(self, flow_id: int, data: bytes) -> None:
"""
Send a datagram for the specified flow.
:param flow_id: The flow ID.
:param data: The HTTP/3 datagram payload.
"""
self._quic.send_datagram_frame(encode_uint_var(flow_id) + data)
def send_push_promise(self, stream_id: int, headers: Headers) -> int:
"""
Send a push promise related to the specified stream.
Returns the stream ID on which headers and data can be sent.
:param stream_id: The stream ID on which to send the data.
:param headers: The HTTP request headers for this push.
"""
assert not self._is_client, "Only servers may send a push promise."
if self._max_push_id is None or self._next_push_id >= self._max_push_id:
raise NoAvailablePushIDError
# send push promise
push_id = self._next_push_id
self._next_push_id += 1
self._quic.send_stream_data(
stream_id,
encode_frame(
FrameType.PUSH_PROMISE,
encode_uint_var(push_id) + self._encode_headers(stream_id, headers),
),
)
# create push stream
push_stream_id = self._create_uni_stream(StreamType.PUSH, push_id=push_id)
self._quic.send_stream_data(push_stream_id, encode_uint_var(push_id))
return push_stream_id
def send_data(self, stream_id: int, data: bytes, end_stream: bool) -> None:
"""
Send data on the given stream.
To retrieve datagrams which need to be sent over the network, call the QUIC
connection's :meth:`~aioquic.connection.QuicConnection.datagrams_to_send`
method.
:param stream_id: The stream ID on which to send the data.
:param data: The data to send.
:param end_stream: Whether to end the stream.
"""
# check DATA frame is allowed
stream = self._get_or_create_stream(stream_id)
if stream.headers_send_state != HeadersState.AFTER_HEADERS:
raise FrameUnexpected("DATA frame is not allowed in this state")
# log frame
if self._quic_logger is not None:
self._quic_logger.log_event(
category="http",
event="frame_created",
data=self._quic_logger.encode_http3_data_frame(
length=len(data), stream_id=stream_id
),
)
self._quic.send_stream_data(
stream_id, encode_frame(FrameType.DATA, data), end_stream
)
def send_headers(
self, stream_id: int, headers: Headers, end_stream: bool = False
) -> None:
"""
Send headers on the given stream.
To retrieve datagrams which need to be sent over the network, call the QUIC
connection's :meth:`~aioquic.connection.QuicConnection.datagrams_to_send`
method.
:param stream_id: The stream ID on which to send the headers.
:param headers: The HTTP headers to send.
:param end_stream: Whether to end the stream.
"""
# check HEADERS frame is allowed
stream = self._get_or_create_stream(stream_id)
if stream.headers_send_state == HeadersState.AFTER_TRAILERS:
raise FrameUnexpected("HEADERS frame is not allowed in this state")
frame_data = self._encode_headers(stream_id, headers)
# log frame
if self._quic_logger is not None:
self._quic_logger.log_event(
category="http",
event="frame_created",
data=self._quic_logger.encode_http3_headers_frame(
length=len(frame_data), headers=headers, stream_id=stream_id
),
)
# update state and send headers
if stream.headers_send_state == HeadersState.INITIAL:
stream.headers_send_state = HeadersState.AFTER_HEADERS
else:
stream.headers_send_state = HeadersState.AFTER_TRAILERS
self._quic.send_stream_data(
stream_id, encode_frame(FrameType.HEADERS, frame_data), end_stream
)
@property
def received_settings(self) -> Optional[Dict[int, int]]:
"""
Return the received SETTINGS frame, or None.
"""
return self._received_settings
@property
def sent_settings(self) -> Optional[Dict[int, int]]:
"""
Return the sent SETTINGS frame, or None.
"""
return self._sent_settings
def _create_uni_stream(
self, stream_type: int, push_id: Optional[int] = None
) -> int:
"""
Create a unidirectional stream of the given type.
"""
stream_id = self._quic.get_next_available_stream_id(is_unidirectional=True)
self._log_stream_type(
push_id=push_id, stream_id=stream_id, stream_type=stream_type
)
self._quic.send_stream_data(stream_id, encode_uint_var(stream_type))
return stream_id
def _decode_headers(self, stream_id: int, frame_data: Optional[bytes]) -> Headers:
"""
Decode a HEADERS block and send decoder updates on the decoder stream.
This is called with frame_data=None when a stream becomes unblocked.
"""
try:
if frame_data is None:
decoder, headers = self._decoder.resume_header(stream_id)
else:
decoder, headers = self._decoder.feed_header(stream_id, frame_data)
self._decoder_bytes_sent += len(decoder)
self._quic.send_stream_data(self._local_decoder_stream_id, decoder)
except pylsqpack.DecompressionFailed as exc:
raise QpackDecompressionFailed() from exc
return headers
def _encode_headers(self, stream_id: int, headers: Headers) -> bytes:
"""
Encode a HEADERS block and send encoder updates on the encoder stream.
"""
encoder, frame_data = self._encoder.encode(stream_id, headers)
self._encoder_bytes_sent += len(encoder)
self._quic.send_stream_data(self._local_encoder_stream_id, encoder)
return frame_data
def _get_or_create_stream(self, stream_id: int) -> H3Stream:
if stream_id not in self._stream:
self._stream[stream_id] = H3Stream(stream_id)
return self._stream[stream_id]
def _get_local_settings(self) -> Dict[int, int]:
"""
Return the local HTTP/3 settings.
"""
settings = {
Setting.QPACK_MAX_TABLE_CAPACITY: self._max_table_capacity,
Setting.QPACK_BLOCKED_STREAMS: self._blocked_streams,
Setting.DUMMY: 1,
}
if self._enable_webtransport:
settings[Setting.H3_DATAGRAM] = 1
settings[Setting.ENABLE_WEBTRANSPORT] = 1
return settings
def _handle_control_frame(self, frame_type: int, frame_data: bytes) -> None:
"""
Handle a frame received on the peer's control stream.
"""
if frame_type != FrameType.SETTINGS and not self._settings_received:
raise MissingSettingsError
if frame_type == FrameType.SETTINGS:
if self._settings_received:
raise FrameUnexpected("SETTINGS have already been received")
settings = parse_settings(frame_data)
self._validate_settings(settings)
self._received_settings = settings
encoder = self._encoder.apply_settings(
max_table_capacity=settings.get(Setting.QPACK_MAX_TABLE_CAPACITY, 0),
blocked_streams=settings.get(Setting.QPACK_BLOCKED_STREAMS, 0),
)
self._quic.send_stream_data(self._local_encoder_stream_id, encoder)
self._settings_received = True
elif frame_type == FrameType.MAX_PUSH_ID:
if self._is_client:
raise FrameUnexpected("Servers must not send MAX_PUSH_ID")
self._max_push_id = parse_max_push_id(frame_data)
elif frame_type in (
FrameType.DATA,
FrameType.HEADERS,
FrameType.PUSH_PROMISE,
FrameType.DUPLICATE_PUSH,
):
raise FrameUnexpected("Invalid frame type on control stream")
def _handle_request_or_push_frame(
self,
frame_type: int,
frame_data: Optional[bytes],
stream: H3Stream,
stream_ended: bool,
) -> List[H3Event]:
"""
Handle a frame received on a request or push stream.
"""
http_events: List[H3Event] = []
if frame_type == FrameType.DATA:
# check DATA frame is allowed
if stream.headers_recv_state != HeadersState.AFTER_HEADERS:
raise FrameUnexpected("DATA frame is not allowed in this state")
if stream_ended or frame_data:
http_events.append(
DataReceived(
data=frame_data,
push_id=stream.push_id,
stream_ended=stream_ended,
stream_id=stream.stream_id,
)
)
elif frame_type == FrameType.HEADERS:
# check HEADERS frame is allowed
if stream.headers_recv_state == HeadersState.AFTER_TRAILERS:
raise FrameUnexpected("HEADERS frame is not allowed in this state")
# try to decode HEADERS, may raise pylsqpack.StreamBlocked
headers = self._decode_headers(stream.stream_id, frame_data)
# validate headers
if stream.headers_recv_state == HeadersState.INITIAL:
if self._is_client:
validate_response_headers(headers)
else:
validate_request_headers(headers)
else:
validate_trailers(headers)
# log frame
if self._quic_logger is not None:
self._quic_logger.log_event(
category="http",
event="frame_parsed",
data=self._quic_logger.encode_http3_headers_frame(
length=stream.blocked_frame_size
if frame_data is None
else len(frame_data),
headers=headers,
stream_id=stream.stream_id,
),
)
# update state and emit headers
if stream.headers_recv_state == HeadersState.INITIAL:
stream.headers_recv_state = HeadersState.AFTER_HEADERS
else:
stream.headers_recv_state = HeadersState.AFTER_TRAILERS
http_events.append(
HeadersReceived(
headers=headers,
push_id=stream.push_id,
stream_id=stream.stream_id,
stream_ended=stream_ended,
)
)
elif frame_type == FrameType.PUSH_PROMISE and stream.push_id is None:
if not self._is_client:
raise FrameUnexpected("Clients must not send PUSH_PROMISE")
frame_buf = Buffer(data=frame_data)
push_id = frame_buf.pull_uint_var()
headers = self._decode_headers(
stream.stream_id, frame_data[frame_buf.tell() :]
)
# validate headers
validate_push_promise_headers(headers)
# log frame
if self._quic_logger is not None:
self._quic_logger.log_event(
category="http",
event="frame_parsed",
data=self._quic_logger.encode_http3_push_promise_frame(
length=len(frame_data),
headers=headers,
push_id=push_id,
stream_id=stream.stream_id,
),
)
# emit event
http_events.append(
PushPromiseReceived(
headers=headers, push_id=push_id, stream_id=stream.stream_id
)
)
elif frame_type in (
FrameType.PRIORITY,
FrameType.CANCEL_PUSH,
FrameType.SETTINGS,
FrameType.PUSH_PROMISE,
FrameType.GOAWAY,
FrameType.MAX_PUSH_ID,
FrameType.DUPLICATE_PUSH,
):
raise FrameUnexpected(
"Invalid frame type on request stream"
if stream.push_id is None
else "Invalid frame type on push stream"
)
return http_events
def _init_connection(self) -> None:
# send our settings
self._local_control_stream_id = self._create_uni_stream(StreamType.CONTROL)
self._sent_settings = self._get_local_settings()
self._quic.send_stream_data(
self._local_control_stream_id,
encode_frame(FrameType.SETTINGS, encode_settings(self._sent_settings)),
)
if self._is_client and self._max_push_id is not None:
self._quic.send_stream_data(
self._local_control_stream_id,
encode_frame(FrameType.MAX_PUSH_ID, encode_uint_var(self._max_push_id)),
)
# create encoder and decoder streams
self._local_encoder_stream_id = self._create_uni_stream(
StreamType.QPACK_ENCODER
)
self._local_decoder_stream_id = self._create_uni_stream(
StreamType.QPACK_DECODER
)
def _log_stream_type(
self, stream_id: int, stream_type: int, push_id: Optional[int] = None
) -> None:
if self._quic_logger is not None:
type_name = {
0: "control",
1: "push",
2: "qpack_encoder",
3: "qpack_decoder",
0x54: "webtransport", # NOTE: not standardized yet
}.get(stream_type, "unknown")
data = {"new": type_name, "stream_id": stream_id}
if push_id is not None:
data["associated_push_id"] = push_id
self._quic_logger.log_event(
category="http",
event="stream_type_set",
data=data,
)
def _receive_datagram(self, data: bytes) -> List[H3Event]:
"""
Handle a datagram.
"""
buf = Buffer(data=data)
try:
flow_id = buf.pull_uint_var()
except BufferReadError:
raise ProtocolError("Could not parse flow ID")
return [DatagramReceived(data=data[buf.tell() :], flow_id=flow_id)]
def _receive_request_or_push_data(
self, stream: H3Stream, data: bytes, stream_ended: bool
) -> List[H3Event]:
"""
Handle data received on a request or push stream.
"""
http_events: List[H3Event] = []
stream.buffer += data
if stream_ended:
stream.ended = True
if stream.blocked:
return http_events
# shortcut for WEBTRANSPORT_STREAM frame fragments
if (
stream.frame_type == FrameType.WEBTRANSPORT_STREAM
and stream.session_id is not None
):
http_events.append(
WebTransportStreamDataReceived(
data=stream.buffer,
session_id=stream.session_id,
stream_id=stream.stream_id,
stream_ended=stream_ended,
)
)
stream.buffer = b""
return http_events
# shortcut for DATA frame fragments
if (
stream.frame_type == FrameType.DATA
and stream.frame_size is not None
and len(stream.buffer) < stream.frame_size
):
http_events.append(
DataReceived(
data=stream.buffer,
push_id=stream.push_id,
stream_id=stream.stream_id,
stream_ended=False,
)
)
stream.frame_size -= len(stream.buffer)
stream.buffer = b""
return http_events
# handle lone FIN
if stream_ended and not stream.buffer:
http_events.append(
DataReceived(
data=b"",
push_id=stream.push_id,
stream_id=stream.stream_id,
stream_ended=True,
)
)
return http_events
buf = Buffer(data=stream.buffer)
consumed = 0
while not buf.eof():
# fetch next frame header
if stream.frame_size is None:
try:
stream.frame_type = buf.pull_uint_var()
stream.frame_size = buf.pull_uint_var()
except BufferReadError:
break
consumed = buf.tell()
# WEBTRANSPORT_STREAM frames last until the end of the stream
if stream.frame_type == FrameType.WEBTRANSPORT_STREAM:
stream.session_id = stream.frame_size
stream.frame_size = None
frame_data = stream.buffer[consumed:]
stream.buffer = b""
self._log_stream_type(
stream_id=stream.stream_id, stream_type=StreamType.WEBTRANSPORT
)
if frame_data or stream_ended:
http_events.append(
WebTransportStreamDataReceived(
data=frame_data,
session_id=stream.session_id,
stream_id=stream.stream_id,
stream_ended=stream_ended,
)
)
return http_events
# log frame
if (
self._quic_logger is not None
and stream.frame_type == FrameType.DATA
):
self._quic_logger.log_event(
category="http",
event="frame_parsed",
data=self._quic_logger.encode_http3_data_frame(
length=stream.frame_size, stream_id=stream.stream_id
),
)
# check how much data is available
chunk_size = min(stream.frame_size, buf.capacity - consumed)
if stream.frame_type != FrameType.DATA and chunk_size < stream.frame_size:
break
# read available data
frame_data = buf.pull_bytes(chunk_size)
frame_type = stream.frame_type
consumed = buf.tell()
# detect end of frame
stream.frame_size -= chunk_size
if not stream.frame_size:
stream.frame_size = None
stream.frame_type = None
try:
http_events.extend(
self._handle_request_or_push_frame(
frame_type=frame_type,
frame_data=frame_data,
stream=stream,
stream_ended=stream.ended and buf.eof(),
)
)
except pylsqpack.StreamBlocked:
stream.blocked = True
stream.blocked_frame_size = len(frame_data)
break
# remove processed data from buffer
stream.buffer = stream.buffer[consumed:]
return http_events
def _receive_stream_data_uni(
self, stream: H3Stream, data: bytes, stream_ended: bool
) -> List[H3Event]:
http_events: List[H3Event] = []
stream.buffer += data
if stream_ended:
stream.ended = True
buf = Buffer(data=stream.buffer)
consumed = 0
unblocked_streams: Set[int] = set()
while (
stream.stream_type
in (StreamType.PUSH, StreamType.CONTROL, StreamType.WEBTRANSPORT)
or not buf.eof()
):
# fetch stream type for unidirectional streams
if stream.stream_type is None:
try:
stream.stream_type = buf.pull_uint_var()
except BufferReadError:
break
consumed = buf.tell()
# check unicity
if stream.stream_type == StreamType.CONTROL:
if self._peer_control_stream_id is not None:
raise StreamCreationError("Only one control stream is allowed")
self._peer_control_stream_id = stream.stream_id
elif stream.stream_type == StreamType.QPACK_DECODER:
if self._peer_decoder_stream_id is not None:
raise StreamCreationError(
"Only one QPACK decoder stream is allowed"
)
self._peer_decoder_stream_id = stream.stream_id
elif stream.stream_type == StreamType.QPACK_ENCODER:
if self._peer_encoder_stream_id is not None:
raise StreamCreationError(
"Only one QPACK encoder stream is allowed"
)
self._peer_encoder_stream_id = stream.stream_id
# for PUSH, logging is performed once the push_id is known
if stream.stream_type != StreamType.PUSH:
self._log_stream_type(
stream_id=stream.stream_id, stream_type=stream.stream_type
)
if stream.stream_type == StreamType.CONTROL:
if stream_ended:
raise ClosedCriticalStream("Closing control stream is not allowed")
# fetch next frame
try:
frame_type = buf.pull_uint_var()
frame_length = buf.pull_uint_var()
frame_data = buf.pull_bytes(frame_length)
except BufferReadError:
break
consumed = buf.tell()
self._handle_control_frame(frame_type, frame_data)
elif stream.stream_type == StreamType.PUSH:
# fetch push id
if stream.push_id is None:
try:
stream.push_id = buf.pull_uint_var()
except BufferReadError:
break
consumed = buf.tell()
self._log_stream_type(
push_id=stream.push_id,
stream_id=stream.stream_id,
stream_type=stream.stream_type,
)
# remove processed data from buffer
stream.buffer = stream.buffer[consumed:]
return self._receive_request_or_push_data(stream, b"", stream_ended)
elif stream.stream_type == StreamType.WEBTRANSPORT:
# fetch session id
if stream.session_id is None:
try:
stream.session_id = buf.pull_uint_var()
except BufferReadError:
break
consumed = buf.tell()
frame_data = stream.buffer[consumed:]
stream.buffer = b""
if frame_data or stream_ended:
http_events.append(
WebTransportStreamDataReceived(
data=frame_data,
session_id=stream.session_id,
stream_ended=stream.ended,
stream_id=stream.stream_id,
)
)
return http_events
elif stream.stream_type == StreamType.QPACK_DECODER:
# feed unframed data to decoder
data = buf.pull_bytes(buf.capacity - buf.tell())
consumed = buf.tell()
try:
self._encoder.feed_decoder(data)
except pylsqpack.DecoderStreamError as exc:
raise QpackDecoderStreamError() from exc
self._decoder_bytes_received += len(data)
elif stream.stream_type == StreamType.QPACK_ENCODER:
# feed unframed data to encoder
data = buf.pull_bytes(buf.capacity - buf.tell())
consumed = buf.tell()
try:
unblocked_streams.update(self._decoder.feed_encoder(data))
except pylsqpack.EncoderStreamError as exc:
raise QpackEncoderStreamError() from exc
self._encoder_bytes_received += len(data)
else:
# unknown stream type, discard data
buf.seek(buf.capacity)
consumed = buf.tell()
# remove processed data from buffer
stream.buffer = stream.buffer[consumed:]
# process unblocked streams
for stream_id in unblocked_streams:
stream = self._stream[stream_id]
# resume headers
http_events.extend(
self._handle_request_or_push_frame(
frame_type=FrameType.HEADERS,
frame_data=None,
stream=stream,
stream_ended=stream.ended and not stream.buffer,
)
)
stream.blocked = False
stream.blocked_frame_size = None
# resume processing
if stream.buffer:
http_events.extend(
self._receive_request_or_push_data(stream, b"", stream.ended)
)
return http_events
def _validate_settings(self, settings: Dict[int, int]) -> None:
if Setting.H3_DATAGRAM in settings:
if settings[Setting.H3_DATAGRAM] not in (0, 1):
raise SettingsError("H3_DATAGRAM setting must be 0 or 1")
if (
settings[Setting.H3_DATAGRAM] == 1
and self._quic._remote_max_datagram_frame_size is None
):
raise SettingsError(
"H3_DATAGRAM requires max_datagram_frame_size transport parameter"
)
if Setting.ENABLE_WEBTRANSPORT in settings:
if settings[Setting.ENABLE_WEBTRANSPORT] not in (0, 1):
raise SettingsError("ENABLE_WEBTRANSPORT setting must be 0 or 1")
if (
settings[Setting.ENABLE_WEBTRANSPORT] == 1
and settings.get(Setting.H3_DATAGRAM) != 1
):
raise SettingsError("ENABLE_WEBTRANSPORT requires H3_DATAGRAM")
avg_line_length: 35.748854 | max_line_length: 88 | alphanum_fraction: 0.585226

path: great_expectations/cli/v012/upgrade_helpers/upgrade_helper_v11.py | hexsha: 30afc88da14d3e6f75c1cc14055f9b77d4eaf6db | size: 25,109 | ext: py | lang: Python
stars: 1 (2022-03-16T22:09:49.000Z to 2022-03-16T22:09:49.000Z) in arunnthevapalan/great_expectations @ 97f1481bcd1c3f4d8878c6f383f4e6f008b20cd1, licenses ["Apache-2.0"]
issues: null in draev/great_expectations @ 317e15ee7e50f6e0d537b62154177440f33b795d, licenses ["Apache-2.0"]
forks: 1 (2022-03-03T16:47:32.000Z to 2022-03-03T16:47:32.000Z) in draev/great_expectations @ 317e15ee7e50f6e0d537b62154177440f33b795d, licenses ["Apache-2.0"]
import datetime
import json
import os
import traceback
from dateutil.parser import parse
from great_expectations import DataContext
from great_expectations.cli.v012.upgrade_helpers.base_upgrade_helper import (
BaseUpgradeHelper,
)
from great_expectations.data_context.store import (
DatabaseStoreBackend,
HtmlSiteStore,
InMemoryStoreBackend,
MetricStore,
TupleFilesystemStoreBackend,
TupleGCSStoreBackend,
TupleS3StoreBackend,
ValidationsStore,
)
from great_expectations.data_context.types.resource_identifiers import (
ValidationResultIdentifier,
)
"""
NOTE (Shinnnyshinshin): This is not the UpgradeHelperV11 that is normally used by the CLI.
As of 2022-01, it is only triggered by running the CLI-command:
great_expectations --v2-api upgrade project
on a great_expectations/ directory, and cannot be used to fully migrate a v1.0 or v2.0 configuration to a v3.0 config. A
task for the full deprecation of this path has been placed in the backlog.
"""
class UpgradeHelperV11(BaseUpgradeHelper):
def __init__(self, data_context=None, context_root_dir=None):
assert (
data_context or context_root_dir
), "Please provide a data_context object or a context_root_dir."
self.data_context = data_context or DataContext(
context_root_dir=context_root_dir
)
self.upgrade_log = {
"skipped_validations_stores": {
"database_store_backends": [],
"unsupported": [],
},
"skipped_docs_validations_stores": {"unsupported": []},
"skipped_metrics_stores": {
"database_store_backends": [],
"unsupported": [],
},
"exceptions": [
# {
# "validation_store_name": store_name
# "src": src_url,
# "dest": dest_url,
# "exception_message": exception_message,
# },
# {
# "site_name": site_name,
# "src": src_url,
# "dest": dest_url,
# "exception_message": exception_message,
# }
],
"upgraded_validations_stores": {
# STORE_NAME: {
# "validations_updated": [{
# "src": src_url,
# "dest": dest_url
# }],
# "exceptions": BOOL
# }
},
"upgraded_docs_site_validations_stores": {
# SITE_NAME: {
# "validation_result_pages_updated": [{
# src: src_url,
# dest: dest_url
# }],
# "exceptions": BOOL
# }
},
}
self.upgrade_checklist = {
"validations_store_backends": {},
"docs_validations_store_backends": {},
}
self.validation_run_times = {}
self.run_time_setters_by_backend_type = {
TupleFilesystemStoreBackend: self._get_tuple_filesystem_store_backend_run_time,
TupleS3StoreBackend: self._get_tuple_s3_store_backend_run_time,
TupleGCSStoreBackend: self._get_tuple_gcs_store_backend_run_time,
}
self._generate_upgrade_checklist()
def _generate_upgrade_checklist(self):
for (store_name, store) in self.data_context.stores.items():
if not isinstance(store, (ValidationsStore, MetricStore)):
continue
elif isinstance(store, ValidationsStore):
self._process_validations_store_for_checklist(store_name, store)
elif isinstance(store, MetricStore):
self._process_metrics_store_for_checklist(store_name, store)
sites = (
self.data_context.project_config_with_variables_substituted.data_docs_sites
)
if sites:
for site_name, site_config in sites.items():
self._process_docs_site_for_checklist(site_name, site_config)
def _process_docs_site_for_checklist(self, site_name, site_config):
site_html_store = HtmlSiteStore(
store_backend=site_config.get("store_backend"),
runtime_environment={
"data_context": self.data_context,
"root_directory": self.data_context.root_directory,
"site_name": site_name,
},
)
site_validations_store_backend = site_html_store.store_backends[
ValidationResultIdentifier
]
if isinstance(
site_validations_store_backend,
tuple(list(self.run_time_setters_by_backend_type.keys())),
):
self.upgrade_checklist["docs_validations_store_backends"][
site_name
] = site_validations_store_backend
else:
self.upgrade_log["skipped_docs_validations_stores"]["unsupported"].append(
{
"site_name": site_name,
"validations_store_backend_class": type(
site_validations_store_backend
).__name__,
}
)
def _process_validations_store_for_checklist(self, store_name, store):
store_backend = store.store_backend
if isinstance(store_backend, DatabaseStoreBackend):
self.upgrade_log["skipped_validations_stores"][
"database_store_backends"
].append(
{
"store_name": store_name,
"store_backend_class": type(store_backend).__name__,
}
)
elif isinstance(
store_backend, tuple(list(self.run_time_setters_by_backend_type.keys()))
):
self.upgrade_checklist["validations_store_backends"][
store_name
] = store_backend
else:
self.upgrade_log["skipped_validations_stores"]["unsupported"].append(
{
"store_name": store_name,
"store_backend_class": type(store_backend).__name__,
}
)
def _process_metrics_store_for_checklist(self, store_name, store):
store_backend = store.store_backend
if isinstance(store_backend, DatabaseStoreBackend):
self.upgrade_log["skipped_metrics_stores"][
"database_store_backends"
].append(
{
"store_name": store_name,
"store_backend_class": type(store_backend).__name__,
}
)
elif isinstance(store_backend, InMemoryStoreBackend):
pass
else:
self.upgrade_log["skipped_metrics_stores"]["unsupported"].append(
{
"store_name": store_name,
"store_backend_class": type(store_backend).__name__,
}
)
def _upgrade_store_backend(self, store_backend, store_name=None, site_name=None):
assert store_name or site_name, "Must pass either store_name or site_name."
assert not (
store_name and site_name
), "Must pass either store_name or site_name, not both."
try:
validation_source_keys = store_backend.list_keys()
except Exception as e:
exception_traceback = traceback.format_exc()
exception_message = (
f'{type(e).__name__}: "{str(e)}". '
f'Traceback: "{exception_traceback}".'
)
self._update_upgrade_log(
store_backend=store_backend,
store_name=store_name,
site_name=site_name,
exception_message=exception_message,
            )
            return  # nothing to upgrade if the keys could not be listed
for source_key in validation_source_keys:
try:
run_name = source_key[-2]
dest_key = None
if run_name not in self.validation_run_times:
self.run_time_setters_by_backend_type.get(type(store_backend))(
source_key, store_backend
)
dest_key_list = list(source_key)
dest_key_list.insert(-1, self.validation_run_times[run_name])
dest_key = tuple(dest_key_list)
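                # Illustrative (hypothetical key layout): inserting run_time before the
                # final element turns a source key ending in (..., run_name, batch_id)
                # into (..., run_name, run_time, batch_id).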
except Exception as e:
exception_traceback = traceback.format_exc()
exception_message = (
f'{type(e).__name__}: "{str(e)}". '
f'Traceback: "{exception_traceback}".'
)
self._update_upgrade_log(
store_backend=store_backend,
source_key=source_key,
dest_key=dest_key,
store_name=store_name,
site_name=site_name,
exception_message=exception_message,
)
try:
if store_name:
self._update_validation_result_json(
source_key=source_key,
dest_key=dest_key,
run_name=run_name,
store_backend=store_backend,
)
else:
store_backend.move(source_key, dest_key)
self._update_upgrade_log(
store_backend=store_backend,
source_key=source_key,
dest_key=dest_key,
store_name=store_name,
site_name=site_name,
)
except Exception as e:
exception_traceback = traceback.format_exc()
exception_message = (
f'{type(e).__name__}: "{str(e)}". '
f'Traceback: "{exception_traceback}".'
)
self._update_upgrade_log(
store_backend=store_backend,
source_key=source_key,
dest_key=dest_key,
store_name=store_name,
site_name=site_name,
exception_message=exception_message,
)
def _update_upgrade_log(
self,
store_backend,
source_key=None,
dest_key=None,
store_name=None,
site_name=None,
exception_message=None,
):
assert store_name or site_name, "Must pass either store_name or site_name."
assert not (
store_name and site_name
), "Must pass either store_name or site_name, not both."
try:
src_url = store_backend.get_url_for_key(source_key) if source_key else "N/A"
except Exception:
src_url = f"Unable to generate URL for key: {source_key}"
try:
dest_url = store_backend.get_url_for_key(dest_key) if dest_key else "N/A"
except Exception:
dest_url = f"Unable to generate URL for key: {dest_key}"
if not exception_message:
log_dict = {"src": src_url, "dest": dest_url}
else:
key_name = "validation_store_name" if store_name else "site_name"
log_dict = {
key_name: store_name if store_name else site_name,
"src": src_url,
"dest": dest_url,
"exception_message": exception_message,
}
self.upgrade_log["exceptions"].append(log_dict)
if store_name:
if exception_message:
self.upgrade_log["upgraded_validations_stores"][store_name][
"exceptions"
] = True
else:
self.upgrade_log["upgraded_validations_stores"][store_name][
"validations_updated"
].append(log_dict)
else:
if exception_message:
self.upgrade_log["upgraded_docs_site_validations_stores"][site_name][
"exceptions"
] = True
else:
self.upgrade_log["upgraded_docs_site_validations_stores"][site_name][
"validation_result_pages_updated"
].append(log_dict)
def _update_validation_result_json(
self, source_key, dest_key, run_name, store_backend
):
new_run_id_dict = {
"run_name": run_name,
"run_time": self.validation_run_times[run_name],
}
validation_json_dict = json.loads(store_backend.get(source_key))
validation_json_dict["meta"]["run_id"] = new_run_id_dict
store_backend.set(dest_key, json.dumps(validation_json_dict))
store_backend.remove_key(source_key)
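        # Illustrative result (hypothetical values): the stored JSON now carries the
        # nested run identifier, e.g.
        #   "meta": {"run_id": {"run_name": "my_run", "run_time": "20210601T120000.000000Z"}}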
def _get_tuple_filesystem_store_backend_run_time(self, source_key, store_backend):
run_name = source_key[-2]
try:
self.validation_run_times[run_name] = parse(run_name).strftime(
"%Y%m%dT%H%M%S.%fZ"
)
except (ValueError, TypeError):
source_path = os.path.join(
store_backend.full_base_directory,
store_backend._convert_key_to_filepath(source_key),
)
path_mod_timestamp = os.path.getmtime(source_path)
path_mod_iso_str = datetime.datetime.fromtimestamp(
path_mod_timestamp
).strftime("%Y%m%dT%H%M%S.%fZ")
self.validation_run_times[run_name] = path_mod_iso_str
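    # Note: run times are rendered with the "%Y%m%dT%H%M%S.%fZ" format; for example,
    # parse("2021-06-01 12:00:00") is rendered as "20210601T120000.000000Z".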
def _get_tuple_s3_store_backend_run_time(self, source_key, store_backend):
import boto3
s3 = boto3.resource("s3")
run_name = source_key[-2]
try:
self.validation_run_times[run_name] = parse(run_name).strftime(
"%Y%m%dT%H%M%S.%fZ"
)
except (ValueError, TypeError):
source_path = store_backend._convert_key_to_filepath(source_key)
if not source_path.startswith(store_backend.prefix):
source_path = os.path.join(store_backend.prefix, source_path)
source_object = s3.Object(store_backend.bucket, source_path)
source_object_last_mod = source_object.last_modified.strftime(
"%Y%m%dT%H%M%S.%fZ"
)
self.validation_run_times[run_name] = source_object_last_mod
def _get_tuple_gcs_store_backend_run_time(self, source_key, store_backend):
from google.cloud import storage
gcs = storage.Client(project=store_backend.project)
bucket = gcs.get_bucket(store_backend.bucket)
run_name = source_key[-2]
try:
self.validation_run_times[run_name] = parse(run_name).strftime(
"%Y%m%dT%H%M%S.%fZ"
)
except (ValueError, TypeError):
source_path = store_backend._convert_key_to_filepath(source_key)
if not source_path.startswith(store_backend.prefix):
source_path = os.path.join(store_backend.prefix, source_path)
source_blob_created_time = bucket.get_blob(
source_path
).time_created.strftime("%Y%m%dT%H%M%S.%fZ")
self.validation_run_times[run_name] = source_blob_created_time
def _get_skipped_store_and_site_names(self):
validations_stores_with_database_backends = [
store_dict.get("store_name")
for store_dict in self.upgrade_log["skipped_validations_stores"][
"database_store_backends"
]
]
metrics_stores_with_database_backends = [
store_dict.get("store_name")
for store_dict in self.upgrade_log["skipped_metrics_stores"][
"database_store_backends"
]
]
unsupported_validations_stores = [
store_dict.get("store_name")
for store_dict in self.upgrade_log["skipped_validations_stores"][
"unsupported"
]
]
unsupported_metrics_stores = [
store_dict.get("store_name")
for store_dict in self.upgrade_log["skipped_metrics_stores"]["unsupported"]
]
stores_with_database_backends = (
validations_stores_with_database_backends
+ metrics_stores_with_database_backends
)
stores_with_unsupported_backends = (
unsupported_validations_stores + unsupported_metrics_stores
)
doc_sites_with_unsupported_backends = [
doc_site_dict.get("site_name")
for doc_site_dict in self.upgrade_log["skipped_docs_validations_stores"][
"unsupported"
]
]
return (
stores_with_database_backends,
stores_with_unsupported_backends,
doc_sites_with_unsupported_backends,
)
def get_upgrade_overview(self):
(
skip_with_database_backends,
skip_with_unsupported_backends,
skip_doc_sites_with_unsupported_backends,
) = self._get_skipped_store_and_site_names()
validations_store_name_checklist = [
store_name
for store_name in self.upgrade_checklist[
"validations_store_backends"
].keys()
]
site_name_checklist = [
site_name
for site_name in self.upgrade_checklist[
"docs_validations_store_backends"
].keys()
]
upgrade_overview = """\
<cyan>\
++=====================================================++
|| UpgradeHelperV11: Upgrade Overview (V2-API Version) ||
++=====================================================++\
</cyan>
**WARNING**
You have run the 'great_expectations project upgrade' command using the --v2-api flag, which cannot perform the full upgrade to configuration version 3.0 (the version that is fully compatible with the V3-API).
Please re-run the 'great_expectations project upgrade' command without the --v2-api flag.
UpgradeHelperV11 will upgrade your project to be compatible with Great Expectations 0.11.x.
"""
if not any(
[
validations_store_name_checklist,
site_name_checklist,
skip_with_database_backends,
skip_with_unsupported_backends,
skip_doc_sites_with_unsupported_backends,
]
):
upgrade_overview += """
<green>\
Good news! No special upgrade steps are required to bring your project up to date.
The Upgrade Helper will simply increment the config_version of your great_expectations.yml for you.
</green>
Would you like to proceed?
"""
else:
upgrade_overview += """
<red>**WARNING**: Before proceeding, please make sure you have appropriate backups of your project.</red>
"""
if validations_store_name_checklist or site_name_checklist:
upgrade_overview += """
<cyan>\
Automated Steps
================
</cyan>
The following Stores and/or Data Docs sites will be upgraded:
"""
upgrade_overview += (
f"""\
- Validation Stores: {", ".join(validations_store_name_checklist)}
"""
if validations_store_name_checklist
else ""
)
upgrade_overview += (
f"""\
- Data Docs Sites: {", ".join(site_name_checklist)}
"""
if site_name_checklist
else ""
)
if any(
[
skip_with_database_backends,
skip_with_unsupported_backends,
skip_doc_sites_with_unsupported_backends,
]
):
upgrade_overview += """
<cyan>\
Manual Steps
=============
</cyan>
The following Stores and/or Data Docs sites must be upgraded manually, due to having a database backend, or backend
type that is unsupported or unrecognized:
"""
upgrade_overview += (
f"""\
- Stores with database backends: {", ".join(skip_with_database_backends)}
"""
if skip_with_database_backends
else ""
)
upgrade_overview += (
f"""\
- Stores with unsupported/unrecognized backends: {", ".join(skip_with_unsupported_backends)}
"""
if skip_with_unsupported_backends
else ""
)
upgrade_overview += (
f"""\
- Data Docs sites with unsupported/unrecognized backends: {", ".join(skip_doc_sites_with_unsupported_backends)}
"""
if skip_doc_sites_with_unsupported_backends
else ""
)
else:
upgrade_overview += """
<cyan>\
Manual Steps
=============
</cyan>
No manual upgrade steps are required.
"""
upgrade_overview += """
<cyan>\
Upgrade Confirmation
=====================
</cyan>
Please consult the 0.11.x migration guide for instructions on how to complete any required manual steps or
to learn more about the automated upgrade process:
<cyan>https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html#id1</cyan>
Would you like to proceed with the project upgrade?\
"""
return upgrade_overview, True
def _save_upgrade_log(self):
current_time = datetime.datetime.now(datetime.timezone.utc).strftime(
"%Y%m%dT%H%M%S.%fZ"
)
dest_path = os.path.join(
self.data_context._context_root_directory,
"uncommitted",
"logs",
"project_upgrades",
f"UpgradeHelperV11_{current_time}.json",
)
dest_dir, dest_filename = os.path.split(dest_path)
os.makedirs(dest_dir, exist_ok=True)
with open(dest_path, "w") as outfile:
json.dump(self.upgrade_log, outfile, indent=2)
return dest_path
def _generate_upgrade_report(self):
upgrade_log_path = self._save_upgrade_log()
skipped_stores_or_sites = any(self._get_skipped_store_and_site_names())
exception_occurred = False
exceptions = self.upgrade_log.get("exceptions")
if skipped_stores_or_sites or exceptions:
increment_version = False
else:
increment_version = True
upgrade_report = """\
<cyan>\
++================++
|| Upgrade Report ||
++================++\
</cyan>
"""
if increment_version:
upgrade_report += f"""
<green>\
Your project was successfully upgraded to be compatible with Great Expectations 0.11.x.
The config_version of your great_expectations.yml has been automatically incremented to 2.0.
A log detailing the upgrade can be found here:
- {upgrade_log_path}\
</green>\
"""
else:
if exceptions:
exception_occurred = True
upgrade_report += f"""
<red>\
The Upgrade Helper encountered some exceptions during the upgrade process.
Please review the exceptions section of the upgrade log and migrate the affected files manually,
as detailed in the 0.11.x migration guide.
The upgrade log can be found here:
- {upgrade_log_path}\
</red>\
"""
else:
upgrade_report += f"""
<yellow>\
The Upgrade Helper has completed the automated upgrade steps.
A log detailing the upgrade can be found here:
- {upgrade_log_path}\
</yellow>\
"""
return upgrade_report, increment_version, exception_occurred
def upgrade_project(self):
try:
for (store_name, store_backend) in self.upgrade_checklist[
"validations_store_backends"
].items():
self.upgrade_log["upgraded_validations_stores"][store_name] = {
"validations_updated": [],
"exceptions": False,
}
self._upgrade_store_backend(store_backend, store_name=store_name)
except Exception:
pass
try:
for (site_name, store_backend) in self.upgrade_checklist[
"docs_validations_store_backends"
].items():
self.upgrade_log["upgraded_docs_site_validations_stores"][site_name] = {
"validation_result_pages_updated": [],
"exceptions": False,
}
self._upgrade_store_backend(store_backend, site_name=site_name)
except Exception:
pass
# return a report of what happened, boolean indicating whether version should be incremented
# if the version should not be incremented, the report should include instructions for steps to
# be performed manually
(
upgrade_report,
increment_version,
exception_occurred,
) = self._generate_upgrade_report()
return upgrade_report, increment_version, exception_occurred
| 36.389855
| 200
| 0.579513
|
7ede75d17357c9aa9dd9ac2031c273b2930cd22e
| 4,875
|
py
|
Python
|
src/icolos/core/workflow_steps/pmx/prepare_transitions.py
|
CMargreitter/Icolos
|
fd7b664ce177df875fefa910dc4d5c574b521cb3
|
[
"Apache-2.0"
] | 11
|
2022-01-30T14:36:13.000Z
|
2022-03-22T09:40:57.000Z
|
src/icolos/core/workflow_steps/pmx/prepare_transitions.py
|
CMargreitter/Icolos
|
fd7b664ce177df875fefa910dc4d5c574b521cb3
|
[
"Apache-2.0"
] | 2
|
2022-03-23T07:56:49.000Z
|
2022-03-24T12:01:42.000Z
|
src/icolos/core/workflow_steps/pmx/prepare_transitions.py
|
CMargreitter/Icolos
|
fd7b664ce177df875fefa910dc4d5c574b521cb3
|
[
"Apache-2.0"
] | 8
|
2022-01-28T10:32:31.000Z
|
2022-03-22T09:40:59.000Z
|
from typing import Dict, List
from icolos.core.containers.perturbation_map import Edge
from icolos.core.workflow_steps.pmx.base import StepPMXBase
from icolos.core.workflow_steps.step import _LE
from pydantic import BaseModel
from icolos.utils.enums.program_parameters import (
GromacsEnum,
StepPMXEnum,
)
from icolos.utils.execute_external.pmx import PMXExecutor
from icolos.utils.general.parallelization import SubtaskContainer
import os
_PSE = StepPMXEnum()
_GE = GromacsEnum()
class StepPMXPrepareTransitions(StepPMXBase, BaseModel):
"""
    Prepare transitions: extract snapshots from the equilibrium simulations and prepare a .tpr file for each transition frame
"""
def __init__(self, **data):
super().__init__(**data)
self._initialize_backend(executor=PMXExecutor)
def execute(self):
edges = [e.get_edge_id() for e in self.get_edges()]
self.execution.parallelization.max_length_sublists = 1
self._subtask_container = SubtaskContainer(
max_tries=self.execution.failure_policy.n_tries
)
self._subtask_container.load_data(edges)
self._execute_pmx_step_parallel(
run_func=self.prepare_transitions,
step_id="pmx prepare_transitions",
result_checker=self._check_result,
)
def _extract_snapshots(self, eqpath, tipath):
tpr = "{0}/tpr.tpr".format(eqpath)
trr = "{0}/traj.trr".format(eqpath)
frame = "{0}/frame.gro".format(tipath)
trjconv_args = {
"-s": tpr,
"-f": trr,
"-o": frame,
"-sep": "",
"-ur": "compact",
"-pbc": "mol",
"-b": 2000,
}
trjconv_args = self.get_arguments(trjconv_args)
self._gromacs_executor.execute(
_GE.TRJCONV, arguments=trjconv_args, pipe_input="echo System"
)
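        # Roughly equivalent to a GROMACS trjconv call such as (sketch; the exact
        # binary/module name depends on the configured executor):
        #   trjconv -s <eq>/tpr.tpr -f <eq>/traj.trr -o <ti>/frame.gro \
        #           -sep -ur compact -pbc mol -b 2000   (with "System" piped on stdin)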
# move frame0.gro to frame80.gro
cmd = "mv {0}/frame0.gro {0}/frame80.gro".format(tipath)
os.system(cmd)
self._clean_backup_files(tipath)
def _prepare_system(self, edge: str, state: str, wp: str, r: int, toppath: str):
eqpath = self._get_specific_path(
workPath=self.work_dir,
edge=edge,
wp=wp,
state=state,
r=r,
sim="eq",
)
tipath = self._get_specific_path(
workPath=self.work_dir,
edge=edge,
wp=wp,
state=state,
r=r,
sim="transitions",
)
self._extract_snapshots(eqpath, tipath)
result = self._prepare_single_tpr(
simpath=tipath,
toppath=toppath,
state=state,
sim_type="transitions",
framestart=1,
framestop=81,
)
if result.returncode != 0:
self._logger.log(f"WARNING, grompp has failed in {tipath}", _LE.WARNING)
for line in result.stderr.split("\n"):
self._logger.log(line, _LE.DEBUG)
self._clean_backup_files(tipath)
def prepare_transitions(self, jobs: List[str]):
for edge in jobs:
ligTopPath = self._get_specific_path(
workPath=self.work_dir, edge=edge, wp="ligand"
)
protTopPath = self._get_specific_path(
workPath=self.work_dir, edge=edge, wp="complex"
)
for state in self.states:
for r in range(1, self.get_perturbation_map().replicas + 1):
self._logger.log(
f"Preparing transitions: {edge}, {state}, run {r}", _LE.DEBUG
)
self._prepare_system(
edge=edge, state=state, wp="ligand", r=r, toppath=ligTopPath
)
self._prepare_system(
edge=edge, state=state, wp="complex", r=r, toppath=protTopPath
)
def _check_result(self, batch: List[List[str]]) -> List[List[bool]]:
"""
        Check that the expected transition .tpr files exist in each edge's directories
"""
output_files = [
f"ligand/stateA/run1/transitions/ti80.tpr",
f"ligand/stateB/run1/transitions/ti80.tpr",
f"complex/stateA/run1/transitions/ti80.tpr",
f"complex/stateB/run1/transitions/ti80.tpr",
]
results = []
for subjob in batch:
subjob_results = []
for job in subjob:
subjob_results.append(
all(
[
os.path.isfile(os.path.join(self.work_dir, job, f))
for f in output_files
]
)
)
results.append(subjob_results)
return results
| 33.62069
| 100
| 0.559179
|
1b0584dc3b96d7da3fb901263181c084e3d60abf
| 3,235
|
py
|
Python
|
RecoEgamma/ElectronIdentification/python/Identification/mvaElectronID_tools.py
|
pasmuss/cmssw
|
566f40c323beef46134485a45ea53349f59ae534
|
[
"Apache-2.0"
] | null | null | null |
RecoEgamma/ElectronIdentification/python/Identification/mvaElectronID_tools.py
|
pasmuss/cmssw
|
566f40c323beef46134485a45ea53349f59ae534
|
[
"Apache-2.0"
] | null | null | null |
RecoEgamma/ElectronIdentification/python/Identification/mvaElectronID_tools.py
|
pasmuss/cmssw
|
566f40c323beef46134485a45ea53349f59ae534
|
[
"Apache-2.0"
] | null | null | null |
import FWCore.ParameterSet.Config as cms
# =======================================================
# Define simple containers for MVA cut values and related
# =======================================================
class EleMVA_3Categories_WP:
"""
This is a container class to hold MVA cut values for a 3-category MVA
as well as the names of the value maps that contain the MVA values computed
for all particles in a producer upstream.
"""
def __init__(self,
idName,
mvaValueMapName,
mvaCategoriesMapName,
cutCategory0,
cutCategory1,
cutCategory2
):
self.idName = idName
self.mvaValueMapName = mvaValueMapName
self.mvaCategoriesMapName = mvaCategoriesMapName
self.cutCategory0 = cutCategory0
self.cutCategory1 = cutCategory1
self.cutCategory2 = cutCategory2
def getCutValues(self):
return [self.cutCategory0, self.cutCategory1, self.cutCategory2]
class EleMVA_6Categories_WP:
"""
This is a container class to hold MVA cut values for a 6-category MVA
as well as the names of the value maps that contain the MVA values computed
for all particles in a producer upstream.
"""
def __init__(self,
idName,
mvaValueMapName,
mvaCategoriesMapName,
cutCategory0,
cutCategory1,
cutCategory2,
cutCategory3,
cutCategory4,
cutCategory5
):
self.idName = idName
self.mvaValueMapName = mvaValueMapName
self.mvaCategoriesMapName = mvaCategoriesMapName
self.cutCategory0 = cutCategory0
self.cutCategory1 = cutCategory1
self.cutCategory2 = cutCategory2
self.cutCategory3 = cutCategory3
self.cutCategory4 = cutCategory4
self.cutCategory5 = cutCategory5
def getCutValues(self):
return [self.cutCategory0, self.cutCategory1, self.cutCategory2,
self.cutCategory3, self.cutCategory4, self.cutCategory5]
# ==============================================================
# Define the complete MVA cut sets
# ==============================================================
def configureVIDMVAEleID_V1( mvaWP ):
"""
This function configures the full cms.PSet for a VID ID and returns it.
The inputs: an object of the class EleMVA_6Categories_WP or similar
that contains all necessary parameters for this MVA.
"""
parameterSet = cms.PSet(
#
idName = cms.string( mvaWP.idName ),
cutFlow = cms.VPSet(
cms.PSet( cutName = cms.string("GsfEleMVACut"),
mvaCuts = cms.vdouble( mvaWP.getCutValues() ),
mvaValueMapName = cms.InputTag( mvaWP.mvaValueMapName ),
                      mvaCategoriesMapName = cms.InputTag( mvaWP.mvaCategoriesMapName ),
needsAdditionalProducts = cms.bool(True),
isIgnored = cms.bool(False)
)
)
)
#
return parameterSet
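# Illustrative usage sketch (hypothetical cut values and value-map names):
#   wp90 = EleMVA_6Categories_WP(
#       idName = "mvaEleID-example-V1-wp90",
#       mvaValueMapName = "electronMVAValueMapProducer:ExampleMVAValues",
#       mvaCategoriesMapName = "electronMVAValueMapProducer:ExampleMVACategories",
#       cutCategory0 = 0.1, cutCategory1 = 0.2, cutCategory2 = 0.3,
#       cutCategory3 = 0.4, cutCategory4 = 0.5, cutCategory5 = 0.6)
#   my_id_pset = configureVIDMVAEleID_V1(wp90)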
| 37.183908
| 87
| 0.558887
|
6c3b267c232341826199beb71b2f18ec9bf2880c
| 1,015
|
py
|
Python
|
orchestra/tests/management_commands/test_migrate_certifications.py
|
code-review-doctor/orchestra
|
1ba58d26c508e89d2050dc9b0c8439eb1eadd014
|
[
"Apache-2.0"
] | 444
|
2016-07-25T05:24:30.000Z
|
2022-02-21T08:12:29.000Z
|
orchestra/tests/management_commands/test_migrate_certifications.py
|
code-review-doctor/orchestra
|
1ba58d26c508e89d2050dc9b0c8439eb1eadd014
|
[
"Apache-2.0"
] | 395
|
2016-07-26T20:49:59.000Z
|
2022-03-15T20:58:45.000Z
|
orchestra/tests/management_commands/test_migrate_certifications.py
|
code-review-doctor/orchestra
|
1ba58d26c508e89d2050dc9b0c8439eb1eadd014
|
[
"Apache-2.0"
] | 63
|
2016-07-26T04:10:21.000Z
|
2022-03-29T03:32:19.000Z
|
from unittest.mock import patch
from django.core.management import call_command
from django.core.management.base import CommandError
from orchestra.tests.helpers import OrchestraTestCase
class MigrateCertificationsTestCase(OrchestraTestCase):
patch_path = ('orchestra.management.commands.'
'migrate_certifications.migrate_certifications')
@patch(patch_path)
def test_options(self, mock_migrate):
# Test no options
with self.assertRaises(CommandError):
call_command('migrate_certifications')
mock_migrate.assert_not_called()
        # Test with valid arguments
        call_command('migrate_certifications',
                     'test_source_workflow_slug',
                     'test_destination_workflow_slug',
                     certifications=['test_cert_1', 'test_cert_2'])
        mock_migrate.assert_called_once_with(
            'test_source_workflow_slug',
            'test_destination_workflow_slug',
            ['test_cert_1', 'test_cert_2']
        )
| 33.833333
| 67
| 0.673892
|
1432e63b63ec3e9a968d3d95ca09f3a688bcbfeb
| 5,399
|
py
|
Python
|
optuna/visualization/matplotlib/_edf.py
|
agarwalrounak/optuna
|
b5fd0439dc33c94c06251974b8cb023a3f9bccc7
|
[
"MIT"
] | 2
|
2021-02-28T10:35:23.000Z
|
2021-06-16T10:01:44.000Z
|
optuna/visualization/matplotlib/_edf.py
|
agarwalrounak/optuna
|
b5fd0439dc33c94c06251974b8cb023a3f9bccc7
|
[
"MIT"
] | 12
|
2021-03-28T06:50:16.000Z
|
2022-03-07T15:07:23.000Z
|
optuna/visualization/matplotlib/_edf.py
|
agarwalrounak/optuna
|
b5fd0439dc33c94c06251974b8cb023a3f9bccc7
|
[
"MIT"
] | 1
|
2021-03-17T04:28:42.000Z
|
2021-03-17T04:28:42.000Z
|
import itertools
from typing import Callable
from typing import cast
from typing import List
from typing import Optional
from typing import Sequence
from typing import Union
import numpy as np
from optuna._experimental import experimental
from optuna.logging import get_logger
from optuna.study import Study
from optuna.trial import FrozenTrial
from optuna.trial import TrialState
from optuna.visualization._utils import _check_plot_args
from optuna.visualization.matplotlib._matplotlib_imports import _imports
if _imports.is_successful():
from optuna.visualization.matplotlib._matplotlib_imports import Axes
from optuna.visualization.matplotlib._matplotlib_imports import plt
_logger = get_logger(__name__)
@experimental("2.2.0")
def plot_edf(
study: Union[Study, Sequence[Study]],
*,
target: Optional[Callable[[FrozenTrial], float]] = None,
target_name: str = "Objective Value",
) -> "Axes":
"""Plot the objective value EDF (empirical distribution function) of a study with Matplotlib.
.. seealso::
Please refer to :func:`optuna.visualization.plot_edf` for an example,
where this function can be replaced with it.
Example:
The following code snippet shows how to plot EDF.
.. plot::
import math
import optuna
def ackley(x, y):
a = 20 * math.exp(-0.2 * math.sqrt(0.5 * (x ** 2 + y ** 2)))
b = math.exp(0.5 * (math.cos(2 * math.pi * x) + math.cos(2 * math.pi * y)))
return -a - b + math.e + 20
def objective(trial, low, high):
x = trial.suggest_float("x", low, high)
y = trial.suggest_float("y", low, high)
return ackley(x, y)
sampler = optuna.samplers.RandomSampler(seed=10)
# Widest search space.
study0 = optuna.create_study(study_name="x=[0,5), y=[0,5)", sampler=sampler)
study0.optimize(lambda t: objective(t, 0, 5), n_trials=500)
# Narrower search space.
study1 = optuna.create_study(study_name="x=[0,4), y=[0,4)", sampler=sampler)
study1.optimize(lambda t: objective(t, 0, 4), n_trials=500)
# Narrowest search space but it doesn't include the global optimum point.
study2 = optuna.create_study(study_name="x=[1,3), y=[1,3)", sampler=sampler)
study2.optimize(lambda t: objective(t, 1, 3), n_trials=500)
optuna.visualization.matplotlib.plot_edf([study0, study1, study2])
Args:
study:
A target :class:`~optuna.study.Study` object.
You can pass multiple studies if you want to compare those EDFs.
target:
A function to specify the value to display. If it is :obj:`None` and ``study`` is being
used for single-objective optimization, the objective values are plotted.
.. note::
Specify this argument if ``study`` is being used for multi-objective optimization.
target_name:
Target's name to display on the axis label.
Returns:
A :class:`matplotlib.axes.Axes` object.
Raises:
:exc:`ValueError`:
If ``target`` is :obj:`None` and ``study`` is being used for multi-objective
optimization.
"""
_imports.check()
if isinstance(study, Study):
studies = [study]
else:
studies = list(study)
_check_plot_args(studies, target, target_name)
return _get_edf_plot(studies, target, target_name)
def _get_edf_plot(
studies: List[Study],
target: Optional[Callable[[FrozenTrial], float]] = None,
target_name: str = "Objective Value",
) -> "Axes":
# Set up the graph style.
plt.style.use("ggplot") # Use ggplot style sheet for similar outputs to plotly.
_, ax = plt.subplots()
ax.set_title("Empirical Distribution Function Plot")
ax.set_xlabel(target_name)
ax.set_ylabel("Cumulative Probability")
ax.set_ylim(0, 1)
cmap = plt.get_cmap("tab20") # Use tab20 colormap for multiple line plots.
# Prepare data for plotting.
if len(studies) == 0:
_logger.warning("There are no studies.")
return ax
all_trials = list(
itertools.chain.from_iterable(
(
trial
for trial in study.get_trials(deepcopy=False)
if trial.state == TrialState.COMPLETE
)
for study in studies
)
)
if len(all_trials) == 0:
_logger.warning("There are no complete trials.")
return ax
if target is None:
def _target(t: FrozenTrial) -> float:
return cast(float, t.value)
target = _target
min_x_value = min(target(trial) for trial in all_trials)
max_x_value = max(target(trial) for trial in all_trials)
x_values = np.linspace(min_x_value, max_x_value, 100)
# Draw multiple line plots.
for i, study in enumerate(studies):
values = np.asarray(
[
target(trial)
for trial in study.get_trials(deepcopy=False)
if trial.state == TrialState.COMPLETE
]
)
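        # EDF value at each grid point x: the fraction of this study's completed
        # trials whose target value is <= x (computed with a broadcasted comparison).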
y_values = np.sum(values[:, np.newaxis] <= x_values, axis=0) / values.size
ax.plot(x_values, y_values, color=cmap(i), alpha=0.7, label=study.study_name)
return ax
| 31.208092
| 99
| 0.624375
|
d01b8bb42fce3d3eab61d1b147af6506e08050f6
| 157
|
py
|
Python
|
Models/admin.py
|
EmmanuelMuriungi/facepass
|
c16f5af4e8db59638666d708cba2042364be1f54
|
[
"MIT"
] | 3
|
2020-09-24T22:57:13.000Z
|
2022-02-01T17:17:26.000Z
|
Models/admin.py
|
EmmanuelMuriungi/facepass
|
c16f5af4e8db59638666d708cba2042364be1f54
|
[
"MIT"
] | null | null | null |
Models/admin.py
|
EmmanuelMuriungi/facepass
|
c16f5af4e8db59638666d708cba2042364be1f54
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import Records
admin.site.register(Records)
| 17.444444
| 39
| 0.770701
|
0756b5b3a8534b7497f56a0847f09744312b0d2b
| 468
|
py
|
Python
|
stubs/micropython-v1_18-esp8266/random.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/micropython-v1_18-esp8266/random.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/micropython-v1_18-esp8266/random.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
"""
Module: 'random' on micropython-v1.18-esp8266
"""
# MCU: {'ver': 'v1.18', 'port': 'esp8266', 'arch': 'xtensa', 'sysname': 'esp8266', 'release': '1.18', 'name': 'micropython', 'mpy': 9733, 'version': '1.18', 'machine': 'ESP module with ESP8266', 'build': '', 'nodename': 'esp8266', 'platform': 'esp8266', 'family': 'micropython'}
# Stubber: 1.5.4
from typing import Any
def getrandbits(*args, **kwargs) -> Any:
...
def seed(*args, **kwargs) -> Any:
...
| 31.2
| 278
| 0.594017
|
a89f09cbf4e674bcad6359607f729fac91014bae
| 3,471
|
py
|
Python
|
tests/contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/test_micheline_coding_KT1G39.py
|
juztin/pytezos-1
|
7e608ff599d934bdcf129e47db43dbdb8fef9027
|
[
"MIT"
] | 1
|
2021-05-20T16:52:08.000Z
|
2021-05-20T16:52:08.000Z
|
tests/contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/test_micheline_coding_KT1G39.py
|
juztin/pytezos-1
|
7e608ff599d934bdcf129e47db43dbdb8fef9027
|
[
"MIT"
] | 1
|
2020-12-30T16:44:56.000Z
|
2020-12-30T16:44:56.000Z
|
tests/contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/test_micheline_coding_KT1G39.py
|
juztin/pytezos-1
|
7e608ff599d934bdcf129e47db43dbdb8fef9027
|
[
"MIT"
] | 1
|
2022-03-20T19:01:00.000Z
|
2022-03-20T19:01:00.000Z
|
from unittest import TestCase
from tests import get_data
from pytezos.michelson.converter import build_schema, decode_micheline, encode_micheline
class MichelineCodingTestKT1G39(TestCase):
@classmethod
def setUpClass(cls):
cls.maxDiff = None
cls.code = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/code_KT1G39.json')
cls.schema = dict(
parameter=build_schema(cls.code[0]),
storage=build_schema(cls.code[1])
)
def test_micheline_inverse_storage_KT1G39(self):
expected = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/storage_KT1G39.json')
decoded = decode_micheline(expected, self.code[1], self.schema['storage'])
actual = encode_micheline(decoded, self.schema['storage'])
self.assertEqual(expected, actual)
def test_micheline_inverse_parameter_ong4Gv(self):
expected = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/parameter_ong4Gv.json')
decoded = decode_micheline(expected, self.code[0], self.schema['parameter'])
actual = encode_micheline(decoded, self.schema['parameter'])
self.assertEqual(expected, actual)
def test_micheline_inverse_parameter_ooqEHd(self):
expected = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/parameter_ooqEHd.json')
decoded = decode_micheline(expected, self.code[0], self.schema['parameter'])
actual = encode_micheline(decoded, self.schema['parameter'])
self.assertEqual(expected, actual)
def test_micheline_inverse_parameter_onynir(self):
expected = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/parameter_onynir.json')
decoded = decode_micheline(expected, self.code[0], self.schema['parameter'])
actual = encode_micheline(decoded, self.schema['parameter'])
self.assertEqual(expected, actual)
def test_micheline_inverse_parameter_onn4pk(self):
expected = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/parameter_onn4pk.json')
decoded = decode_micheline(expected, self.code[0], self.schema['parameter'])
actual = encode_micheline(decoded, self.schema['parameter'])
self.assertEqual(expected, actual)
def test_micheline_inverse_parameter_ooYJ85(self):
expected = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/parameter_ooYJ85.json')
decoded = decode_micheline(expected, self.code[0], self.schema['parameter'])
actual = encode_micheline(decoded, self.schema['parameter'])
self.assertEqual(expected, actual)
def test_micheline_inverse_parameter_ooDRnz(self):
expected = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/parameter_ooDRnz.json')
decoded = decode_micheline(expected, self.code[0], self.schema['parameter'])
actual = encode_micheline(decoded, self.schema['parameter'])
self.assertEqual(expected, actual)
def test_micheline_inverse_parameter_oophVz(self):
expected = get_data(
path='contracts/KT1G393LjojNshvMdf68XQD24Hwjn7xarzNe/parameter_oophVz.json')
decoded = decode_micheline(expected, self.code[0], self.schema['parameter'])
actual = encode_micheline(decoded, self.schema['parameter'])
self.assertEqual(expected, actual)
| 46.905405
| 88
| 0.715356
|
0462fc89680e09f3706b02a0c69d253caf861e3b
| 3,418
|
py
|
Python
|
DQM/BeamMonitor/python/BeamMonitor_Cosmics_cff.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
DQM/BeamMonitor/python/BeamMonitor_Cosmics_cff.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
DQM/BeamMonitor/python/BeamMonitor_Cosmics_cff.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
dqmBeamMonitor = DQMEDAnalyzer("BeamMonitor",
monitorName = cms.untracked.string('BeamMonitor'),
beamSpot = cms.untracked.InputTag('offlineBeamSpot'), ## hltOfflineBeamSpot for HLTMON
primaryVertex = cms.untracked.InputTag('offlinePrimaryVertices'),
timeInterval = cms.untracked.int32(920),
fitEveryNLumi = cms.untracked.int32(2),
resetEveryNLumi = cms.untracked.int32(20),
resetPVEveryNLumi = cms.untracked.int32(2),
Debug = cms.untracked.bool(False),
recordName = cms.untracked.string('BeamSpotOnlineHLTObjectsRcd'),
useLockRecords = cms.untracked.bool(False),
BeamFitter = cms.PSet(
Debug = cms.untracked.bool(False),
TrackCollection = cms.untracked.InputTag('ctfWithMaterialTracksP5'), ## ctfWithMaterialTracksP5 for CRAFT
IsMuonCollection = cms.untracked.bool(False),
WriteAscii = cms.untracked.bool(False),
AsciiFileName = cms.untracked.string('BeamFit.txt'),
SaveNtuple = cms.untracked.bool(False),
OutputFileName = cms.untracked.string('BeamFit.root'),
MinimumPt = cms.untracked.double(1.2),
MaximumEta = cms.untracked.double(2.4),
MaximumImpactParameter = cms.untracked.double(5),
MaximumZ = cms.untracked.double(300),
MinimumTotalLayers = cms.untracked.int32(0),
MinimumPixelLayers = cms.untracked.int32(0),
MaximumNormChi2 = cms.untracked.double(100.0),
TrackAlgorithm = cms.untracked.vstring(), ## ctf,rs,cosmics,initialStep,lowPtTripletStep...; for all algos, leave it blank
TrackQuality = cms.untracked.vstring(), ## loose, tight, highPurity...; for all qualities, leave it blank
InputBeamWidth = cms.untracked.double(-1.0), ## if -1 use the value calculated by the analyzer
FractionOfFittedTrks = cms.untracked.double(0.9),
MinimumInputTracks = cms.untracked.int32(100),
deltaSignificanceCut = cms.untracked.double(20)
),
dxBin = cms.int32(200),
dxMin = cms.double(-1.0),
dxMax = cms.double(1.0),
vxBin = cms.int32(100),
vxMin = cms.double(-.1),
vxMax = cms.double(.1),
dzBin = cms.int32(80),
dzMin = cms.double(-20),
dzMax = cms.double(20),
phiBin = cms.int32(63),
phiMin = cms.double(-3.15),
phiMax = cms.double(3.15)
)
| 64.490566
| 154
| 0.481276
|
94b615a31bff13351602f8c1841670cdf2a95825
| 373
|
py
|
Python
|
2020/day_01/python/day01.py
|
josephroquedev/advent-of-code
|
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
|
[
"MIT"
] | null | null | null |
2020/day_01/python/day01.py
|
josephroquedev/advent-of-code
|
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
|
[
"MIT"
] | 2
|
2021-06-02T00:41:38.000Z
|
2021-11-30T10:05:29.000Z
|
2020/day_01/python/day01.py
|
autoreleasefool/advent-of-code
|
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
|
[
"MIT"
] | null | null | null |
from aoc import AOC
import math
from itertools import chain
aoc = AOC(year=2020, day=1)
data = aoc.load()
# Part 1
expenses = set(data.numbers())
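# For each expense e its partner must be exactly 2020 - e; the set makes that
# membership check O(1), and both members of the matching pair survive the filter,
# so their product is the answer.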
product = math.prod([e for e in expenses if (2020 - e) in expenses])
aoc.p1(product)
# Part 2
product = math.prod(
set(chain(*[[e for e in expenses if (2020 - f - e) in expenses] for f in expenses]))
)
aoc.p2(product)
| 18.65
| 88
| 0.675603
|
7ca23da1f374a3368b02520c263bd6ba1f55b8cb
| 24,745
|
py
|
Python
|
src/pathfinding/algorithms.py
|
ShanaryS/algorithm-visualizer
|
85d37cc2c85e1c9d3b96113f14bcceec1a1f69b9
|
[
"MIT"
] | 1
|
2021-09-09T07:48:51.000Z
|
2021-09-09T07:48:51.000Z
|
src/pathfinding/algorithms.py
|
ShanaryS/algorithm-visualizer
|
85d37cc2c85e1c9d3b96113f14bcceec1a1f69b9
|
[
"MIT"
] | null | null | null |
src/pathfinding/algorithms.py
|
ShanaryS/algorithm-visualizer
|
85d37cc2c85e1c9d3b96113f14bcceec1a1f69b9
|
[
"MIT"
] | null | null | null |
"""Contains pathfinding and maze generation algorithms"""
from src.pathfinding.colors import *
from dataclasses import dataclass
import pygame
from src.pathfinding.graph import draw, draw_vis_text, reset_algo, GraphState, VisText
from src.pathfinding.values import get_random_sample, get_randrange
from queue import PriorityQueue
from src.pathfinding.node import Square
@dataclass
class AlgoState:
"""Stores the state of the algorithms, whether they are finished or not"""
ordinal_node_clicked: list
dijkstra_finished: bool = False
a_star_finished: bool = False
bi_dijkstra_finished: bool = False
maze: bool = False
best_path_sleep: int = 3
highway_multiplier = 3
def dijkstra(gph: GraphState,
algo: AlgoState,
txt: VisText,
start: Square,
end: Square,
ignore_node: Square = None,
draw_best_path: bool = True,
visualize: bool = True) \
-> [dict, bool]:
"""Code for the dijkstra algorithm"""
    # Used to determine the order of squares to check. Order of args helps decide the priority.
queue_pos: int = 0
open_set = PriorityQueue()
open_set.put((0, queue_pos, start))
open_set_hash: set = {start}
# Determine what is the best square to check
g_score: dict = {square: float('inf') for row in gph.graph for square in row}
g_score[start] = 0
    # Keeps track of the node each node was reached from (essentially a linked list).
came_from: dict = {}
# Continues until every node has been checked or best path found
i = 0
while not open_set.empty():
        # If user closes window the program terminates
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
# Gets the square currently being checked
curr_square: Square = open_set.get()[2]
open_set_hash.remove(curr_square)
# Terminates if found the best path
if curr_square == end:
if draw_best_path:
best_path(gph, algo, txt, came_from, end, visualize=visualize)
return True
return came_from
# Decides the order of neighbours to check
for nei in curr_square.neighbours:
temp_g_score: int = g_score[curr_square] + 1
if temp_g_score < g_score[nei]:
came_from[nei] = curr_square
g_score[nei] = temp_g_score
if nei not in open_set_hash:
queue_pos += 1
open_set.put((g_score[nei], queue_pos, nei))
open_set_hash.add(nei)
if nei != end and nei.color != CLOSED_COLOR and nei != ignore_node:
nei.set_open()
        # Only visualize if requested. Skip already-closed squares to avoid re-drawing when a mid node is included.
i += 1
if visualize and not curr_square.is_closed():
if i % gph.speed_multiplier == 0:
i = 0
draw(gph, txt, display_update=False)
draw_vis_text(txt, is_dijkstra=True)
# Sets square to closed after finished checking
if curr_square != start and curr_square != ignore_node:
curr_square.set_closed()
return False
def a_star(gph: GraphState,
algo: AlgoState,
txt: VisText,
start: Square,
end: Square,
ignore_node: Square = None,
draw_best_path: bool = True,
visualize: bool = True) \
-> [dict, bool]:
"""Code for the A* algorithm"""
    # Used to determine the order of squares to check. Order of args helps decide the priority.
queue_pos: int = 0
open_set = PriorityQueue()
open_set.put((0, queue_pos, start))
open_set_hash: set = {start}
# Determine what is the best square to check
g_score: dict = {square: float('inf') for row in gph.graph for square in row}
g_score[start] = 0
f_score: dict = {square: float('inf') for row in gph.graph for square in row}
f_score[start] = heuristic(start.get_pos(), end.get_pos())
    # Keeps track of the node each node was reached from (essentially a linked list).
came_from: dict = {}
# Continues until every node has been checked or best path found
i = 0 # Used to speed up graph if using map
while not open_set.empty():
        # If user closes window the program terminates
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
# Gets the square currently being checked
curr_square: Square = open_set.get()[2]
open_set_hash.remove(curr_square)
# Terminates if found the best path
if curr_square == end:
if draw_best_path:
best_path(gph, algo, txt, came_from, end, visualize=visualize)
return True
return came_from
# Decides the order of neighbours to check
for nei in curr_square.neighbours:
temp_g_score: int = g_score[curr_square] + 1
if temp_g_score < g_score[nei]:
came_from[nei] = curr_square
g_score[nei] = temp_g_score
f_score[nei] = temp_g_score + heuristic(nei.get_pos(), end.get_pos())
if nei not in open_set_hash:
queue_pos += 1
open_set.put((f_score[nei], queue_pos, nei))
open_set_hash.add(nei)
if nei != end and nei.color != CLOSED_COLOR and nei != ignore_node:
nei.set_open()
        # Only visualize if requested. Skip already-closed squares to avoid re-drawing when a mid node is included.
i += 1
if visualize and not curr_square.is_closed():
if i % gph.speed_multiplier == 0:
i = 0
draw(gph, txt, display_update=False)
draw_vis_text(txt, is_a_star=True)
# Sets square to closed after finished checking
if curr_square != start and curr_square != ignore_node:
curr_square.set_closed()
return False
def heuristic(pos1: tuple, pos2: tuple) -> int:
"""Used by A* to prioritize traveling towards next node"""
x1, y1 = pos1
x2, y2 = pos2
return abs(x1 - x2) + abs(y1 - y2)
def bi_dijkstra(gph: GraphState,
algo: AlgoState,
txt: VisText,
start: Square,
end: Square,
alt_color: bool = False,
ignore_node: Square = None,
draw_best_path: bool = True,
visualize: bool = True) \
-> [dict, bool]:
"""Code for Bi-directional Dijkstra algorithm. Custom algorithm made by me."""
    # Used to determine the order of squares to check. Order of args helps decide the priority.
queue_pos: int = 0
open_set = PriorityQueue()
open_set_hash: set = {start, end}
open_set.put((0, queue_pos, start, 'start'))
queue_pos += 1
open_set.put((0, queue_pos, end, 'end'))
# Determine what is the best square to check
g_score: dict = {square: float('inf') for row in gph.graph for square in row}
g_score[start] = 0
g_score[end] = 0
    # Keeps track of the node each node was reached from (essentially a linked list).
came_from_start: dict = {}
came_from_end: dict = {}
# Continues until every node has been checked or best path found
i = 0
while not open_set.empty():
        # If user closes window the program terminates
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
# Gets the square currently being checked
temp: tuple = open_set.get()
curr_square: Square = temp[2]
open_set_hash.remove(curr_square)
# Terminates if found the best path
for nei in curr_square.neighbours:
if curr_square.is_open() and nei.is_open_alt():
if draw_best_path:
best_path_bi_dijkstra(gph, algo, txt, came_from_start, came_from_end,
curr_square, nei, visualize=visualize)
return True
return came_from_start, came_from_end, curr_square, nei
elif curr_square.is_open_alt() and nei.is_open() and not alt_color:
if draw_best_path:
best_path_bi_dijkstra(gph, algo, txt, came_from_start, came_from_end,
nei, curr_square, visualize=visualize)
return True
return came_from_start, came_from_end, nei, curr_square
elif curr_square.is_open_alt() and nei.is_open_alt_():
if draw_best_path:
best_path_bi_dijkstra(gph, algo, txt, came_from_start, came_from_end,
curr_square, nei, visualize=visualize)
return True
return came_from_start, came_from_end, curr_square, nei
elif curr_square.is_open_alt_() and nei.is_open_alt():
if draw_best_path:
best_path_bi_dijkstra(gph, algo, txt, came_from_start, came_from_end,
nei, curr_square, visualize=visualize)
return True
return came_from_start, came_from_end, nei, curr_square
# Decides the order of neighbours to check for both swarms.
temp_g_score: int
if temp[3] == 'start':
for nei in curr_square.neighbours:
temp_g_score = g_score[curr_square] + 1
if temp_g_score < g_score[nei]:
came_from_start[nei] = curr_square
g_score[nei] = temp_g_score
if nei not in open_set_hash:
queue_pos += 1
open_set.put((g_score[nei], queue_pos, nei, 'start'))
open_set_hash.add(nei)
if nei != end and nei.color != CLOSED_COLOR and nei != ignore_node:
if alt_color:
nei.set_open_alt()
else:
nei.set_open()
elif temp[3] == 'end':
for nei in curr_square.neighbours:
temp_g_score = g_score[curr_square] + 1
if temp_g_score < g_score[nei]:
came_from_end[nei] = curr_square
g_score[nei] = temp_g_score
if nei not in open_set_hash:
queue_pos += 1
open_set.put((g_score[nei], queue_pos, nei, 'end'))
open_set_hash.add(nei)
if nei != start and nei.color != CLOSED_COLOR and nei != ignore_node:
if alt_color:
nei.set_open_alt_()
else:
nei.set_open_alt()
        # Only visualize if requested. Skip already-closed squares to avoid re-drawing when a mid node is included.
i += 1
if visualize and not curr_square.is_closed():
if i % gph.speed_multiplier == 0:
i = 0
draw(gph, txt, display_update=False)
draw_vis_text(txt, is_bi_dijkstra=True)
# Sets square to closed after finished checking
if curr_square != start and curr_square != end and curr_square != ignore_node:
curr_square.set_closed()
return False
def best_path_bi_dijkstra(gph: GraphState,
algo: AlgoState,
txt: VisText,
came_from_start: dict,
came_from_end: dict,
first_meet_node: Square,
second_meet_node: Square,
visualize: bool = True) \
-> None:
"""Used by bi_dijkstra to draw best path from in two parts"""
# Fixes bug when can't find a path
if isinstance(came_from_start, bool) or isinstance(came_from_end, bool):
return
# Draws best path for first swarm
best_path(gph, algo, txt, came_from_start, first_meet_node, visualize=visualize)
# To not skip the last two at once, need a draw, draw_vis_text, and sleep here
first_meet_node.set_path()
# To not skip the last two at once, need a draw, draw_vis_text, and sleep here
# Draws best path for second swarm
second_meet_node.set_path()
# To not skip the last two at once, need a draw and draw_vis_text here
best_path(gph, algo, txt, came_from_end, second_meet_node, reverse=True, visualize=visualize)
# To not skip the last two at once, need a draw, draw_vis_text, and sleep here
def best_path(gph: GraphState,
algo: AlgoState,
txt: VisText,
came_from: dict,
curr_square: Square,
reverse: bool = False,
visualize: bool = True) \
-> None:
"""Main algo for reconstructing path"""
# Fixes bug when dragging where came_from would evaluate to bool instead of dict.
if isinstance(came_from, bool):
return
# Puts node path into list so it's easier to traverse in either direction and choose start and end points
path: list = []
while curr_square in came_from:
curr_square = came_from[curr_square]
path.append(curr_square)
# Need to traverse in reverse depending on what part of algo
i = 0
if reverse:
for square in path[:-1]:
square.set_path()
i += 1
if visualize:
if i % gph.speed_multiplier == 0:
i = 0
pygame.time.delay(algo.best_path_sleep)
draw(gph, txt, display_update=False)
draw_vis_text(txt, is_best_path=True)
else:
for square in path[len(path)-2::-1]:
square.set_path()
i += 1
if visualize:
if i % gph.speed_multiplier == 0:
i = 0
pygame.time.delay(algo.best_path_sleep)
draw(gph, txt, display_update=False)
draw_vis_text(txt, is_best_path=True)
def start_mid_end(gph: GraphState,
algo: AlgoState,
txt: VisText,
start: Square,
mid: Square,
end: Square,
is_dijkstra: bool = False,
is_a_star: bool = False,
is_bi_dijkstra: bool = False,
visualize: bool = True) \
-> None:
"""Used if algos need to reach mid node first"""
# Selects the correct algo to use
if is_dijkstra:
if visualize:
start_to_mid = dijkstra(gph, algo, txt, start, mid, ignore_node=end, draw_best_path=False)
mid_to_end = dijkstra(gph, algo, txt, mid, end, ignore_node=start, draw_best_path=False)
else:
start_to_mid = algo_no_vis(gph, algo, txt, start, mid,
is_dijkstra=True, ignore_node=end, draw_best_path=False)
mid_to_end = algo_no_vis(gph, algo, txt, mid, end, is_dijkstra=True, ignore_node=start,
draw_best_path=False, reset=False)
start.set_start(), mid.set_mid(), end.set_end() # Fixes nodes disappearing when dragging
best_path(gph, algo, txt, start_to_mid, mid, visualize=visualize)
best_path(gph, algo, txt, mid_to_end, end, visualize=visualize)
elif is_a_star:
if visualize:
start_to_mid = a_star(gph, algo, txt, start, mid, ignore_node=end, draw_best_path=False)
mid_to_end = a_star(gph, algo, txt, mid, end, ignore_node=start, draw_best_path=False)
else:
start_to_mid = algo_no_vis(gph, algo, txt, start, mid,
is_a_star=True, ignore_node=end, draw_best_path=False)
mid_to_end = algo_no_vis(gph, algo, txt, mid, end, is_a_star=True, ignore_node=start,
draw_best_path=False, reset=False)
start.set_start(), mid.set_mid(), end.set_end() # Fixes nodes disappearing when dragging
best_path(gph, algo, txt, start_to_mid, mid, visualize=visualize)
best_path(gph, algo, txt, mid_to_end, end, visualize=visualize)
elif is_bi_dijkstra:
if visualize:
start_to_mid = bi_dijkstra(gph, algo, txt, start, mid, ignore_node=end, draw_best_path=False)
mid_to_end = bi_dijkstra(gph, algo, txt, mid, end,
alt_color=True, ignore_node=start, draw_best_path=False)
else:
start_to_mid = algo_no_vis(gph, algo, txt, start, mid,
is_bi_dijkstra=True, ignore_node=end, draw_best_path=False)
mid_to_end = algo_no_vis(gph, algo, txt, mid, end, alt_color=True, is_bi_dijkstra=True, ignore_node=start,
draw_best_path=False, reset=False)
start.set_start(), mid.set_mid(), end.set_end() # Fixes nodes disappearing when dragging
# Fixes bug when can't find a path
if not isinstance(start_to_mid, bool):
best_path_bi_dijkstra(gph, algo, txt, start_to_mid[0], start_to_mid[1],
start_to_mid[2], start_to_mid[3], visualize=visualize)
if not isinstance(mid_to_end, bool):
best_path_bi_dijkstra(gph, algo, txt, mid_to_end[0], mid_to_end[1],
mid_to_end[2], mid_to_end[3], visualize=visualize)
def algo_no_vis(gph: GraphState,
algo: AlgoState,
txt: VisText,
start: Square,
end: Square,
is_dijkstra: bool = False,
is_a_star: bool = False,
is_bi_dijkstra: bool = False,
alt_color: bool = False,
ignore_node: Square = None,
draw_best_path: bool = True,
reset: bool = True) \
-> [dict, bool]:
"""Skip steps to end when visualizing algo. Used when dragging ordinal node once finished"""
# Selects the correct algo to use
if is_dijkstra:
if reset: # Used to not reset start -> mid visualizations if going from mid -> end
reset_algo(gph, algo)
algo.dijkstra_finished = True
# Separates calling algo_no_vis with mid node or not
if draw_best_path:
dijkstra(gph, algo, txt, start, end, visualize=False)
start.set_start() # Fixes start disappearing when dragging
else:
return dijkstra(gph, algo, txt, start, end,
ignore_node=ignore_node, draw_best_path=False, visualize=False)
elif is_a_star:
if reset: # Used to not reset start -> mid visualizations if going from mid -> end
reset_algo(gph, algo)
algo.a_star_finished = True
# Separates calling algo_no_vis with mid node or not
if draw_best_path:
a_star(gph, algo, txt, start, end, visualize=False)
start.set_start() # Fixes start disappearing when dragging
else:
return a_star(gph, algo, txt, start, end, ignore_node=ignore_node, draw_best_path=False, visualize=False)
elif is_bi_dijkstra:
if reset: # Used to not reset start -> mid visualizations if going from mid -> end
reset_algo(gph, algo)
algo.bi_dijkstra_finished = True
# Separates calling algo_no_vis with mid node or not
if draw_best_path:
bi_dijkstra(gph, algo, txt, start, end, alt_color=alt_color, visualize=False)
start.set_start() # Fixes start disappearing when dragging
else:
return bi_dijkstra(gph, algo, txt, start, end, alt_color=alt_color, ignore_node=ignore_node,
draw_best_path=False, visualize=False)
def draw_recursive_maze(gph: GraphState,
txt: VisText,
chamber: tuple = None,
visualize: bool = True) \
-> None:
"""Creates maze using recursive division.
Implemented following wikipedia guidelines.
https://en.wikipedia.org/wiki/Maze_generation_algorithm#Recursive_division_method
Inspired by https://github.com/ChrisKneller/pygame-pathfinder
"""
# Sets min size for division
division_limit: int = 3
# Creates chambers to divide into
if chamber is None:
chamber_width: int = len(gph.graph)
chamber_height: int = len(gph.graph[1])
chamber_left: int = 0
chamber_top: int = 0
else:
chamber_width: int = chamber[2]
chamber_height: int = chamber[3]
chamber_left: int = chamber[0]
chamber_top: int = chamber[1]
# Helps with location of chambers
x_divide = int(chamber_width/2)
y_divide = int(chamber_height/2)
# Draws vertical maze line within chamber
if chamber_width >= division_limit:
for y in range(chamber_height):
gph.graph[chamber_left + x_divide][chamber_top + y].set_wall()
gph.wall_nodes.add(gph.graph[chamber_left + x_divide][chamber_top + y])
if visualize:
draw(gph, txt, display_update=False)
draw_vis_text(txt, is_recursive_maze=True)
# Draws horizontal maze line within chamber
if chamber_height >= division_limit:
for x in range(chamber_width):
gph.graph[chamber_left + x][chamber_top + y_divide].set_wall()
gph.wall_nodes.add(gph.graph[chamber_left + x][chamber_top + y_divide])
if visualize:
draw(gph, txt, display_update=False)
draw_vis_text(txt, is_recursive_maze=True)
# Terminates if below division limit
if chamber_width < division_limit and chamber_height < division_limit:
return
# Defining limits on where to draw walls
top_left: tuple = (chamber_left, chamber_top, x_divide, y_divide)
top_right: tuple = (chamber_left + x_divide+1, chamber_top, chamber_width - x_divide-1, y_divide)
bottom_left: tuple = (chamber_left, chamber_top + y_divide+1, x_divide, chamber_height - y_divide-1)
bottom_right: tuple = (chamber_left + x_divide+1, chamber_top + y_divide+1,
chamber_width - x_divide-1, chamber_height - y_divide-1)
# Combines all chambers into one object
chambers: tuple = (top_left, top_right, bottom_left, bottom_right)
# Defines location of the walls
left: tuple = (chamber_left, chamber_top + y_divide, x_divide, 1)
right: tuple = (chamber_left + x_divide+1, chamber_top + y_divide, chamber_width - x_divide-1, 1)
top: tuple = (chamber_left + x_divide, chamber_top, 1, y_divide)
bottom: tuple = (chamber_left + x_divide, chamber_top + y_divide+1, 1, chamber_height - y_divide-1)
# Combines walls into one object
walls: tuple = (left, right, top, bottom)
# Number of gaps to leave in walls after each division into four sub quadrants.
num_gaps: int = 3
# Prevents drawing wall over gaps
gaps_to_offset: list = [x for x in range(num_gaps - 1, gph.rows, num_gaps)]
# Draws the gaps into the walls
for wall in get_random_sample(walls, num_gaps):
if wall[3] == 1:
x = get_randrange(wall[0], wall[0] + wall[2])
y = wall[1]
if x in gaps_to_offset and y in gaps_to_offset:
if wall[2] == x_divide:
x -= 1
else:
x += 1
if x >= gph.rows:
x = gph.rows - 1
else:
x = wall[0]
y = get_randrange(wall[1], wall[1] + wall[3])
if y in gaps_to_offset and x in gaps_to_offset:
if wall[3] == y_divide:
y -= 1
else:
y += 1
if y >= gph.rows:
y = gph.rows - 1
gph.graph[x][y].reset()
gph.wall_nodes.discard(gph.graph[x][y])
if visualize:
draw(gph, txt, display_update=False)
draw_vis_text(txt, is_recursive_maze=True)
# Recursively divides chambers
    for chamber in chambers:
        draw_recursive_maze(gph, txt, chamber, visualize=visualize)
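# Illustrative sketch (not part of the original module): given an existing
# GraphState `gph` and VisText `txt`, a complete maze can be generated in one
# call, with or without step-by-step animation:
#   draw_recursive_maze(gph, txt)                   # animated
#   draw_recursive_maze(gph, txt, visualize=False)  # instant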
| 40.105348
| 118
| 0.582582
|
b4c42761008a079a047925d4f3483fe8419be47c
| 2,346
|
py
|
Python
|
src/thespian/actor.py
|
pbs/thespian
|
c6552ea9e1a0106add82a62c919511d37e03c668
|
[
"Unlicense"
] | 1
|
2017-04-19T12:45:43.000Z
|
2017-04-19T12:45:43.000Z
|
src/thespian/actor.py
|
pbs/thespian
|
c6552ea9e1a0106add82a62c919511d37e03c668
|
[
"Unlicense"
] | null | null | null |
src/thespian/actor.py
|
pbs/thespian
|
c6552ea9e1a0106add82a62c919511d37e03c668
|
[
"Unlicense"
] | null | null | null |
import multiprocessing as mp
import os
import logging
log = logging.getLogger(__name__)
class DeadActorException(Exception):
pass
def die(msg="Giving up!"):
raise RuntimeError(msg)
class Actor(object):
inbox_size = 4
SEND_TIMEOUT = 3600 * 4
def __init__(self, inbox_size=None):
self.inbox_size = inbox_size or self.inbox_size
self._q = mp.Queue(maxsize=self.inbox_size)
self._lock = mp.RLock()
self._p = None
def send(self, args):
log.debug("sending to %r", self)
self._q.put(args, timeout=self.SEND_TIMEOUT)
def recv(self, nowait=False):
log.debug("recv %r", self)
self._lock.acquire(False) or die(
"Failed to acquire lock in recv.")
try:
if nowait and self._q.empty():
log.debug("%r empty queue", self)
return None
msg = self._q.get()
log.debug("%r got %r", self, msg)
return msg
finally:
self._lock.release()
def start(self):
assert self._p is None
self._p = mp.Process(target=self._start)
self._p.daemon = True
self._p.start()
def sub_init(self):
"""Initialize stuff in subprocess, called after fork."""
pass
def _start(self):
self.sub_init()
self._lock.acquire(False) or die(
"failed to acquire lock in _start")
log.debug("start %r", self)
self.process()
def process(self):
raise NotImplementedError()
def check_alive(self):
if not self._p.is_alive():
raise DeadActorException("{}:{} is dead".format(self, self._p))
def __repr__(self):
return "<%s object at %x pid:%d>" % (type(self).__name__, id(self), os.getpid())
class LActor(Actor):
"""a simple looping actor"""
STOP = "stop"
def process(self):
while self.tick() is not self.STOP:
pass
def tick(self):
raise NotImplementedError()
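# Illustrative sketch (an assumption, not part of the original module): a
# minimal LActor subclass showing how the pieces above fit together. The name
# PrintActor and the driver snippet are hypothetical.
class PrintActor(LActor):
    """Logs every message it receives; stops when it sees LActor.STOP."""
    def tick(self):
        msg = self.recv()
        if msg == self.STOP:
            return self.STOP
        log.debug("PrintActor received %r", msg)
# Hypothetical driver code:
#   actor = PrintActor()
#   actor.start()                # forks a child that loops in process()/tick()
#   actor.send("hello")
#   actor.send(PrintActor.STOP)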
class CPActor(Actor):
"""An Actor for the Current Process. Doesn't spawn a child."""
def start(self):
raise AssertionError("This actor doesn't spawn a process!")
def process(self):
pass
def check_alive(self):
raise AssertionError("current process actor has no subprocess to check")
| 24.4375
| 88
| 0.589514
|
e3d775a4c649b3a9c749a9b170b147186c126747
| 20,031
|
py
|
Python
|
conans/client/generators/cmake_find_package_multi.py
|
czoido/conan
|
81370e515f3ed7ba0486d6ffeb4def8a2765ae14
|
[
"MIT"
] | 1
|
2021-08-05T15:33:08.000Z
|
2021-08-05T15:33:08.000Z
|
conans/client/generators/cmake_find_package_multi.py
|
czoido/conan
|
81370e515f3ed7ba0486d6ffeb4def8a2765ae14
|
[
"MIT"
] | null | null | null |
conans/client/generators/cmake_find_package_multi.py
|
czoido/conan
|
81370e515f3ed7ba0486d6ffeb4def8a2765ae14
|
[
"MIT"
] | null | null | null |
import os
import textwrap
from jinja2 import Template
from conans.client.generators import CMakeFindPackageGenerator
from conans.client.generators.cmake import DepsCppCmake
from conans.client.generators.cmake_find_package_common import (find_transitive_dependencies,
target_template,
CMakeFindPackageCommonMacros)
from conans.client.generators.cmake_multi import extend
from conans.util.files import save
class CMakeFindPackageMultiGenerator(CMakeFindPackageGenerator):
name = "cmake_find_package_multi"
config_template = textwrap.dedent("""
{macros_and_functions}
# Requires CMake > 3.0
if(${{CMAKE_VERSION}} VERSION_LESS "3.0")
message(FATAL_ERROR "The 'cmake_find_package_multi' generator only works with CMake > 3.0")
endif()
include(${{CMAKE_CURRENT_LIST_DIR}}/{filename}Targets.cmake)
{target_props_block}
{find_dependencies_block}
""")
targets_template = textwrap.dedent("""
if(NOT TARGET {name}::{name})
add_library({name}::{name} INTERFACE IMPORTED)
endif()
# Load the debug and release library finders
get_filename_component(_DIR "${{CMAKE_CURRENT_LIST_FILE}}" PATH)
file(GLOB CONFIG_FILES "${{_DIR}}/{filename}Target-*.cmake")
foreach(f ${{CONFIG_FILES}})
include(${{f}})
endforeach()
""")
    # This template renders the properties of target name::name, once per config in configs (e.g. ["Release", "Debug"])
target_properties = Template("""
# Assign target properties
set_property(TARGET {{name}}::{{name}}
PROPERTY INTERFACE_LINK_LIBRARIES
{%- for config in configs %}
$<$<CONFIG:{{config}}>:${{'{'}}{{name}}_LIBRARIES_TARGETS_{{config.upper()}}}
${{'{'}}{{name}}_LINKER_FLAGS_{{config.upper()}}_LIST}>
{%- endfor %})
set_property(TARGET {{name}}::{{name}}
PROPERTY INTERFACE_INCLUDE_DIRECTORIES
{%- for config in configs %}
$<$<CONFIG:{{config}}>:${{'{'}}{{name}}_INCLUDE_DIRS_{{config.upper()}}}>
{%- endfor %})
set_property(TARGET {{name}}::{{name}}
PROPERTY INTERFACE_COMPILE_DEFINITIONS
{%- for config in configs %}
$<$<CONFIG:{{config}}>:${{'{'}}{{name}}_COMPILE_DEFINITIONS_{{config.upper()}}}>
{%- endfor %})
set_property(TARGET {{name}}::{{name}}
PROPERTY INTERFACE_COMPILE_OPTIONS
{%- for config in configs %}
$<$<CONFIG:{{config}}>:${{'{'}}{{name}}_COMPILE_OPTIONS_{{config.upper()}}_LIST}>
{%- endfor %})
""")
# https://gitlab.kitware.com/cmake/cmake/blob/master/Modules/BasicConfigVersion-SameMajorVersion.cmake.in
config_version_template = textwrap.dedent("""
set(PACKAGE_VERSION "{version}")
if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
else()
if("{version}" MATCHES "^([0-9]+)\\\\.")
set(CVF_VERSION_MAJOR "${{CMAKE_MATCH_1}}")
else()
set(CVF_VERSION_MAJOR "{version}")
endif()
if(PACKAGE_FIND_VERSION_MAJOR STREQUAL CVF_VERSION_MAJOR)
set(PACKAGE_VERSION_COMPATIBLE TRUE)
else()
set(PACKAGE_VERSION_COMPATIBLE FALSE)
endif()
if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
set(PACKAGE_VERSION_EXACT TRUE)
endif()
endif()
""")
components_target_build_type_tpl = Template(textwrap.dedent("""\
########## MACROS ###########################################################################
#############################################################################################
{{ conan_message }}
{{ conan_find_apple_frameworks }}
{{ conan_package_library_targets }}
########### VARIABLES #######################################################################
#############################################################################################
{{ global_target_variables }}
set({{ pkg_name }}_COMPONENTS_{{ build_type }} {{ pkg_components }})
{%- for comp_name, comp in components %}
########### COMPONENT {{ comp_name }} VARIABLES #############################################
set({{ pkg_name }}_{{ comp_name }}_INCLUDE_DIRS_{{ build_type }} {{ comp.include_paths }})
set({{ pkg_name }}_{{ comp_name }}_INCLUDE_DIR_{{ build_type }} {{ comp.include_path }})
set({{ pkg_name }}_{{ comp_name }}_INCLUDES_{{ build_type }} {{ comp.include_paths }})
set({{ pkg_name }}_{{ comp_name }}_LIB_DIRS_{{ build_type }} {{ comp.lib_paths }})
set({{ pkg_name }}_{{ comp_name }}_RES_DIRS_{{ build_type }} {{ comp.res_paths }})
set({{ pkg_name }}_{{ comp_name }}_DEFINITIONS_{{ build_type }} {{ comp.defines }})
set({{ pkg_name }}_{{ comp_name }}_COMPILE_DEFINITIONS_{{ build_type }} {{ comp.compile_definitions }})
set({{ pkg_name }}_{{ comp_name }}_COMPILE_OPTIONS_C_{{ build_type }} "{{ comp.cflags_list }}")
set({{ pkg_name }}_{{ comp_name }}_COMPILE_OPTIONS_CXX_{{ build_type }} "{{ comp.cxxflags_list }}")
set({{ pkg_name }}_{{ comp_name }}_LIBS_{{ build_type }} {{ comp.libs }})
set({{ pkg_name }}_{{ comp_name }}_SYSTEM_LIBS_{{ build_type }} {{ comp.system_libs }})
set({{ pkg_name }}_{{ comp_name }}_FRAMEWORK_DIRS_{{ build_type }} {{ comp.framework_paths }})
set({{ pkg_name }}_{{ comp_name }}_FRAMEWORKS_{{ build_type }} {{ comp.frameworks }})
set({{ pkg_name }}_{{ comp_name }}_BUILD_MODULES_PATHS_{{ build_type }} {{ comp.build_modules_paths }})
set({{ pkg_name }}_{{ comp_name }}_DEPENDENCIES_{{ build_type }} {{ comp.public_deps }})
set({{ pkg_name }}_{{ comp_name }}_LINKER_FLAGS_LIST_{{ build_type }}
$<$<STREQUAL:$<TARGET_PROPERTY:TYPE>,SHARED_LIBRARY>:{{ comp.sharedlinkflags_list }}>
$<$<STREQUAL:$<TARGET_PROPERTY:TYPE>,MODULE_LIBRARY>:{{ comp.sharedlinkflags_list }}>
$<$<STREQUAL:$<TARGET_PROPERTY:TYPE>,EXECUTABLE>:{{ comp.exelinkflags_list }}>
)
########## COMPONENT {{ comp_name }} FIND LIBRARIES & FRAMEWORKS / DYNAMIC VARS #############
set({{ pkg_name }}_{{ comp_name }}_FRAMEWORKS_FOUND_{{ build_type }} "")
conan_find_apple_frameworks({{ pkg_name }}_{{ comp_name }}_FRAMEWORKS_FOUND_{{ build_type }} "{{ '${'+pkg_name+'_'+comp_name+'_FRAMEWORKS_'+build_type+'}' }}" "{{ '${'+pkg_name+'_'+comp_name+'_FRAMEWORK_DIRS_'+build_type+'}' }}")
set({{ pkg_name }}_{{ comp_name }}_LIB_TARGETS_{{ build_type }} "")
set({{ pkg_name }}_{{ comp_name }}_NOT_USED_{{ build_type }} "")
set({{ pkg_name }}_{{ comp_name }}_LIBS_FRAMEWORKS_DEPS_{{ build_type }} {{ '${'+pkg_name+'_'+comp_name+'_FRAMEWORKS_FOUND_'+build_type+'}' }} {{ '${'+pkg_name+'_'+comp_name+'_SYSTEM_LIBS_'+build_type+'}' }} {{ '${'+pkg_name+'_'+comp_name+'_DEPENDENCIES_'+build_type+'}' }})
conan_package_library_targets("{{ '${'+pkg_name+'_'+comp_name+'_LIBS_'+build_type+'}' }}"
"{{ '${'+pkg_name+'_'+comp_name+'_LIB_DIRS_'+build_type+'}' }}"
"{{ '${'+pkg_name+'_'+comp_name+'_LIBS_FRAMEWORKS_DEPS_'+build_type+'}' }}"
{{ pkg_name }}_{{ comp_name }}_NOT_USED_{{ build_type }}
{{ pkg_name }}_{{ comp_name }}_LIB_TARGETS_{{ build_type }}
"{{ build_type }}"
"{{ pkg_name }}_{{ comp_name }}")
set({{ pkg_name }}_{{ comp_name }}_LINK_LIBS_{{ build_type }} {{ '${'+pkg_name+'_'+comp_name+'_LIB_TARGETS_'+build_type+'}' }} {{ '${'+pkg_name+'_'+comp_name+'_LIBS_FRAMEWORKS_DEPS_'+build_type+'}' }})
{%- endfor %}
"""))
components_targets_tpl = Template(textwrap.dedent("""\
{%- for comp_name, comp in components %}
if(NOT TARGET {{ pkg_name }}::{{ comp_name }})
add_library({{ pkg_name }}::{{ comp_name }} INTERFACE IMPORTED)
endif()
{%- endfor %}
if(NOT TARGET {{ pkg_name }}::{{ pkg_name }})
add_library({{ pkg_name }}::{{ pkg_name }} INTERFACE IMPORTED)
endif()
# Load the debug and release library finders
get_filename_component(_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
file(GLOB CONFIG_FILES "${_DIR}/{{ pkg_filename }}Target-*.cmake")
foreach(f ${CONFIG_FILES})
include(${f})
endforeach()
if({{ pkg_name }}_FIND_COMPONENTS)
foreach(_FIND_COMPONENT {{ '${'+pkg_name+'_FIND_COMPONENTS}' }})
list(FIND {{ pkg_name }}_COMPONENTS_{{ build_type }} "{{ pkg_name }}::${_FIND_COMPONENT}" _index)
if(${_index} EQUAL -1)
conan_message(FATAL_ERROR "Conan: Component '${_FIND_COMPONENT}' NOT found in package '{{ pkg_name }}'")
else()
conan_message(STATUS "Conan: Component '${_FIND_COMPONENT}' found in package '{{ pkg_name }}'")
endif()
endforeach()
endif()
"""))
components_config_tpl = Template(textwrap.dedent("""\
########## MACROS ###########################################################################
#############################################################################################
{{ conan_message }}
# Requires CMake > 3.0
if(${CMAKE_VERSION} VERSION_LESS "3.0")
message(FATAL_ERROR "The 'cmake_find_package_multi' generator only works with CMake > 3.0")
endif()
include(${CMAKE_CURRENT_LIST_DIR}/{{ pkg_filename }}Targets.cmake)
########## FIND PACKAGE DEPENDENCY ##########################################################
#############################################################################################
include(CMakeFindDependencyMacro)
{%- for public_dep in pkg_public_deps %}
if(NOT {{ public_dep }}_FOUND)
if(${CMAKE_VERSION} VERSION_LESS "3.9.0")
find_package({{ public_dep }} REQUIRED NO_MODULE)
else()
find_dependency({{ public_dep }} REQUIRED NO_MODULE)
endif()
else()
message(STATUS "Dependency {{ public_dep }} already found")
endif()
{%- endfor %}
########## TARGETS PROPERTIES ###############################################################
#############################################################################################
{%- macro tvalue(pkg_name, comp_name, var, config) -%}
{{'${'+pkg_name+'_'+comp_name+'_'+var+'_'+config.upper()+'}'}}
{%- endmacro -%}
{%- for comp_name, comp in components %}
########## COMPONENT {{ comp_name }} TARGET PROPERTIES ######################################
set_property(TARGET {{ pkg_name }}::{{ comp_name }} PROPERTY INTERFACE_LINK_LIBRARIES
{%- for config in configs %}
$<$<CONFIG:{{config}}>:{{tvalue(pkg_name, comp_name, 'LINK_LIBS', config)}}
{{tvalue(pkg_name, comp_name, 'LINKER_FLAGS_LIST', config)}}>
{%- endfor %})
set_property(TARGET {{ pkg_name }}::{{ comp_name }} PROPERTY INTERFACE_INCLUDE_DIRECTORIES
{%- for config in configs %}
$<$<CONFIG:{{config}}>:{{tvalue(pkg_name, comp_name, 'INCLUDE_DIRS', config)}}>
{%- endfor %})
set_property(TARGET {{ pkg_name }}::{{ comp_name }} PROPERTY INTERFACE_COMPILE_DEFINITIONS
{%- for config in configs %}
$<$<CONFIG:{{config}}>:{{tvalue(pkg_name, comp_name, 'COMPILE_DEFINITIONS', config)}}>
{%- endfor %})
set_property(TARGET {{ pkg_name }}::{{ comp_name }} PROPERTY INTERFACE_COMPILE_OPTIONS
{%- for config in configs %}
$<$<CONFIG:{{config}}>:
{{tvalue(pkg_name, comp_name, 'COMPILE_OPTIONS_C', config)}}
{{tvalue(pkg_name, comp_name, 'COMPILE_OPTIONS_CXX', config)}}>
{%- endfor %})
set({{ pkg_name }}_{{ comp_name }}_TARGET_PROPERTIES TRUE)
{%- endfor %}
########## GLOBAL TARGET PROPERTIES #########################################################
if(NOT {{ pkg_name }}_{{ pkg_name }}_TARGET_PROPERTIES)
set_property(TARGET {{ pkg_name }}::{{ pkg_name }} APPEND PROPERTY INTERFACE_LINK_LIBRARIES
{%- for config in configs %}
$<$<CONFIG:{{config}}>:{{ '${'+pkg_name+'_COMPONENTS_'+config.upper()+'}'}}>
{%- endfor %})
endif()
"""))
def __init__(self, conanfile):
super(CMakeFindPackageMultiGenerator, self).__init__(conanfile)
self.configuration = str(self.conanfile.settings.build_type)
self.configurations = [v for v in conanfile.settings.build_type.values_range if v != "None"]
# FIXME: Ugly way to define the output path
self.output_path = os.getcwd()
def generate(self):
generator_files = self.content
for generator_file, content in generator_files.items():
generator_file = os.path.join(self.output_path, generator_file)
save(generator_file, content)
@property
def filename(self):
return None
@property
def content(self):
ret = {}
build_type = str(self.conanfile.settings.build_type).upper()
build_type_suffix = "_{}".format(self.configuration.upper()) if self.configuration else ""
for pkg_name, cpp_info in self.deps_build_info.dependencies:
self._validate_components(cpp_info)
pkg_filename = self._get_filename(cpp_info)
pkg_findname = self._get_name(cpp_info)
pkg_version = cpp_info.version
public_deps = self.get_public_deps(cpp_info)
deps_names = []
for it in public_deps:
name = "{}::{}".format(*self._get_require_name(*it))
if name not in deps_names:
deps_names.append(name)
deps_names = ';'.join(deps_names)
pkg_public_deps_filenames = [self._get_filename(self.deps_build_info[it[0]]) for it in
public_deps]
config_version = self.config_version_template.format(version=pkg_version)
ret[self._config_version_filename(pkg_filename)] = config_version
if not cpp_info.components:
ret[self._config_filename(pkg_filename)] = self._config(
filename=pkg_filename,
name=pkg_findname,
version=cpp_info.version,
public_deps_names=pkg_public_deps_filenames
)
ret["{}Targets.cmake".format(pkg_filename)] = self.targets_template.format(
filename=pkg_filename, name=pkg_findname)
# If any config matches the build_type one, add it to the cpp_info
dep_cpp_info = extend(cpp_info, build_type.lower())
deps = DepsCppCmake(dep_cpp_info)
find_lib = target_template.format(name=pkg_findname, deps=deps,
build_type_suffix=build_type_suffix,
deps_names=deps_names)
ret["{}Target-{}.cmake".format(pkg_filename, self.configuration.lower())] = find_lib
else:
cpp_info = extend(cpp_info, build_type.lower())
pkg_info = DepsCppCmake(cpp_info)
components = self._get_components(pkg_name, cpp_info)
# Note these are in reversed order, from more dependent to less dependent
pkg_components = " ".join(["{p}::{c}".format(p=pkg_findname, c=comp_findname) for
comp_findname, _ in reversed(components)])
global_target_variables = target_template.format(name=pkg_findname, deps=pkg_info,
build_type_suffix=build_type_suffix,
deps_names=deps_names)
variables = self.components_target_build_type_tpl.render(
pkg_name=pkg_findname,
global_target_variables=global_target_variables,
pkg_components=pkg_components,
build_type=build_type,
components=components,
conan_find_apple_frameworks=CMakeFindPackageCommonMacros.apple_frameworks_macro,
conan_package_library_targets=CMakeFindPackageCommonMacros.conan_package_library_targets
)
ret["{}Target-{}.cmake".format(pkg_filename, build_type.lower())] = variables
targets = self.components_targets_tpl.render(
pkg_name=pkg_findname,
pkg_filename=pkg_filename,
components=components,
build_type=build_type
)
ret["{}Targets.cmake".format(pkg_filename)] = targets
target_config = self.components_config_tpl.render(
pkg_name=pkg_findname,
pkg_filename=pkg_filename,
components=components,
pkg_public_deps=pkg_public_deps_filenames,
conan_message=CMakeFindPackageCommonMacros.conan_message,
configs=self.configurations
)
ret[self._config_filename(pkg_filename)] = target_config
return ret
def _config_filename(self, pkg_filename):
if pkg_filename == pkg_filename.lower():
return "{}-config.cmake".format(pkg_filename)
else:
return "{}Config.cmake".format(pkg_filename)
def _config_version_filename(self, pkg_filename):
if pkg_filename == pkg_filename.lower():
return "{}-config-version.cmake".format(pkg_filename)
else:
return "{}ConfigVersion.cmake".format(pkg_filename)
def _config(self, filename, name, version, public_deps_names):
# Builds the XXXConfig.cmake file for one package
# The common macros
macros_and_functions = "\n".join([
CMakeFindPackageCommonMacros.conan_message,
CMakeFindPackageCommonMacros.apple_frameworks_macro,
CMakeFindPackageCommonMacros.conan_package_library_targets,
])
# Define the targets properties
targets_props = self.target_properties.render(name=name, configs=self.configurations)
# The find_dependencies_block
find_dependencies_block = ""
if public_deps_names:
# Here we are generating only Config files, so do not search for FindXXX modules
find_dependencies_block = find_transitive_dependencies(public_deps_names,
find_modules=False)
tmp = self.config_template.format(name=name, version=version,
filename=filename,
target_props_block=targets_props,
find_dependencies_block=find_dependencies_block,
macros_and_functions=macros_and_functions)
return tmp
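# Illustrative note (an assumption, derived from the filename helpers above):
# for a dependency named "zlib" consumed in a Release build, content() emits
# roughly these files:
#   zlib-config.cmake, zlib-config-version.cmake,
#   zlibTargets.cmake, zlibTarget-release.cmake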
| 51.09949
| 282
| 0.543907
|
6f8962dbf8f160beda5a5eecdf318d853eaeeaf1
| 1,040
|
py
|
Python
|
ktrain/tests/test_regression.py
|
ReluXingZeng/ktrain
|
bfbf85f7b60c54ee208e8a22d9a1f4067405021e
|
[
"Apache-2.0"
] | null | null | null |
ktrain/tests/test_regression.py
|
ReluXingZeng/ktrain
|
bfbf85f7b60c54ee208e8a22d9a1f4067405021e
|
[
"Apache-2.0"
] | null | null | null |
ktrain/tests/test_regression.py
|
ReluXingZeng/ktrain
|
bfbf85f7b60c54ee208e8a22d9a1f4067405021e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""
Tests of ktrain regression flows
"""
import testenv
from unittest import TestCase, main, skip
import ktrain
Sequential = ktrain.imports.Sequential
Dense = ktrain.imports.Dense
def bostonhousing():
from tensorflow.keras.datasets import boston_housing
(x_train, y_train), (x_test, y_test) = boston_housing.load_data()
model = Sequential()
model.add(Dense(1, input_shape=(x_train.shape[1],), activation='linear'))
model.compile(optimizer='adam', loss='mse', metrics=['mse', 'mae'])
learner = ktrain.get_learner(model, train_data=(x_train, y_train), val_data=(x_test, y_test))
learner.lr_find()
hist = learner.fit(0.05, 8, cycle_len=1, cycle_mult=2)
learner.view_top_losses(n=5)
learner.validate()
return hist
class TestRegression(TestCase):
def test_bostonhousing(self):
hist = bostonhousing()
min_loss = min(hist.history['val_loss'])
print(min_loss)
self.assertLess(min_loss, 55)
if __name__ == "__main__":
main()
| 28.108108
| 97
| 0.698077
|
9842a02379f90fdc8b55d2a5395707750918a713
| 6,354
|
py
|
Python
|
src/test/proto_library_test.py
|
liuyongsong/typhoon-blade
|
2a076ff87b2a91e552948b47ed5168c2b37e99e6
|
[
"BSD-3-Clause"
] | 1
|
2018-04-09T09:06:45.000Z
|
2018-04-09T09:06:45.000Z
|
src/test/proto_library_test.py
|
liuyongsong/typhoon-blade
|
2a076ff87b2a91e552948b47ed5168c2b37e99e6
|
[
"BSD-3-Clause"
] | null | null | null |
src/test/proto_library_test.py
|
liuyongsong/typhoon-blade
|
2a076ff87b2a91e552948b47ed5168c2b37e99e6
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright (c) 2011 Tencent Inc.
All rights reserved.
This is the test module for proto_library target.
Author: Michaelpeng <michaelpeng@tencent.com>
Date: October 20, 2011
"""
import os
import sys
sys.path.append('..')
import unittest
import subprocess
import blade.blade
import blade.configparse
from blade.blade import Blade
from blade.configparse import BladeConfig
from blade_namespace import Namespace
from html_test_runner import HTMLTestRunner
class TestProtoLibrary(unittest.TestCase):
"""Test proto_library """
def setUp(self):
"""setup method. """
self.command = 'build'
self.targets = ['test_proto_library/...']
self.target_path = 'test_proto_library'
self.cur_dir = os.getcwd()
os.chdir('./testdata')
self.blade_path = '../../blade'
self.working_dir = '.'
self.current_building_path = 'build64_release'
self.current_source_dir = '.'
self.options = Namespace({'m' : '64',
'profile' : 'release',
'generate_dynamic' : True,
'generate_java' : True,
'generate_php' : True,
'verbose' : True
})
self.direct_targets = []
self.all_command_targets = []
self.related_targets = {}
# Init global configuration manager
blade.configparse.blade_config = BladeConfig(self.current_source_dir)
blade.configparse.blade_config.parse()
blade.blade.blade = Blade(self.targets,
self.blade_path,
self.working_dir,
self.current_building_path,
self.current_source_dir,
self.options,
blade_command=self.command)
self.blade = blade.blade.blade
(self.direct_targets,
self.all_command_targets) = self.blade.load_targets()
def tearDown(self):
"""tear down method. """
os.chdir(self.cur_dir)
def testLoadBuildsNotNone(self):
"""Test direct targets and all command targets are not none. """
self.assertEqual(self.direct_targets, [])
self.assertTrue(self.all_command_targets)
def testGenerateRules(self):
"""Test that rules are generated correctly. """
self.all_targets = self.blade.analyze_targets()
self.rules_buf = self.blade.generate_build_rules()
cc_library_lower = (self.target_path, 'lowercase')
proto_library_option = (self.target_path, 'rpc_option_proto')
        proto_library_meta = (self.target_path, 'rpc_meta_info_proto')
self.command_file = 'cmds.tmp'
self.assertTrue(cc_library_lower in self.all_targets.keys())
self.assertTrue(proto_library_option in self.all_targets.keys())
self.assertTrue(proto_library_meta in self.all_targets.keys())
p = subprocess.Popen("scons --dry-run > %s" % self.command_file,
stdout=subprocess.PIPE,
shell=True)
try:
p.wait()
self.assertEqual(p.returncode, 0)
com_lower_line = ''
com_proto_cpp_option = ''
com_proto_java_option = ''
com_proto_cpp_meta = ''
com_proto_java_meta = ''
com_proto_option_cc = ''
com_proto_meta_cc = ''
meta_depends_libs = ''
lower_depends_libs = ''
for line in open(self.command_file):
if 'plowercase.cpp.o -c' in line:
com_lower_line = line
if 'protobuf/bin/protoc' in line:
if 'cpp_out' in line:
if 'rpc_option.proto' in line:
com_proto_cpp_option = line
elif 'rpc_meta_info.proto' in line:
com_proto_cpp_meta = line
if 'java_out' in line:
if 'rpc_option.proto' in line:
com_proto_java_option = line
elif 'rpc_meta_info.proto' in line:
com_proto_java_meta = line
if 'rpc_option.pb.cc.o -c' in line:
com_proto_option_cc = line
if 'rpc_meta_info.pb.cc.o -c' in line:
com_proto_meta_cc = line
if 'librpc_meta_info_proto.so -m64' in line:
meta_depends_libs = line
if 'liblowercase.so -m64' in line:
lower_depends_libs = line
except:
print sys.exc_info()
self.fail("Failed while dry running in test case")
self.assertTrue('-fPIC -Wall -Wextra' in com_lower_line)
self.assertTrue('-Wframe-larger-than=69632' in com_lower_line)
self.assertTrue('-Werror=overloaded-virtual' in com_lower_line)
self.assertTrue(com_proto_cpp_option)
self.assertTrue(com_proto_cpp_meta)
self.assertTrue(com_proto_java_option)
self.assertTrue(com_proto_java_meta)
self.assertTrue('-fPIC -Wall -Wextra' in com_proto_option_cc)
self.assertTrue('-Wframe-larger-than=' in com_proto_option_cc)
self.assertTrue('-Werror=overloaded-virtual' not in com_proto_option_cc)
self.assertTrue('-fPIC -Wall -Wextra' in com_proto_meta_cc)
self.assertTrue('-Wframe-larger-than=' in com_proto_meta_cc)
self.assertTrue('-Werror=overloaded-virtual' not in com_proto_meta_cc)
self.assertTrue(meta_depends_libs)
self.assertTrue('librpc_option_proto.so' in meta_depends_libs)
self.assertTrue('liblowercase.so' in lower_depends_libs)
self.assertTrue('librpc_meta_info_proto.so' in lower_depends_libs)
self.assertTrue('librpc_option_proto.so' in lower_depends_libs)
os.remove('./SConstruct')
os.remove(self.command_file)
if __name__ == "__main__":
suite_test = unittest.TestSuite()
suite_test.addTests(
[unittest.defaultTestLoader.loadTestsFromTestCase(TestProtoLibrary)])
runner = unittest.TextTestRunner()
runner.run(suite_test)
| 38.047904
| 81
| 0.587347
|
c0ba6825ba92f62f54d33f3092675cd8b3ff83cc
| 3,639
|
py
|
Python
|
cycada/data/cityscapes.py
|
peterzcc/cycada_release
|
bfd1a9dd01bdb2a956cad13b01f3e305930b7d09
|
[
"BSD-2-Clause"
] | 532
|
2018-07-09T00:37:32.000Z
|
2022-03-09T15:10:07.000Z
|
cycada/data/cityscapes.py
|
ckevin4747/cycada_review
|
aac0c4724d704165738bfad9684fbffa9337c211
|
[
"BSD-2-Clause"
] | 41
|
2018-07-16T07:20:34.000Z
|
2021-12-10T21:20:23.000Z
|
cycada/data/cityscapes.py
|
ckevin4747/cycada_review
|
aac0c4724d704165738bfad9684fbffa9337c211
|
[
"BSD-2-Clause"
] | 143
|
2018-07-09T13:10:17.000Z
|
2022-02-15T14:24:29.000Z
|
import os.path
import sys
import numpy as np
import torch
import torch.utils.data as data
from PIL import Image
from .data_loader import register_data_params, register_dataset_obj
from .data_loader import DatasetParams
ignore_label = 255
id2label = {-1: ignore_label, 0: ignore_label, 1: ignore_label, 2: ignore_label,
3: ignore_label, 4: ignore_label, 5: ignore_label, 6: ignore_label,
7: 0, 8: 1, 9: ignore_label, 10: ignore_label, 11: 2, 12: 3, 13: 4,
14: ignore_label, 15: ignore_label, 16: ignore_label, 17: 5,
18: ignore_label, 19: 6, 20: 7, 21: 8, 22: 9, 23: 10, 24: 11, 25: 12, 26: 13, 27: 14,
28: 15, 29: ignore_label, 30: ignore_label, 31: 16, 32: 17, 33: 18}
palette = [128, 64, 128, 244, 35, 232, 70, 70, 70, 102, 102, 156, 190, 153, 153, 153, 153, 153, 250, 170, 30,
220, 220, 0, 107, 142, 35, 152, 251, 152, 70, 130, 180, 220, 20, 60, 255, 0, 0, 0, 0, 142, 0, 0, 70,
0, 60, 100, 0, 80, 100, 0, 0, 230, 119, 11, 32]
classes = ['road', 'sidewalk', 'building', 'wall', 'fence', 'pole', 'traffic light', 'traffic sign',
'vegetation', 'terrain', 'sky', 'person', 'rider', 'car', 'truck', 'bus', 'train', 'motorcycle',
'bicycle']
def remap_labels_to_train_ids(arr):
out = ignore_label * np.ones(arr.shape, dtype=np.uint8)
for id, label in id2label.items():
out[arr == id] = int(label)
return out
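# Illustrative sketch (an assumption): remapping a small raw gtFine label
# array; raw id 7 ("road") becomes train id 0, unknown ids collapse to 255.
#   raw = np.array([[7, 26], [0, 33]], dtype=np.uint8)
#   remap_labels_to_train_ids(raw)
#   # -> array([[  0,  13], [255,  18]], dtype=uint8)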
@register_data_params('cityscapes')
class CityScapesParams(DatasetParams):
num_channels = 3
image_size = 1024
mean = 0.5
std = 0.5
num_cls = 19
target_transform = None
@register_dataset_obj('cityscapes')
class Cityscapes(data.Dataset):
def __init__(self, root, split='train', remap_labels=True, transform=None,
target_transform=None):
self.root = root
sys.path.append(root)
self.split = split
self.remap_labels = remap_labels
self.ids = self.collect_ids()
self.transform = transform
self.target_transform = target_transform
self.num_cls = 19
self.id2label = id2label
self.classes = classes
def collect_ids(self):
im_dir = os.path.join(self.root, 'leftImg8bit', self.split)
ids = []
for dirpath, dirnames, filenames in os.walk(im_dir):
for filename in filenames:
if filename.endswith('.png'):
ids.append('_'.join(filename.split('_')[:3]))
return ids
def img_path(self, id):
fmt = 'leftImg8bit/{}/{}/{}_leftImg8bit.png'
subdir = id.split('_')[0]
path = fmt.format(self.split, subdir, id)
return os.path.join(self.root, path)
def label_path(self, id):
fmt = 'gtFine/{}/{}/{}_gtFine_labelIds.png'
subdir = id.split('_')[0]
path = fmt.format(self.split, subdir, id)
return os.path.join(self.root, path)
def __getitem__(self, index):
id = self.ids[index]
img = Image.open(self.img_path(id)).convert('RGB')
if self.transform is not None:
img = self.transform(img)
target = Image.open(self.label_path(id)).convert('L')
if self.remap_labels:
target = np.asarray(target)
target = remap_labels_to_train_ids(target)
target = Image.fromarray(np.uint8(target), 'L')
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
return len(self.ids)
if __name__ == '__main__':
cs = Cityscapes('/x/CityScapes')
| 36.029703
| 112
| 0.60044
|
ee1e257b5a35dd26f3d3ddc00bc2c33710d632ed
| 6,030
|
py
|
Python
|
machine_learning/Neural_Networks/Back Propogation/backpropagation.py
|
avi-pal/al-go-rithms
|
5167a20f1db7b366ff19f2962c1746a02e4f5067
|
[
"CC0-1.0"
] | 1,253
|
2017-06-06T07:19:25.000Z
|
2022-03-30T17:07:58.000Z
|
machine_learning/deep_learning/python/backpropagation.py
|
rishabh99-rc/al-go-rithms
|
4df20d7ef7598fda4bc89101f9a99aac94cdd794
|
[
"CC0-1.0"
] | 554
|
2017-09-29T18:56:01.000Z
|
2022-02-21T15:48:13.000Z
|
machine_learning/deep_learning/python/backpropagation.py
|
rishabh99-rc/al-go-rithms
|
4df20d7ef7598fda4bc89101f9a99aac94cdd794
|
[
"CC0-1.0"
] | 2,226
|
2017-09-29T19:59:59.000Z
|
2022-03-25T08:59:55.000Z
|
# Backprop on the Seeds Dataset
from random import seed
from random import randrange
from random import random
from csv import reader
from math import exp
# Load a CSV file
def load_csv(filename):
dataset = list()
with open(filename, 'r') as file:
csv_reader = reader(file)
for row in csv_reader:
if not row:
continue
dataset.append(row)
return dataset
# Convert string column to float
def str_column_to_float(dataset, column):
for row in dataset:
row[column] = float(row[column].strip())
# Convert string column to integer
def str_column_to_int(dataset, column):
class_values = [row[column] for row in dataset]
unique = set(class_values)
lookup = dict()
for i, value in enumerate(unique):
lookup[value] = i
for row in dataset:
row[column] = lookup[row[column]]
return lookup
# Find the min and max values for each column
def dataset_minmax(dataset):
stats = [[min(column), max(column)] for column in zip(*dataset)]
return stats
# Rescale dataset columns to the range 0-1
def normalize_dataset(dataset, minmax):
for row in dataset:
for i in range(len(row)-1):
row[i] = (row[i] - minmax[i][0]) / (minmax[i][1] - minmax[i][0])
# Split a dataset into k folds
def cross_validation_split(dataset, n_folds):
dataset_split = list()
dataset_copy = list(dataset)
fold_size = int(len(dataset) / n_folds)
for i in range(n_folds):
fold = list()
while len(fold) < fold_size:
index = randrange(len(dataset_copy))
fold.append(dataset_copy.pop(index))
dataset_split.append(fold)
return dataset_split
# Calculate accuracy percentage
def accuracy_metric(actual, predicted):
correct = 0
for i in range(len(actual)):
if actual[i] == predicted[i]:
correct += 1
return correct / float(len(actual)) * 100.0
# Evaluate an algorithm using a cross validation split
def evaluate_algorithm(dataset, algorithm, n_folds, *args):
folds = cross_validation_split(dataset, n_folds)
scores = list()
for fold in folds:
train_set = list(folds)
train_set.remove(fold)
train_set = sum(train_set, [])
test_set = list()
for row in fold:
row_copy = list(row)
test_set.append(row_copy)
row_copy[-1] = None
predicted = algorithm(train_set, test_set, *args)
actual = [row[-1] for row in fold]
accuracy = accuracy_metric(actual, predicted)
scores.append(accuracy)
return scores
# Calculate neuron activation for an input
def activate(weights, inputs):
activation = weights[-1]
for i in range(len(weights)-1):
activation += weights[i] * inputs[i]
return activation
# Transfer neuron activation
def transfer(activation):
return 1.0 / (1.0 + exp(-activation))
# Forward propagate input to a network output
def forward_propagate(network, row):
inputs = row
for layer in network:
new_inputs = []
for neuron in layer:
activation = activate(neuron['weights'], inputs)
neuron['output'] = transfer(activation)
new_inputs.append(neuron['output'])
inputs = new_inputs
return inputs
# Calculate the derivative of a neuron output
def transfer_derivative(output):
return output * (1.0 - output)
# Backpropagate error and store in neurons
def backward_propagate_error(network, expected):
for i in reversed(range(len(network))):
layer = network[i]
errors = list()
if i != len(network)-1:
for j in range(len(layer)):
error = 0.0
for neuron in network[i + 1]:
error += (neuron['weights'][j] * neuron['delta'])
errors.append(error)
else:
for j in range(len(layer)):
neuron = layer[j]
errors.append(expected[j] - neuron['output'])
for j in range(len(layer)):
neuron = layer[j]
neuron['delta'] = errors[j] * transfer_derivative(neuron['output'])
# Update network weights with error
def update_weights(network, row, l_rate):
for i in range(len(network)):
inputs = row[:-1]
if i != 0:
inputs = [neuron['output'] for neuron in network[i - 1]]
for neuron in network[i]:
for j in range(len(inputs)):
neuron['weights'][j] += l_rate * neuron['delta'] * inputs[j]
neuron['weights'][-1] += l_rate * neuron['delta']
# Train a network for a fixed number of epochs
def train_network(network, train, l_rate, n_epoch, n_outputs):
for epoch in range(n_epoch):
for row in train:
outputs = forward_propagate(network, row)
expected = [0 for i in range(n_outputs)]
expected[row[-1]] = 1
backward_propagate_error(network, expected)
update_weights(network, row, l_rate)
# Initialize a network
def initialize_network(n_inputs, n_hidden, n_outputs):
network = list()
hidden_layer = [{'weights':[random() for i in range(n_inputs + 1)]} for i in range(n_hidden)]
network.append(hidden_layer)
output_layer = [{'weights':[random() for i in range(n_hidden + 1)]} for i in range(n_outputs)]
network.append(output_layer)
return network
# Make a prediction with a network
def predict(network, row):
outputs = forward_propagate(network, row)
return outputs.index(max(outputs))
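# Illustrative sketch (an assumption, independent of the CSV flow below): a
# tiny XOR-style check of the building blocks above.
#   seed(1)
#   net = initialize_network(n_inputs=2, n_hidden=2, n_outputs=2)
#   train_network(net, [[0, 0, 0], [0, 1, 1], [1, 0, 1], [1, 1, 0]],
#                 l_rate=0.5, n_epoch=2000, n_outputs=2)
#   predict(net, [0, 1])  # usually 1 once training has converged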
# Backpropagation Algorithm With Stochastic Gradient Descent
def back_propagation(train, test, l_rate, n_epoch, n_hidden):
n_inputs = len(train[0]) - 1
n_outputs = len(set([row[-1] for row in train]))
network = initialize_network(n_inputs, n_hidden, n_outputs)
train_network(network, train, l_rate, n_epoch, n_outputs)
predictions = list()
for row in test:
prediction = predict(network, row)
predictions.append(prediction)
	return predictions
# Test Backprop on Seeds dataset
seed(1)
# load and prepare data
filename = 'seeds_dataset.csv'
dataset = load_csv(filename)
for i in range(len(dataset[0])-1):
str_column_to_float(dataset, i)
# convert class column to integers
str_column_to_int(dataset, len(dataset[0])-1)
# normalize input variables
minmax = dataset_minmax(dataset)
normalize_dataset(dataset, minmax)
# evaluate algorithm
n_folds = 5
l_rate = 0.3
n_epoch = 500
n_hidden = 5
scores = evaluate_algorithm(dataset, back_propagation, n_folds, l_rate, n_epoch, n_hidden)
print('Scores: %s' % scores)
print('Mean Accuracy: %.3f%%' % (sum(scores)/float(len(scores))))
| 30.15
| 95
| 0.717579
|
e4e11f28349293871ebd37362135361f975f37e2
| 2,151
|
py
|
Python
|
tests/models/symbol/flash_cache_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 5
|
2016-08-23T17:52:22.000Z
|
2019-05-16T08:45:30.000Z
|
tests/models/symbol/flash_cache_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 2
|
2016-11-10T05:30:21.000Z
|
2019-04-05T15:03:37.000Z
|
tests/models/symbol/flash_cache_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 7
|
2016-08-25T16:11:44.000Z
|
2021-02-22T05:31:25.000Z
|
#!/usr/bin/env python
# coding: utf-8
"""
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import unittest
from netapp.santricity.models.symbol.flash_cache import FlashCache
class FlashCacheTest(unittest.TestCase):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
# Try instantiating the model
def test_flash_cache(self):
flash_cache_obj = FlashCache()
self.assertNotEqual(flash_cache_obj, None)
| 56.605263
| 845
| 0.770339
|
07306d205eca301f58644266316d997c18fbe95f
| 6,150
|
py
|
Python
|
trax/supervised/lr_functions.py
|
pkol/trax
|
4dda0a5079b48ca8c325164884e677a87a0e3b37
|
[
"Apache-2.0"
] | 1
|
2020-03-15T00:50:06.000Z
|
2020-03-15T00:50:06.000Z
|
trax/supervised/lr_functions.py
|
pkol/trax
|
4dda0a5079b48ca8c325164884e677a87a0e3b37
|
[
"Apache-2.0"
] | null | null | null |
trax/supervised/lr_functions.py
|
pkol/trax
|
4dda0a5079b48ca8c325164884e677a87a0e3b37
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2020 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Learning rate (LR) schedules as functions of time (step number).
This is work in progress, intertwined with ongoing changes in supervised
training and optimizers. When complete, the learning rate schedules in this
file are intended to replace the prior ones in trax/lr_schedules.py. The
current package location/name (trax/supervised/lr_functions.py) is
correspondingly temporary.
"""
import math
def constant(constant_value):
"""Returns an LR schedule that is constant from time/step 1 to infinity."""
return _BodyAndTail(constant_value, body_start=1)
def warmup(n_warmup_steps, constant_value):
"""Returns an LR schedule with linear warm-up followed by constant value.
Args:
n_warmup_steps: Number of steps during which the learning rate rises on
a line connecting (0, 0) and (n_warmup_steps, constant_value).
    constant_value: Value for learning rate after warm-up has finished.
"""
return _BodyAndTail(constant_value, body_start=n_warmup_steps + 1)
def warmup_and_rsqrt_decay(n_warmup_steps, max_value):
"""Returns an LR schedule with warm-up + reciprocal square root decay."""
return _BodyAndTail(max_value, tail_start=n_warmup_steps + 1, tail_fn=_rsqrt)
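# Illustrative usage sketch (an assumption, not part of the original module):
# each factory above returns a plain callable from step number to LR.
#   lr_fn = warmup_and_rsqrt_decay(n_warmup_steps=1000, max_value=1e-3)
#   lr_fn(500)   # ~5e-4, on the linear warm-up ramp
#   lr_fn(1001)  # 1e-3, the peak value where the tail starts
#   lr_fn(4004)  # 5e-4, decayed as max_value * sqrt(1001) / sqrt(4004)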
class _BodyAndTail:
"""Defines a curve over time as a linear ramp + constant body + curvy tail.
The body is a span of constant learning rate, and can be the entire curve.
The warm-up, if present, is based on the line connecting points (0, 0) and
(body_start, body_value). The tail, if defined, is a function from time to
learning rate that is used for all training steps from tail_start on.
"""
def __init__(
self, body_value, body_start=None, tail_start=None, tail_fn=None):
"""Specifies a body-and-tail time curve.
Args:
body_value: Constant learning rate for the body of the curve (after
warm-up and before tail). Also is the reference (maximum) value for
calculating warm-up values and tail values.
body_start: Training step number at which the body starts. If None, takes
its value from tail_start, which amounts to there being no body. All
steps from 1 to body_start - 1 are computed using a linear warm-up.
tail_start: Training step number at which the tail starts. If None, the
body value remains until the end of training.
tail_fn: Function returning a floating point learning rate, given inputs:
- step_number (absolute step number from the start of training)
- tail_start (step number at which the tail starts)
- body_value (value relative to which the tail should be computed)
"""
if body_start is None and tail_start is None:
      raise ValueError('Both body start and tail start are None.')
if tail_start is not None and tail_fn is None:
raise ValueError(
f'Tail start has value ({tail_start}) but tail_fn is None.')
if body_start is None:
body_start = tail_start if tail_start is not None else 1
self._body_value = body_value
self._body_start = body_start
self._tail_start = tail_start
self._tail_fn = tail_fn
def __call__(self, step_number):
"""Returns the learning rate for the given step number."""
if step_number < self._body_start:
return (step_number / self._body_start) * self._body_value
elif self._tail_start is not None and step_number >= self._tail_start:
return self._tail_fn(step_number, self._tail_start, self._body_value)
else:
return self._body_value
def _rsqrt(step_number, tail_start, body_value):
"""Computes a tail using a scaled reciprocal square root of step number.
Args:
step_number: Absolute step number from the start of training.
tail_start: Step number at which the tail of the curve starts.
body_value: Value relative to which the tail should be computed.
Returns:
A learning rate value that falls as the reciprocal square root of the step
number, scaled so that it joins smoothly with the body of a BodyAndTail
instance.
"""
return body_value * (math.sqrt(tail_start) / math.sqrt(step_number))
class _CosineSawtoothTail:
"""Cosine-sawtooth-shaped tail that simulates warm restarts.
Creates a cyclic learning rate curve; each cycle is half of a cosine, falling
from maximum value to minimum value. For motivation and further details, see
Loshchilov & Hutter (2017) [https://arxiv.org/abs/1608.03983].
"""
def __init__(self, steps_per_cycle, min_value=1e-5):
"""Configures the periodic behavior of this learning rate function.
Args:
steps_per_cycle: Number of training steps per sawtooth cycle. The
learning rate will be highest at the start of each cycle, and lowest
at the end.
min_value: Minimum value, reached at the end of each cycle.
"""
self._steps_per_cycle = steps_per_cycle
self._min_value = min_value
def __call__(self, step_number, tail_start, body_value):
"""Returns the learning rate for the given step number, when in the tail.
Args:
step_number: Absolute step number from the start of training.
tail_start: Step number at which the tail of the curve starts.
body_value: Value relative to which the tail should be computed.
"""
max_value = body_value
min_value = self._min_value
position_in_cycle = (
((step_number - tail_start) / self._steps_per_cycle) % 1.0)
theta = math.pi * position_in_cycle
return min_value + (max_value - min_value) * .5 * (1 + math.cos(theta))
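# Illustrative sketch (an assumption): combining the pieces above into a
# cyclic schedule with a 100-step warm-up and 1000-step cosine restarts.
#   lr_fn = _BodyAndTail(1e-3, tail_start=101,
#                        tail_fn=_CosineSawtoothTail(steps_per_cycle=1000))
#   lr_fn(101)  # 1e-3, the top of the first cosine cycle
#   lr_fn(601)  # ~5e-4, halfway down the first half-cosine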
| 41
| 79
| 0.72748
|
fd24d5eb9f68c7839b08130a8c8b446d0cda6c9d
| 448
|
py
|
Python
|
Strings/String Validators.py
|
adi-797/Python-Hackerrank
|
b769cb07aca09ac0eda1d81c51b8e277101da870
|
[
"MIT"
] | 3
|
2018-08-26T15:49:54.000Z
|
2018-08-31T17:06:48.000Z
|
Strings/String Validators.py
|
adi-797/Python-Hackerrank
|
b769cb07aca09ac0eda1d81c51b8e277101da870
|
[
"MIT"
] | null | null | null |
Strings/String Validators.py
|
adi-797/Python-Hackerrank
|
b769cb07aca09ac0eda1d81c51b8e277101da870
|
[
"MIT"
] | null | null | null |
if __name__ == '__main__':
S = raw_input()
    print any(k in "0123456789" or k.lower() in "abcdefghijklmnopqrstuvwxyz" for k in S)
    print any(k.lower() in "abcdefghijklmnopqrstuvwxyz" for k in S)
    print any(k in "0123456789" for k in S)
    print any(k in "abcdefghijklmnopqrstuvwxyz" for k in S)
    print any(k in "ABCDEFGHIJKLMNOPQRSTUVWXYZ" for k in S)
| 49.777778
| 103
| 0.741071
|
d39d544a90649caffb86579847126d8b96b793a6
| 757
|
py
|
Python
|
setup.py
|
rivergold/vit-pytorch
|
6d1df1a9701dcef683b19abd09136ba23a01a770
|
[
"MIT"
] | null | null | null |
setup.py
|
rivergold/vit-pytorch
|
6d1df1a9701dcef683b19abd09136ba23a01a770
|
[
"MIT"
] | null | null | null |
setup.py
|
rivergold/vit-pytorch
|
6d1df1a9701dcef683b19abd09136ba23a01a770
|
[
"MIT"
] | null | null | null |
from setuptools import setup, find_packages
setup(
name = 'vit-pytorch',
packages = find_packages(exclude=['examples']),
version = '0.2.5',
license='MIT',
description = 'Vision Transformer (ViT) - Pytorch',
author = 'Phil Wang',
author_email = 'lucidrains@gmail.com',
url = 'https://github.com/lucidrains/vit-pytorch',
keywords = [
'artificial intelligence',
'attention mechanism',
'image recognition'
],
install_requires=[
'torch>=1.6',
'einops>=0.3'
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6',
],
)
| 26.103448
| 65
| 0.645971
|
36ab7276b22071eed6404f415669f57846a6ae9b
| 1,170
|
py
|
Python
|
mindinsight/profiler/proposer/allproposers/__init__.py
|
fapbatista/mindinsight
|
db5769eb80cbd13a2a9af7682c11f5667d8bf141
|
[
"Apache-2.0"
] | null | null | null |
mindinsight/profiler/proposer/allproposers/__init__.py
|
fapbatista/mindinsight
|
db5769eb80cbd13a2a9af7682c11f5667d8bf141
|
[
"Apache-2.0"
] | null | null | null |
mindinsight/profiler/proposer/allproposers/__init__.py
|
fapbatista/mindinsight
|
db5769eb80cbd13a2a9af7682c11f5667d8bf141
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""All proposers."""
from mindinsight.profiler.proposer.allproposers.common_proposer import CommonProposer
from mindinsight.profiler.proposer.allproposers.minddata_proposer import MinddataProposer
from mindinsight.profiler.proposer.allproposers.step_trace_proposer import StepTraceProposer
from mindinsight.profiler.proposer.allproposers.minddata_pipeline_proposer import \
MinddataPipelineProposer
__all__ = ["CommonProposer", "StepTraceProposer", "MinddataProposer", "MinddataPipelineProposer"]
| 48.75
| 97
| 0.757265
|
9180e8dc918cd217acd962eea812858197ff3dcc
| 767
|
py
|
Python
|
src/users/models/microsoftgraphshare_action.py
|
peombwa/Sample-Graph-Python-Client
|
3396f531fbe6bb40a740767c4e31aee95a3b932e
|
[
"MIT"
] | null | null | null |
src/users/models/microsoftgraphshare_action.py
|
peombwa/Sample-Graph-Python-Client
|
3396f531fbe6bb40a740767c4e31aee95a3b932e
|
[
"MIT"
] | null | null | null |
src/users/models/microsoftgraphshare_action.py
|
peombwa/Sample-Graph-Python-Client
|
3396f531fbe6bb40a740767c4e31aee95a3b932e
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class MicrosoftgraphshareAction(Model):
"""shareAction.
:param recipients:
:type recipients: list[~users.models.MicrosoftgraphidentitySet]
"""
_attribute_map = {
'recipients': {'key': 'recipients', 'type': '[MicrosoftgraphidentitySet]'},
}
def __init__(self, recipients=None):
super(MicrosoftgraphshareAction, self).__init__()
self.recipients = recipients
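# Illustrative sketch (an assumption): the model is a plain msrest Model, so
# it can be constructed directly; `some_identity_set` is a hypothetical
# MicrosoftgraphidentitySet instance.
#   action = MicrosoftgraphshareAction(recipients=[some_identity_set])
#   action.recipients  # -> [some_identity_set]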
| 30.68
| 83
| 0.564537
|
f4c45423f6c68e91515e8824703653e642ff6ee4
| 206,508
|
py
|
Python
|
test/unit/common/middleware/test_slo.py
|
aguirguis/swift
|
2aaeab6f5ddfdbcd75eeee970287f08f19de19ef
|
[
"Apache-2.0"
] | 1,802
|
2015-01-02T11:25:47.000Z
|
2022-03-28T15:02:11.000Z
|
test/unit/common/middleware/test_slo.py
|
aguirguis/swift
|
2aaeab6f5ddfdbcd75eeee970287f08f19de19ef
|
[
"Apache-2.0"
] | 15
|
2015-01-04T09:12:39.000Z
|
2021-04-15T13:05:37.000Z
|
test/unit/common/middleware/test_slo.py
|
aguirguis/swift
|
2aaeab6f5ddfdbcd75eeee970287f08f19de19ef
|
[
"Apache-2.0"
] | 814
|
2015-01-04T15:14:01.000Z
|
2022-03-15T00:15:49.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
import time
import unittest
from mock import patch
import six
from io import BytesIO
from swift.common import swob, utils
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.middleware import slo
from swift.common.swob import Request, HTTPException, str_to_wsgi, \
bytes_to_wsgi
from swift.common.utils import quote, closing_if_possible, close_if_possible, \
parse_content_type, iter_multipart_mime_documents, parse_mime_headers, \
Timestamp, get_expirer_container, md5
from test.unit.common.middleware.helpers import FakeSwift
test_xml_data = '''<?xml version="1.0" encoding="UTF-8"?>
<static_large_object>
<object_segment>
<path>/cont/object</path>
<etag>etagoftheobjectsegment</etag>
<size_bytes>100</size_bytes>
</object_segment>
</static_large_object>
'''
test_json_data = json.dumps([{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
def fake_start_response(*args, **kwargs):
pass
def md5hex(s):
if not isinstance(s, bytes):
s = s.encode('ascii')
return md5(s, usedforsecurity=False).hexdigest()
class SloTestCase(unittest.TestCase):
def setUp(self):
self.app = FakeSwift()
slo_conf = {'rate_limit_under_size': '0'}
self.slo = slo.filter_factory(slo_conf)(self.app)
self.slo.logger = self.app.logger
self.manifest_abcd_etag = md5hex(
md5hex("a" * 5) + md5hex(md5hex("b" * 10) + md5hex("c" * 15)) +
md5hex("d" * 20))
def call_app(self, req, app=None):
if app is None:
app = self.app
req.headers.setdefault("User-Agent", "Mozzarella Foxfire")
status = [None]
headers = [None]
def start_response(s, h, ei=None):
status[0] = s
headers[0] = h
body_iter = app(req.environ, start_response)
body = b''
# appease the close-checker
with closing_if_possible(body_iter):
for chunk in body_iter:
body += chunk
return status[0], headers[0], body
def call_slo(self, req, **kwargs):
return self.call_app(req, app=self.slo, **kwargs)
class TestSloMiddleware(SloTestCase):
def setUp(self):
super(TestSloMiddleware, self).setUp()
self.app.register(
'GET', '/', swob.HTTPOk, {}, b'passed')
self.app.register(
'PUT', '/', swob.HTTPOk, {}, b'passed')
def test_handle_multipart_no_obj(self):
req = Request.blank('/')
resp_iter = self.slo(req.environ, fake_start_response)
self.assertEqual(self.app.calls, [('GET', '/')])
self.assertEqual(b''.join(resp_iter), b'passed')
def test_slo_header_assigned(self):
req = Request.blank(
'/v1/a/c/o', headers={'x-static-large-object': "true"},
environ={'REQUEST_METHOD': 'PUT'})
resp = b''.join(self.slo(req.environ, fake_start_response))
self.assertTrue(
resp.startswith(b'X-Static-Large-Object is a reserved header'))
def test_slo_PUT_env_override(self):
path = '/v1/a/c/o'
body = b'manifest body not checked when override flag set'
resp_status = []
def start_response(status, headers, *args):
resp_status.append(status)
req = Request.blank(
path, headers={'x-static-large-object': "true"},
environ={'REQUEST_METHOD': 'PUT', 'swift.slo_override': True},
body=body)
self.app.register('PUT', path, swob.HTTPCreated, {})
resp_iter = self.slo(req.environ, start_response)
self.assertEqual(b'', b''.join(resp_iter))
self.assertEqual(self.app.calls, [('PUT', path)])
self.assertEqual(body, self.app.uploaded[path][1])
self.assertEqual(resp_status[0], '201 Created')
def _put_bogus_slo(self, manifest_text,
manifest_path='/v1/a/c/the-manifest'):
with self.assertRaises(HTTPException) as catcher:
slo.parse_and_validate_input(manifest_text, manifest_path)
self.assertEqual(400, catcher.exception.status_int)
return catcher.exception.body.decode('utf-8')
def _put_slo(self, manifest_text, manifest_path='/v1/a/c/the-manifest'):
return slo.parse_and_validate_input(manifest_text, manifest_path)
def test_bogus_input(self):
self.assertEqual('Manifest must be valid JSON.\n',
self._put_bogus_slo('some non json'))
self.assertEqual('Manifest must be a list.\n',
self._put_bogus_slo('{}'))
self.assertEqual('Index 0: not a JSON object\n',
self._put_bogus_slo('["zombocom"]'))
def test_bogus_input_bad_keys(self):
self.assertEqual(
"Index 0: extraneous keys \"baz\", \"foo\"\n",
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
'size_bytes': 100,
'foo': 'bar', 'baz': 'quux'}])))
# This also catches typos
self.assertEqual(
'Index 0: extraneous keys "egat"\n',
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'egat': 'etagoftheobjectsegment',
'size_bytes': 100}])))
self.assertEqual(
'Index 0: extraneous keys "siez_bytes"\n',
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
'siez_bytes': 100}])))
def test_bogus_input_ranges(self):
self.assertEqual(
"Index 0: invalid range\n",
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'blah',
'size_bytes': 100, 'range': 'non-range value'}])))
self.assertEqual(
"Index 0: multiple ranges (only one allowed)\n",
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'blah',
'size_bytes': 100, 'range': '1-20,30-40'}])))
def test_bogus_input_unsatisfiable_range(self):
self.assertEqual(
"Index 0: unsatisfiable range\n",
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'blah',
'size_bytes': 100, 'range': '8888-9999'}])))
# since size is optional, we have to be able to defer this check
segs = self._put_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'blah',
'size_bytes': None, 'range': '8888-9999'}]))
self.assertEqual(1, len(segs))
def test_bogus_input_path(self):
self.assertEqual(
"Index 0: path does not refer to an object. Path must be of the "
"form /container/object.\n"
"Index 1: path does not refer to an object. Path must be of the "
"form /container/object.\n",
self._put_bogus_slo(json.dumps(
[{'path': '/cont', 'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'path': '/c-trailing-slash/', 'etag': 'e',
'size_bytes': 100},
{'path': '/con/obj', 'etag': 'e',
'size_bytes': 100},
{'path': '/con/obj-trailing-slash/', 'etag': 'e',
'size_bytes': 100},
{'path': '/con/obj/with/slashes', 'etag': 'e',
'size_bytes': 100}])))
def test_bogus_input_multiple(self):
self.assertEqual(
"Index 0: invalid range\nIndex 1: not a JSON object\n",
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
'size_bytes': 100, 'range': 'non-range value'},
None])))
def test_bogus_input_size_bytes(self):
self.assertEqual(
"Index 0: invalid size_bytes\n",
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'blah', 'size_bytes': "fht"},
{'path': '/cont/object', 'etag': 'blah', 'size_bytes': None},
{'path': '/cont/object', 'etag': 'blah', 'size_bytes': 100}],
)))
self.assertEqual(
"Index 0: invalid size_bytes\n",
self._put_bogus_slo(json.dumps(
[{'path': '/cont/object', 'etag': 'blah', 'size_bytes': []}],
)))
def test_bogus_input_self_referential(self):
self.assertEqual(
"Index 0: manifest must not include itself as a segment\n",
self._put_bogus_slo(json.dumps(
[{'path': '/c/the-manifest', 'etag': 'gate',
'size_bytes': 100, 'range': 'non-range value'}])))
def test_bogus_input_self_referential_non_ascii(self):
self.assertEqual(
"Index 0: manifest must not include itself as a segment\n",
self._put_bogus_slo(
json.dumps([{'path': u'/c/あ_1',
'etag': 'a', 'size_bytes': 1}]),
manifest_path=quote(u'/v1/a/c/あ_1')))
def test_bogus_input_self_referential_last_segment(self):
test_json_data = json.dumps([
{'path': '/c/seg_1', 'etag': 'a', 'size_bytes': 1},
{'path': '/c/seg_2', 'etag': 'a', 'size_bytes': 1},
{'path': '/c/seg_3', 'etag': 'a', 'size_bytes': 1},
{'path': '/c/the-manifest', 'etag': 'a', 'size_bytes': 1},
]).encode('ascii')
self.assertEqual(
"Index 3: manifest must not include itself as a segment\n",
self._put_bogus_slo(
test_json_data,
manifest_path=quote('/v1/a/c/the-manifest')))
def test_bogus_input_undersize_segment(self):
self.assertEqual(
"Index 1: too small; each segment "
"must be at least 1 byte.\n"
"Index 2: too small; each segment "
"must be at least 1 byte.\n",
self._put_bogus_slo(
json.dumps([
{'path': u'/c/s1', 'etag': 'a', 'size_bytes': 1},
{'path': u'/c/s2', 'etag': 'b', 'size_bytes': 0},
{'path': u'/c/s3', 'etag': 'c', 'size_bytes': 0},
# No error for this one since size_bytes is unspecified
{'path': u'/c/s4', 'etag': 'd', 'size_bytes': None},
{'path': u'/c/s5', 'etag': 'e', 'size_bytes': 1000}])))
def test_valid_input(self):
data = json.dumps(
[{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
'size_bytes': 100}])
self.assertEqual(
'/cont/object',
slo.parse_and_validate_input(data, '/v1/a/cont/man')[0]['path'])
data = json.dumps(
[{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
'size_bytes': 100, 'range': '0-40'}])
parsed = slo.parse_and_validate_input(data, '/v1/a/cont/man')
self.assertEqual('/cont/object', parsed[0]['path'])
self.assertEqual([(0, 40)], parsed[0]['range'].ranges)
data = json.dumps(
[{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
'size_bytes': None, 'range': '0-40'}])
parsed = slo.parse_and_validate_input(data, '/v1/a/cont/man')
self.assertEqual('/cont/object', parsed[0]['path'])
self.assertIsNone(parsed[0]['size_bytes'])
self.assertEqual([(0, 40)], parsed[0]['range'].ranges)
def test_container_listing(self):
listing_json = json.dumps([{
"bytes": 104857600,
"content_type": "application/x-troff-me",
"hash": "8de7b0b1551660da51d8d96a53b85531; this=that;"
"slo_etag=dc9947c2b53a3f55fe20c1394268e216",
"last_modified": "2018-07-12T03:14:39.532020",
"name": "test.me"
}]).encode('ascii')
self.app.register(
'GET', '/v1/a/c',
swob.HTTPOk,
{'Content-Type': 'application/json',
'Content-Length': len(listing_json)},
listing_json)
req = Request.blank('/v1/a/c', method='GET')
status, headers, body = self.call_slo(req)
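        # The middleware is expected to split the slo_etag parameter out of
        # the backend hash and report it as a separate quoted "slo_etag" key.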
self.assertEqual(json.loads(body), [{
"slo_etag": '"dc9947c2b53a3f55fe20c1394268e216"',
"hash": "8de7b0b1551660da51d8d96a53b85531; this=that",
"name": "test.me",
"bytes": 104857600,
"last_modified": "2018-07-12T03:14:39.532020",
"content_type": "application/x-troff-me",
}])
class TestSloPutManifest(SloTestCase):
def setUp(self):
super(TestSloPutManifest, self).setUp()
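        # Register canned backend responses for the segment HEADs and the
        # manifest PUT/GET/DELETE requests used by the tests below.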
self.app.register(
'GET', '/', swob.HTTPOk, {}, b'passed')
self.app.register(
'PUT', '/', swob.HTTPOk, {}, b'passed')
self.app.register(
'HEAD', '/v1/AUTH_test/cont/missing-object',
swob.HTTPNotFound, {}, None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/object',
swob.HTTPOk,
{'Content-Length': '100', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/object2',
swob.HTTPOk,
{'Content-Length': '100', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/object\xe2\x99\xa1',
swob.HTTPOk,
{'Content-Length': '100', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/small_object',
swob.HTTPOk,
{'Content-Length': '10', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/empty_object',
swob.HTTPOk,
{'Content-Length': '0', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', u'/v1/AUTH_test/cont/あ_1',
swob.HTTPOk,
{'Content-Length': '1', 'Etag': 'a'},
None)
self.app.register(
'PUT', '/v1/AUTH_test/c/man', swob.HTTPCreated,
{'Last-Modified': 'Fri, 01 Feb 2012 20:38:36 GMT'}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/c/man', swob.HTTPNoContent, {}, None)
self.app.register(
'HEAD', '/v1/AUTH_test/checktest/a_1',
swob.HTTPOk,
{'Content-Length': '1', 'Etag': 'a'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/checktest/badreq',
swob.HTTPBadRequest, {}, None)
self.app.register(
'HEAD', '/v1/AUTH_test/checktest/b_2',
swob.HTTPOk,
{'Content-Length': '2', 'Etag': 'b',
'Last-Modified': 'Fri, 01 Feb 2012 20:38:36 GMT'},
None)
_manifest_json = json.dumps(
[{'name': '/checktest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'}]).encode('ascii')
self.app.register(
'GET', '/v1/AUTH_test/checktest/slob',
swob.HTTPOk,
{'X-Static-Large-Object': 'true', 'Etag': 'slob-etag',
'Content-Type': 'cat/picture',
'Content-Length': len(_manifest_json)},
_manifest_json)
self.app.register(
'PUT', '/v1/AUTH_test/checktest/man_3', swob.HTTPCreated, {}, None)
def test_put_manifest_too_quick_fail(self):
req = Request.blank('/v1/a/c/o?multipart-manifest=put', method='PUT')
req.content_length = self.slo.max_manifest_size + 1
status, headers, body = self.call_slo(req)
self.assertEqual(status, '413 Request Entity Too Large')
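        # With max_manifest_segments patched to zero, any manifest has too
        # many segments and is likewise rejected up front with a 413.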
with patch.object(self.slo, 'max_manifest_segments', 0):
req = Request.blank('/v1/a/c/o?multipart-manifest=put',
method='PUT', body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '413 Request Entity Too Large')
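        # A manifest PUT may not be combined with X-Copy-From.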
req = Request.blank('/v1/a/c/o?multipart-manifest=put', method='PUT',
headers={'X-Copy-From': 'lala'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '405 Method Not Allowed')
# we already validated that there are enough path segments in __call__
for path in ('/', '/v1/', '/v1/a/', '/v1/a/c/'):
req = Request.blank(
path + '?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=test_json_data)
with self.assertRaises(ValueError):
list(self.slo.handle_multipart_put(req, fake_start_response))
req = Request.blank(
path.rstrip('/') + '?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=test_json_data)
with self.assertRaises(ValueError):
list(self.slo.handle_multipart_put(req, fake_start_response))
def test_handle_multipart_put_success(self):
override_header = 'X-Object-Sysmeta-Container-Update-Override-Etag'
headers = {
'Accept': 'test',
override_header: '; params=are important',
}
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, headers=headers,
body=test_json_data)
for h in ('X-Static-Large-Object', 'X-Object-Sysmeta-Slo-Etag',
'X-Object-Sysmeta-Slo-Size'):
# Sanity
self.assertNotIn(h, req.headers)
status, headers, body = self.call_slo(req)
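        # The generated SLO etag is the md5 of the concatenated segment etags.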
gen_etag = '"' + md5hex('etagoftheobjectsegment') + '"'
self.assertIn(('Etag', gen_etag), headers)
self.assertIn('X-Static-Large-Object', req.headers)
self.assertEqual(req.headers['X-Static-Large-Object'], 'True')
self.assertIn('Etag', req.headers)
self.assertIn('X-Object-Sysmeta-Slo-Etag', req.headers)
self.assertIn('X-Object-Sysmeta-Container-Update-Override-Etag',
req.headers)
self.assertEqual(req.headers['X-Object-Sysmeta-Slo-Etag'],
gen_etag.strip('"'))
self.assertEqual(
req.headers['X-Object-Sysmeta-Container-Update-Override-Etag'],
'%s; params=are important; slo_etag=%s' % (
req.headers['Etag'], gen_etag.strip('"')))
self.assertIn('X-Object-Sysmeta-Slo-Size', req.headers)
self.assertEqual(req.headers['X-Object-Sysmeta-Slo-Size'], '100')
self.assertIn('Content-Type', req.headers)
self.assertTrue(
req.headers['Content-Type'].endswith(';swift_bytes=100'),
'Content-Type %r does not end with swift_bytes=100' %
req.headers['Content-Type'])
@patch('swift.common.middleware.slo.time')
def test_handle_multipart_put_fast_heartbeat(self, mock_time):
mock_time.time.side_effect = [
0, # start time
1, # first segment's fast
2, # second segment's also fast!
]
test_json_data = json.dumps([{'path': u'/cont/object\u2661',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual('202 Accepted', status)
headers_found = [h.lower() for h, v in headers]
self.assertNotIn('etag', headers_found)
gen_etag = '"' + md5hex('etagoftheobjectsegment' * 2) + '"'
        self.assertTrue(body.startswith(b' \r\n\r\n'),
                        'Expected body to start with a single space and two '
                        'blank lines; got %r' % body)
self.assertIn(b'\nResponse Status: 201 Created\n', body)
self.assertIn(b'\nResponse Body: \n', body)
self.assertIn(('\nEtag: %s\n' % gen_etag).encode('ascii'), body)
self.assertIn(b'\nLast Modified: Fri, 01 Feb 2012 20:38:36 GMT\n',
body)
@patch('swift.common.middleware.slo.time')
def test_handle_multipart_long_running_put_success(self, mock_time):
mock_time.time.side_effect = [
0, # start time
1, # first segment's fast
20, # second segment's slow
]
test_json_data = json.dumps([{'path': u'/cont/object\u2661',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual('202 Accepted', status)
headers_found = [h.lower() for h, v in headers]
self.assertNotIn('etag', headers_found)
gen_etag = '"' + md5hex('etagoftheobjectsegment' * 2) + '"'
self.assertTrue(body.startswith(b' \r\n\r\n'),
'Expected body to start with two spaces and two '
'blank lines; got %r' % body)
self.assertIn(b'\nResponse Status: 201 Created\n', body)
self.assertIn(b'\nResponse Body: \n', body)
self.assertIn(('\nEtag: %s\n' % gen_etag).encode('ascii'), body)
self.assertIn(b'\nLast Modified: Fri, 01 Feb 2012 20:38:36 GMT\n',
body)
@patch('swift.common.middleware.slo.time')
def test_handle_multipart_long_running_put_success_json(self, mock_time):
mock_time.time.side_effect = [
0, # start time
11, # first segment's slow
22, # second segment's also slow
]
test_json_data = json.dumps([{'path': u'/cont/object\u2661',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Accept': 'application/json'},
body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual('202 Accepted', status)
headers_found = [h.lower() for h, v in headers]
self.assertNotIn('etag', headers_found)
gen_etag = '"' + md5hex('etagoftheobjectsegment' * 2) + '"'
self.assertTrue(body.startswith(b' \r\n\r\n'),
'Expected body to start with three spaces and two '
'blank lines; got %r' % body)
body = json.loads(body)
self.assertEqual(body['Response Status'], '201 Created')
self.assertEqual(body['Response Body'], '')
self.assertEqual(body['Etag'], gen_etag)
self.assertEqual(body['Last Modified'],
'Fri, 01 Feb 2012 20:38:36 GMT')
@patch('swift.common.middleware.slo.time')
def test_handle_multipart_long_running_put_failure(self, mock_time):
mock_time.time.side_effect = [
0, # start time
1, # first segment's fast
20, # second segment's slow
]
test_json_data = json.dumps([{'path': u'/cont/missing-object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 99}]).encode('ascii')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual('202 Accepted', status)
headers_found = [h.lower() for h, v in headers]
self.assertNotIn('etag', headers_found)
body = body.split(b'\n')
self.assertEqual([b' \r', b'\r'], body[:2],
'Expected body to start with two spaces and two '
'blank lines; got %r' % b'\n'.join(body))
self.assertIn(b'Response Status: 400 Bad Request', body[2:5])
self.assertIn(b'Response Body: Bad Request', body)
self.assertIn(b'The server could not comply with the request since it '
b'is either malformed or otherwise incorrect.', body)
self.assertFalse(any(line.startswith(b'Etag: ') for line in body))
self.assertFalse(any(line.startswith(b'Last Modified: ')
for line in body))
self.assertEqual(body[-4], b'Errors:')
self.assertEqual(sorted(body[-3:-1]), [
b'/cont/missing-object, 404 Not Found',
b'/cont/object, Size Mismatch',
])
self.assertEqual(body[-1], b'')
@patch('swift.common.middleware.slo.time')
def test_handle_multipart_long_running_put_failure_json(self, mock_time):
mock_time.time.side_effect = [
0, # start time
11, # first segment's slow
22, # second segment's also slow
]
test_json_data = json.dumps([{'path': u'/cont/object\u2661',
'etag': 'etagoftheobjectsegment',
'size_bytes': 99},
{'path': '/cont/object',
'etag': 'some other etag',
'size_bytes': 100}]).encode('ascii')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Accept': 'application/json'},
body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual('202 Accepted', status)
headers_found = [h.lower() for h, v in headers]
self.assertNotIn('etag', headers_found)
self.assertTrue(body.startswith(b' \r\n\r\n'),
'Expected body to start with three spaces and two '
'blank lines; got %r' % body)
body = json.loads(body)
self.assertEqual(body['Response Status'], '400 Bad Request')
self.assertEqual(body['Response Body'], 'Bad Request\nThe server '
'could not comply with the request since it is '
'either malformed or otherwise incorrect.')
self.assertNotIn('Etag', body)
self.assertNotIn('Last Modified', body)
self.assertEqual(sorted(body['Errors']), [
['/cont/object', 'Etag Mismatch'],
[quote(u'/cont/object\u2661'.encode('utf8')).decode('ascii'),
'Size Mismatch'],
])
@patch('swift.common.middleware.slo.time')
def test_handle_multipart_long_running_put_bad_etag_json(self, mock_time):
mock_time.time.side_effect = [
0, # start time
11, # first segment's slow
22, # second segment's also slow
]
test_json_data = json.dumps([{'path': u'/cont/object\u2661',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Accept': 'application/json', 'ETag': 'bad etag'},
body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual('202 Accepted', status)
headers_found = [h.lower() for h, v in headers]
self.assertNotIn('etag', headers_found)
self.assertTrue(body.startswith(b' \r\n\r\n'),
'Expected body to start with three spaces and two '
'blank lines; got %r' % body)
body = json.loads(body)
self.assertEqual(body['Response Status'], '422 Unprocessable Entity')
self.assertEqual('Unprocessable Entity\nUnable to process the '
'contained instructions', body['Response Body'])
self.assertNotIn('Etag', body)
self.assertNotIn('Last Modified', body)
self.assertEqual(body['Errors'], [])
def test_manifest_put_no_etag_success(self):
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
resp = req.get_response(self.slo)
self.assertEqual(resp.status_int, 201)
def test_manifest_put_with_etag_success(self):
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
req.headers['Etag'] = md5hex('etagoftheobjectsegment')
resp = req.get_response(self.slo)
self.assertEqual(resp.status_int, 201)
def test_manifest_put_with_etag_with_quotes_success(self):
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
req.headers['Etag'] = '"%s"' % md5hex('etagoftheobjectsegment')
resp = req.get_response(self.slo)
self.assertEqual(resp.status_int, 201)
def test_manifest_put_bad_etag_fail(self):
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
req.headers['Etag'] = md5hex('NOTetagoftheobjectsegment')
resp = req.get_response(self.slo)
self.assertEqual(resp.status_int, 422)
def test_handle_multipart_put_disallow_empty_first_segment(self):
test_json_data = json.dumps([{'path': '/cont/small_object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 0},
{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank('/v1/a/c/o?multipart-manifest=put',
method='PUT', body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '400 Bad Request')
def test_handle_multipart_put_allow_empty_last_segment(self):
test_json_data = json.dumps([{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'path': '/cont/empty_object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 0}]).encode('ascii')
req = Request.blank('/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '201 Created')
def test_handle_multipart_put_invalid_data(self):
def do_test(bad_data):
test_json_data = json.dumps([{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'data': bad_data}]).encode('ascii')
req = Request.blank('/v1/a/c/o', body=test_json_data)
with self.assertRaises(HTTPException) as catcher:
self.slo.handle_multipart_put(req, fake_start_response)
self.assertEqual(catcher.exception.status_int, 400)
do_test('invalid') # insufficient padding
do_test(12345)
do_test(0)
do_test(True)
do_test(False)
do_test(None)
do_test({})
do_test([])
# Empties are no good, either
do_test('')
do_test('====')
def test_handle_multipart_put_success_unicode(self):
test_json_data = json.dumps([{'path': u'/cont/object\u2661',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
body=test_json_data)
self.assertNotIn('X-Static-Large-Object', req.headers)
self.call_slo(req)
self.assertIn('X-Static-Large-Object', req.headers)
self.assertEqual(req.environ['PATH_INFO'], '/v1/AUTH_test/c/man')
self.assertIn(('HEAD', '/v1/AUTH_test/cont/object\xe2\x99\xa1'),
self.app.calls)
def test_handle_multipart_put_no_xml(self):
req = Request.blank(
'/test_good/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
body=test_xml_data)
no_xml = list(self.slo(req.environ, fake_start_response))
self.assertEqual(no_xml, [b'Manifest must be valid JSON.\n'])
def test_handle_multipart_put_bad_data(self):
bad_data = json.dumps([{'path': '/cont/object',
'etag': 'etagoftheobj',
'size_bytes': 'lala'}])
req = Request.blank(
'/test_good/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=bad_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '400 Bad Request')
self.assertIn(b'invalid size_bytes', body)
for bad_data in [
json.dumps([{'path': '/cont', 'etag': 'etagoftheobj',
'size_bytes': 100}]),
json.dumps('asdf'), json.dumps(None), json.dumps(5),
'not json', '1234', '', json.dumps({'path': None}),
json.dumps([{'path': '/cont/object', 'etag': None,
'size_bytes': 12}]),
json.dumps([{'path': '/cont/object', 'etag': 'asdf',
'size_bytes': 'sd'}]),
json.dumps([{'path': 12, 'etag': 'etagoftheobj',
'size_bytes': 100}]),
json.dumps([{'path': u'/cont/object\u2661',
'etag': 'etagoftheobj', 'size_bytes': 100}]),
json.dumps([{'path': 12, 'size_bytes': 100}]),
json.dumps([{'path': '/c/o', 'etag': 123, 'size_bytes': 100}]),
json.dumps([{'path': None, 'etag': 'etagoftheobj',
'size_bytes': 100}])]:
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=bad_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '400 Bad Request')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=None)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '411 Length Required')
def test_handle_multipart_put_check_data(self):
good_data = json.dumps(
[{'path': '/checktest/a_1', 'etag': 'a', 'size_bytes': '1'},
{'path': '/checktest/b_2', 'etag': 'b', 'size_bytes': '2'}])
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=good_data)
status, headers, body = self.call_slo(req)
self.assertEqual(self.app.call_count, 3)
# go behind SLO's back and see what actually got stored
req = Request.blank(
# this string looks weird, but it's just an artifact
# of FakeSwift
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_app(req)
headers = dict(headers)
manifest_data = json.loads(body)
self.assertTrue(headers['Content-Type'].endswith(';swift_bytes=3'))
self.assertEqual(len(manifest_data), 2)
self.assertEqual(manifest_data[0]['hash'], 'a')
self.assertEqual(manifest_data[0]['bytes'], 1)
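        # a_1's registered HEAD response has no Last-Modified header, so its
        # entry should not carry the 2012 timestamp; b_2's registered 2012
        # Last-Modified should show up in the stored manifest.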
        self.assertFalse(
            manifest_data[0]['last_modified'].startswith('2012'))
self.assertTrue(manifest_data[1]['last_modified'].startswith('2012'))
def test_handle_multipart_put_check_data_bad(self):
bad_data = json.dumps(
[{'path': '/checktest/a_1', 'etag': 'a', 'size_bytes': '2'},
{'path': '/checktest/badreq', 'etag': 'a', 'size_bytes': '1'},
{'path': '/checktest/b_2', 'etag': 'not-b', 'size_bytes': '2'},
{'path': '/checktest/slob', 'etag': 'not-slob',
'size_bytes': '12345'}])
req = Request.blank(
'/v1/AUTH_test/checktest/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Accept': 'application/json'},
body=bad_data)
status, headers, body = self.call_slo(req)
self.assertEqual(self.app.call_count, 5)
errors = json.loads(body)['Errors']
self.assertEqual([
[u'/checktest/a_1', u'Size Mismatch'],
[u'/checktest/b_2', u'Etag Mismatch'],
[u'/checktest/badreq', u'400 Bad Request'],
[u'/checktest/slob', u'Etag Mismatch'],
[u'/checktest/slob', u'Size Mismatch'],
], sorted(errors))
def test_handle_multipart_put_skip_size_check(self):
good_data = json.dumps([
# Explicit None will skip it
{'path': '/checktest/a_1', 'etag': 'a', 'size_bytes': None},
# ...as will omitting it entirely
{'path': '/checktest/b_2', 'etag': 'b'}])
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=good_data)
status, headers, body = self.call_slo(req)
self.assertEqual(self.app.call_count, 3)
# Check that we still populated the manifest properly from our HEADs
req = Request.blank(
# this string looks weird, but it's just an artifact
# of FakeSwift
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_app(req)
manifest_data = json.loads(body)
self.assertEqual(1, manifest_data[0]['bytes'])
self.assertEqual(2, manifest_data[1]['bytes'])
def test_handle_multipart_put_skip_size_check_still_uses_min_size(self):
test_json_data = json.dumps([{'path': '/cont/empty_object',
'etag': 'etagoftheobjectsegment',
'size_bytes': None},
{'path': '/cont/small_object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank('/v1/AUTH_test/c/o?multipart-manifest=put',
method='PUT', body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '400 Bad Request')
self.assertIn(b'Too small; each segment must be at least 1 byte', body)
def test_handle_multipart_put_skip_size_check_no_early_bailout(self):
# The first is too small (it's 0 bytes), and
# the second has a bad etag. Make sure both errors show up in
# the response.
test_json_data = json.dumps([{'path': '/cont/empty_object',
'etag': 'etagoftheobjectsegment',
'size_bytes': None},
{'path': '/cont/object2',
'etag': 'wrong wrong wrong',
'size_bytes': 100}]).encode('ascii')
req = Request.blank('/v1/AUTH_test/c/o?multipart-manifest=put',
method='PUT', body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '400 Bad Request')
self.assertIn(b'at least 1 byte', body)
self.assertIn(b'Etag Mismatch', body)
def test_handle_multipart_put_skip_etag_check(self):
good_data = json.dumps([
# Explicit None will skip it
{'path': '/checktest/a_1', 'etag': None, 'size_bytes': 1},
# ...as will omitting it entirely
{'path': '/checktest/b_2', 'size_bytes': 2}])
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=good_data)
status, headers, body = self.call_slo(req)
self.assertEqual(self.app.call_count, 3)
# Check that we still populated the manifest properly from our HEADs
req = Request.blank(
# this string looks weird, but it's just an artifact
# of FakeSwift
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_app(req)
manifest_data = json.loads(body)
self.assertEqual('a', manifest_data[0]['hash'])
self.assertEqual('b', manifest_data[1]['hash'])
def test_handle_multipart_put_with_manipulator_callback(self):
def data_inserter(manifest):
for i in range(len(manifest), -1, -1):
manifest.insert(i, {'data': 'WA=='})
good_data = json.dumps([
{'path': '/checktest/a_1'},
{'path': '/checktest/b_2'}])
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT',
'swift.callback.slo_manifest_hook': data_inserter},
body=good_data)
status, headers, body = self.call_slo(req)
self.assertEqual(self.app.call_count, 3)
# Check that we still populated the manifest properly from our HEADs
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_app(req)
manifest_data = json.loads(body)
self.assertEqual([
{k: v for k, v in item.items()
if k in ('name', 'bytes', 'hash', 'data')}
for item in manifest_data
], [
{'data': 'WA=='},
{'name': '/checktest/a_1', 'bytes': 1, 'hash': 'a'},
{'data': 'WA=='},
{'name': '/checktest/b_2', 'bytes': 2, 'hash': 'b'},
{'data': 'WA=='},
])
def test_handle_multipart_put_with_validator_callback(self):
def complainer(manifest):
return [(item['name'], "Don't wanna") for item in manifest]
good_data = json.dumps([
{'path': '/checktest/a_1'},
{'path': '/checktest/b_2'}])
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT',
'swift.callback.slo_manifest_hook': complainer},
body=good_data)
status, headers, body = self.call_slo(req)
self.assertEqual(self.app.call_count, 2)
self.assertEqual(status, '400 Bad Request')
body = body.split(b'\n')
self.assertIn(b"/checktest/a_1, Don't wanna", body)
self.assertIn(b"/checktest/b_2, Don't wanna", body)
def test_handle_unsatisfiable_ranges(self):
bad_data = json.dumps(
[{'path': '/checktest/a_1', 'etag': None,
'size_bytes': None, 'range': '1-'}])
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=bad_data)
status, headers, body = self.call_slo(req)
self.assertEqual('400 Bad Request', status)
self.assertIn(b"Unsatisfiable Range", body)
def test_handle_multipart_put_success_conditional(self):
test_json_data = json.dumps([{'path': u'/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, headers={'If-None-Match': '*'},
body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual(('201 Created', b''), (status, body))
self.assertEqual([
('HEAD', '/v1/AUTH_test/cont/object'),
('PUT', '/v1/AUTH_test/c/man?multipart-manifest=put'),
], self.app.calls)
# HEAD shouldn't be conditional
self.assertNotIn('If-None-Match', self.app.headers[0])
# But the PUT should be
self.assertIn('If-None-Match', self.app.headers[1])
self.assertEqual('*', self.app.headers[1]['If-None-Match'])
def test_handle_single_ranges(self):
good_data = json.dumps(
[{'path': '/checktest/a_1', 'etag': None,
'size_bytes': None, 'range': '0-0'},
{'path': '/checktest/b_2', 'etag': None,
'size_bytes': 2, 'range': '-1'},
{'path': '/checktest/b_2', 'etag': None,
'size_bytes': 2, 'range': '0-0'},
{'path': '/checktest/a_1', 'etag': None,
'size_bytes': None},
{'path': '/cont/object', 'etag': None,
'size_bytes': None, 'range': '10-40'}])
override_header = 'X-Object-Sysmeta-Container-Update-Override-Etag'
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=good_data,
headers={override_header: 'my custom etag'})
status, headers, body = self.call_slo(req)
self.assertEqual(('201 Created', b''), (status, body))
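        # The expected etag reflects the normalized manifest checked below:
        # ranges covering a whole segment are dropped, and each remaining
        # range is appended to its segment's etag before hashing.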
expected_etag = '"%s"' % md5hex(
'ab:1-1;b:0-0;aetagoftheobjectsegment:10-40;')
self.assertEqual(expected_etag, dict(headers)['Etag'])
self.assertEqual([
('HEAD', '/v1/AUTH_test/checktest/a_1'), # Only once!
('HEAD', '/v1/AUTH_test/checktest/b_2'), # Only once!
('HEAD', '/v1/AUTH_test/cont/object'),
], sorted(self.app.calls[:-1]))
self.assertEqual(
('PUT', '/v1/AUTH_test/checktest/man_3?multipart-manifest=put'),
self.app.calls[-1])
self.assertEqual(
'my custom etag; slo_etag=%s' % expected_etag.strip('"'),
self.app.headers[-1].get(override_header))
# Check that we still populated the manifest properly from our HEADs
req = Request.blank(
# this string looks weird, but it's just an artifact
# of FakeSwift
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_app(req)
manifest_data = json.loads(body)
self.assertEqual(len(manifest_data), 5)
self.assertEqual('a', manifest_data[0]['hash'])
self.assertNotIn('range', manifest_data[0])
self.assertEqual('b', manifest_data[1]['hash'])
self.assertEqual('1-1', manifest_data[1]['range'])
self.assertEqual('b', manifest_data[2]['hash'])
self.assertEqual('0-0', manifest_data[2]['range'])
self.assertEqual('a', manifest_data[3]['hash'])
self.assertNotIn('range', manifest_data[3])
self.assertEqual('etagoftheobjectsegment', manifest_data[4]['hash'])
self.assertEqual('10-40', manifest_data[4]['range'])
class TestSloDeleteManifest(SloTestCase):
def setUp(self):
super(TestSloDeleteManifest, self).setUp()
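        # Register backend fixtures: plain segments, manifests (including
        # nested, missing, and unauthorized sub-segments), and non-ASCII
        # account/container names used by the delete tests below.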
_submanifest_data = json.dumps(
[{'name': '/deltest/b_2', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest/c_3', 'hash': 'b', 'bytes': '2'}])
_submanifest_data = _submanifest_data.encode('ascii')
self.app.register(
'GET', '/v1/AUTH_test/deltest/man_404',
swob.HTTPNotFound, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/man',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/gone', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest/b_2', 'hash': 'b', 'bytes': '2'}]).
encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/man',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/man-all-there',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/b_2', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest/c_3', 'hash': 'b', 'bytes': '2'}]).
encode('ascii'))
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode',
swob.HTTPOk, {}, None)
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode/deltest', swob.HTTPOk, {
'X-Container-Read': 'diff read',
'X-Container-Write': 'diff write',
}, None)
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83', swob.HTTPOk, {
'X-Container-Read': 'same read',
'X-Container-Write': 'same write',
}, None)
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode/deltest/man-all-there',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([
{'name': u'/\N{SNOWMAN}/b_2', 'hash': 'a', 'bytes': '1'},
{'name': u'/\N{SNOWMAN}/c_3', 'hash': 'b', 'bytes': '2'},
]).encode('ascii'))
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83/same-container',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([
{'name': u'/\N{SNOWMAN}/b_2', 'hash': 'a', 'bytes': '1'},
{'name': u'/\N{SNOWMAN}/c_3', 'hash': 'b', 'bytes': '2'},
]).encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/man-all-there',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/gone',
swob.HTTPNotFound, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/a_1',
swob.HTTPOk, {'Content-Length': '1'}, 'a')
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/a_1',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/b_2',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/c_3',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/d_3',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test-un\xc3\xafcode/deltest/man-all-there',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE',
'/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83/same-container',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83/b_2',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83/c_3',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-with-submanifest',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1',
'hash': 'a', 'bytes': '1'},
{'name': '/deltest/submanifest', 'sub_slo': True,
'hash': 'submanifest-etag',
'bytes': len(_submanifest_data)},
{'name': '/deltest/d_3',
'hash': 'd', 'bytes': '3'}]).encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/manifest-with-submanifest',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/submanifest',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
_submanifest_data)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/submanifest',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-missing-submanifest',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest/missing-submanifest',
'hash': 'a', 'bytes': '2', 'sub_slo': True},
{'name': '/deltest/d_3', 'hash': 'd', 'bytes': '3'}]).
encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/manifest-missing-submanifest',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/missing-submanifest',
swob.HTTPNotFound, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-badjson',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
b"[not {json (at ++++all")
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-with-unauth-segment',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest-unauth/q_17',
'hash': '11', 'bytes': '17'}]).encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/manifest-with-unauth-segment',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest-unauth/q_17',
swob.HTTPUnauthorized, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-with-too-many-segs',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1',
'hash': 'a', 'bytes': '1'},
{'name': '/deltest/multi-submanifest', 'sub_slo': True,
'hash': 'submanifest-etag',
'bytes': len(_submanifest_data)},
{'name': '/deltest/b_2',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/c_3',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/d_4',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/e_5',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/f_6',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/g_8',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/g_8',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/h_9',
'hash': 'd', 'bytes': '3'}]))
def test_handle_multipart_delete_man(self):
req = Request.blank(
'/v1/AUTH_test/deltest/man',
environ={'REQUEST_METHOD': 'DELETE'})
self.slo(req.environ, fake_start_response)
self.assertEqual(self.app.call_count, 1)
def test_handle_multipart_delete_bad_utf8(self):
req = Request.blank(
b'/v1/AUTH_test/deltest/man\xff\xfe?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
resp_data = json.loads(body)
self.assertEqual(resp_data['Response Status'],
'412 Precondition Failed')
def test_handle_multipart_delete_whole_404(self):
req = Request.blank(
'/v1/AUTH_test/deltest/man_404?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
resp_data = json.loads(body)
self.assertEqual(
self.app.calls,
[('GET',
'/v1/AUTH_test/deltest/man_404?multipart-manifest=get')])
self.assertEqual(resp_data['Response Status'], '200 OK')
self.assertEqual(resp_data['Response Body'], '')
self.assertEqual(resp_data['Number Deleted'], 0)
self.assertEqual(resp_data['Number Not Found'], 1)
self.assertEqual(resp_data['Errors'], [])
def test_handle_multipart_delete_segment_404(self):
req = Request.blank(
'/v1/AUTH_test/deltest/man?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
resp_data = json.loads(body)
self.assertEqual(
set(self.app.calls),
set([('GET',
'/v1/AUTH_test/deltest/man?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/gone'),
('DELETE', '/v1/AUTH_test/deltest/b_2'),
('DELETE', '/v1/AUTH_test/deltest/man')]))
self.assertEqual(resp_data['Response Status'], '200 OK')
self.assertEqual(resp_data['Number Deleted'], 2)
self.assertEqual(resp_data['Number Not Found'], 1)
def test_handle_multipart_delete_whole(self):
req = Request.blank(
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE'})
self.call_slo(req)
self.assertEqual(set(self.app.calls), set([
('GET',
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/b_2'),
('DELETE', '/v1/AUTH_test/deltest/c_3'),
('DELETE', ('/v1/AUTH_test/deltest/man-all-there'))]))
def test_handle_multipart_delete_non_ascii(self):
unicode_acct = u'AUTH_test-un\u00efcode'
wsgi_acct = bytes_to_wsgi(unicode_acct.encode('utf-8'))
req = Request.blank(
'/v1/%s/deltest/man-all-there?'
'multipart-manifest=delete' % wsgi_acct,
environ={'REQUEST_METHOD': 'DELETE'})
status, _, body = self.call_slo(req)
self.assertEqual('200 OK', status)
lines = body.split(b'\n')
        for line in lines:
            parts = line.split(b':')
            if len(parts) == 1:
                continue
            key, value = parts
            # The response body is bytes, so compare against a bytes literal.
            if key == b'Response Status':
                delete_status = int(value.split()[0])
                self.assertEqual(200, delete_status)
self.assertEqual(set(self.app.calls), set([
('GET',
'/v1/%s/deltest/man-all-there'
'?multipart-manifest=get' % wsgi_acct),
('DELETE', '/v1/%s/\xe2\x98\x83/b_2' % wsgi_acct),
('DELETE', '/v1/%s/\xe2\x98\x83/c_3' % wsgi_acct),
('DELETE', ('/v1/%s/deltest/man-all-there' % wsgi_acct))]))
def test_handle_multipart_delete_nested(self):
req = Request.blank(
'/v1/AUTH_test/deltest/manifest-with-submanifest?' +
'multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE'})
self.call_slo(req)
self.assertEqual(
set(self.app.calls),
{('GET', '/v1/AUTH_test/deltest/' +
'manifest-with-submanifest?multipart-manifest=get'),
('GET', '/v1/AUTH_test/deltest/' +
'submanifest?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/a_1'),
('DELETE', '/v1/AUTH_test/deltest/b_2'),
('DELETE', '/v1/AUTH_test/deltest/c_3'),
('DELETE', '/v1/AUTH_test/deltest/submanifest'),
('DELETE', '/v1/AUTH_test/deltest/d_3'),
('DELETE', '/v1/AUTH_test/deltest/manifest-with-submanifest')})
def test_handle_multipart_delete_nested_too_many_segments(self):
req = Request.blank(
'/v1/AUTH_test/deltest/manifest-with-too-many-segs?' +
'multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
with patch.object(self.slo, 'max_manifest_segments', 1):
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
resp_data = json.loads(body)
self.assertEqual(resp_data['Response Status'], '400 Bad Request')
self.assertEqual(resp_data['Response Body'],
'Too many buffered slo segments to delete.')
def test_handle_multipart_delete_nested_404(self):
req = Request.blank(
'/v1/AUTH_test/deltest/manifest-missing-submanifest' +
'?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
resp_data = json.loads(body)
self.assertEqual(set(self.app.calls), {
('GET', '/v1/AUTH_test/deltest/' +
'manifest-missing-submanifest?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/a_1'),
('GET', '/v1/AUTH_test/deltest/' +
'missing-submanifest?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/d_3'),
('DELETE', '/v1/AUTH_test/deltest/manifest-missing-submanifest'),
})
self.assertEqual(resp_data['Response Status'], '200 OK')
self.assertEqual(resp_data['Response Body'], '')
self.assertEqual(resp_data['Number Deleted'], 3)
self.assertEqual(resp_data['Number Not Found'], 1)
self.assertEqual(resp_data['Errors'], [])
def test_handle_multipart_delete_nested_401(self):
self.app.register(
'GET', '/v1/AUTH_test/deltest/submanifest',
swob.HTTPUnauthorized, {}, None)
req = Request.blank(
('/v1/AUTH_test/deltest/manifest-with-submanifest' +
'?multipart-manifest=delete'),
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
resp_data = json.loads(body)
self.assertEqual(resp_data['Response Status'], '400 Bad Request')
self.assertEqual(resp_data['Errors'],
[['/deltest/submanifest', '401 Unauthorized']])
def test_handle_multipart_delete_nested_500(self):
self.app.register(
'GET', '/v1/AUTH_test/deltest/submanifest',
swob.HTTPServerError, {}, None)
req = Request.blank(
('/v1/AUTH_test/deltest/manifest-with-submanifest' +
'?multipart-manifest=delete'),
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
resp_data = json.loads(body)
self.assertEqual(resp_data['Response Status'], '400 Bad Request')
self.assertEqual(resp_data['Errors'],
[['/deltest/submanifest',
'Unable to load SLO manifest or segment.']])
def test_handle_multipart_delete_not_a_manifest(self):
req = Request.blank(
'/v1/AUTH_test/deltest/a_1?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
resp_data = json.loads(body)
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/deltest/a_1?multipart-manifest=get')])
self.assertEqual(resp_data['Response Status'], '400 Bad Request')
self.assertEqual(resp_data['Response Body'], '')
self.assertEqual(resp_data['Number Deleted'], 0)
self.assertEqual(resp_data['Number Not Found'], 0)
self.assertEqual(resp_data['Errors'],
[['/deltest/a_1', 'Not an SLO manifest']])
def test_handle_multipart_delete_bad_json(self):
req = Request.blank(
'/v1/AUTH_test/deltest/manifest-badjson?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
resp_data = json.loads(body)
self.assertEqual(self.app.calls,
[('GET', '/v1/AUTH_test/deltest/' +
'manifest-badjson?multipart-manifest=get')])
self.assertEqual(resp_data['Response Status'], '400 Bad Request')
self.assertEqual(resp_data['Response Body'], '')
self.assertEqual(resp_data['Number Deleted'], 0)
self.assertEqual(resp_data['Number Not Found'], 0)
self.assertEqual(resp_data['Errors'],
[['/deltest/manifest-badjson',
'Unable to load SLO manifest']])
def test_handle_multipart_delete_401(self):
req = Request.blank(
'/v1/AUTH_test/deltest/manifest-with-unauth-segment' +
'?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
resp_data = json.loads(body)
self.assertEqual(
set(self.app.calls),
set([('GET', '/v1/AUTH_test/deltest/' +
'manifest-with-unauth-segment?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/a_1'),
('DELETE', '/v1/AUTH_test/deltest-unauth/q_17'),
('DELETE', '/v1/AUTH_test/deltest/' +
'manifest-with-unauth-segment')]))
self.assertEqual(resp_data['Response Status'], '400 Bad Request')
self.assertEqual(resp_data['Response Body'], '')
self.assertEqual(resp_data['Number Deleted'], 2)
self.assertEqual(resp_data['Number Not Found'], 0)
self.assertEqual(resp_data['Errors'],
[['/deltest-unauth/q_17', '401 Unauthorized']])
def test_handle_multipart_delete_client_content_type(self):
req = Request.blank(
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE', 'CONTENT_TYPE': 'foo/bar'},
headers={'Accept': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
resp_data = json.loads(body)
self.assertEqual(resp_data["Number Deleted"], 3)
self.assertEqual(set(self.app.calls), set([
('GET',
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/b_2'),
('DELETE', '/v1/AUTH_test/deltest/c_3'),
('DELETE', '/v1/AUTH_test/deltest/man-all-there')]))
def test_handle_async_delete_whole_404(self):
self.slo.allow_async_delete = True
req = Request.blank(
'/v1/AUTH_test/deltest/man_404?async=t&multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual('404 Not Found', status)
self.assertEqual(
self.app.calls,
[('GET',
'/v1/AUTH_test/deltest/man_404?multipart-manifest=get')])
def test_handle_async_delete_turned_off(self):
self.slo.allow_async_delete = False
req = Request.blank(
'/v1/AUTH_test/deltest/man-all-there?'
'multipart-manifest=delete&async=on&heartbeat=on',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Accept': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
resp_data = json.loads(body)
self.assertEqual(resp_data["Number Deleted"], 3)
self.assertEqual(set(self.app.calls), set([
('GET',
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/b_2'),
('DELETE', '/v1/AUTH_test/deltest/c_3'),
('DELETE', '/v1/AUTH_test/deltest/man-all-there')]))
def test_handle_async_delete_whole(self):
self.slo.allow_async_delete = True
now = Timestamp(time.time())
exp_obj_cont = get_expirer_container(
int(now), 86400, 'AUTH_test', 'deltest', 'man-all-there')
self.app.register(
'UPDATE', '/v1/.expiring_objects/%s' % exp_obj_cont,
swob.HTTPNoContent, {}, None)
req = Request.blank(
'/v1/AUTH_test/deltest/man-all-there'
'?async=true&multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE'})
with patch('swift.common.utils.Timestamp.now', return_value=now):
status, headers, body = self.call_slo(req)
self.assertEqual('204 No Content', status)
self.assertEqual(b'', body)
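        # Rather than deleting segments inline, the async path enqueues
        # expirer rows via an UPDATE to the .expiring_objects account and
        # then deletes only the manifest itself.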
self.assertEqual(self.app.calls, [
('GET',
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
('UPDATE', '/v1/.expiring_objects/%s'
'?async=true&multipart-manifest=delete' % exp_obj_cont),
('DELETE', '/v1/AUTH_test/deltest/man-all-there'
'?async=true&multipart-manifest=delete'),
])
for header, expected in (
('Content-Type', 'application/json'),
('X-Backend-Storage-Policy-Index', '0'),
('X-Backend-Allow-Private-Methods', 'True'),
):
self.assertIn(header, self.app.calls_with_headers[1].headers)
value = self.app.calls_with_headers[1].headers[header]
msg = 'Expected %s header to be %r, not %r'
self.assertEqual(value, expected, msg % (header, expected, value))
self.assertEqual(json.loads(self.app.req_bodies[1]), [
{'content_type': 'application/async-deleted',
'created_at': now.internal,
'deleted': 0,
'etag': 'd41d8cd98f00b204e9800998ecf8427e',
'name': '%s-AUTH_test/deltest/b_2' % now.internal,
'size': 0,
'storage_policy_index': 0},
{'content_type': 'application/async-deleted',
'created_at': now.internal,
'deleted': 0,
'etag': 'd41d8cd98f00b204e9800998ecf8427e',
'name': '%s-AUTH_test/deltest/c_3' % now.internal,
'size': 0,
'storage_policy_index': 0},
])
def test_handle_async_delete_non_ascii(self):
self.slo.allow_async_delete = True
unicode_acct = u'AUTH_test-un\u00efcode'
wsgi_acct = bytes_to_wsgi(unicode_acct.encode('utf-8'))
now = Timestamp(time.time())
exp_obj_cont = get_expirer_container(
int(now), 86400, unicode_acct, 'deltest', 'man-all-there')
self.app.register(
'UPDATE', '/v1/.expiring_objects/%s' % exp_obj_cont,
swob.HTTPNoContent, {}, None)
authorize_calls = []
def authorize(req):
authorize_calls.append((req.method, req.acl))
req = Request.blank(
'/v1/%s/deltest/man-all-there?'
'async=1&multipart-manifest=delete&heartbeat=1' % wsgi_acct,
environ={'REQUEST_METHOD': 'DELETE', 'swift.authorize': authorize})
with patch('swift.common.utils.Timestamp.now', return_value=now):
status, _, body = self.call_slo(req)
# Every async delete should only need to make 3 requests during the
# client request/response cycle, so no need to support heart-beating
self.assertEqual('204 No Content', status)
self.assertEqual(b'', body)
self.assertEqual(self.app.calls, [
('GET',
'/v1/%s/deltest/man-all-there?'
'multipart-manifest=get' % wsgi_acct),
('HEAD', '/v1/%s' % wsgi_acct),
('HEAD', '/v1/%s/deltest' % wsgi_acct),
('HEAD', '/v1/%s/\xe2\x98\x83' % wsgi_acct),
('UPDATE',
'/v1/.expiring_objects/%s'
'?async=1&heartbeat=1&multipart-manifest=delete' % exp_obj_cont),
('DELETE',
'/v1/%s/deltest/man-all-there'
'?async=1&heartbeat=1&multipart-manifest=delete' % wsgi_acct),
])
self.assertEqual(authorize_calls, [
('GET', None), # Original GET
('DELETE', 'diff write'),
('DELETE', 'same write'),
('DELETE', None), # Final DELETE
])
for header, expected in (
('Content-Type', 'application/json'),
('X-Backend-Storage-Policy-Index', '0'),
('X-Backend-Allow-Private-Methods', 'True'),
):
self.assertIn(header, self.app.calls_with_headers[-2].headers)
value = self.app.calls_with_headers[-2].headers[header]
msg = 'Expected %s header to be %r, not %r'
self.assertEqual(value, expected, msg % (header, expected, value))
self.assertEqual(json.loads(self.app.req_bodies[-2]), [
{'content_type': 'application/async-deleted',
'created_at': now.internal,
'deleted': 0,
'etag': 'd41d8cd98f00b204e9800998ecf8427e',
'name': u'%s-%s/\N{SNOWMAN}/b_2' % (now.internal, unicode_acct),
'size': 0,
'storage_policy_index': 0},
{'content_type': 'application/async-deleted',
'created_at': now.internal,
'deleted': 0,
'etag': 'd41d8cd98f00b204e9800998ecf8427e',
'name': u'%s-%s/\N{SNOWMAN}/c_3' % (now.internal, unicode_acct),
'size': 0,
'storage_policy_index': 0},
])
def test_handle_async_delete_non_ascii_same_container(self):
self.slo.allow_async_delete = True
unicode_acct = u'AUTH_test-un\u00efcode'
wsgi_acct = bytes_to_wsgi(unicode_acct.encode('utf-8'))
now = Timestamp(time.time())
exp_obj_cont = get_expirer_container(
int(now), 86400, unicode_acct, u'\N{SNOWMAN}', 'same-container')
self.app.register(
'UPDATE', '/v1/.expiring_objects/%s' % exp_obj_cont,
swob.HTTPNoContent, {}, None)
authorize_calls = []
def authorize(req):
authorize_calls.append((req.method, req.acl))
req = Request.blank(
'/v1/%s/\xe2\x98\x83/same-container?'
'async=yes&multipart-manifest=delete' % wsgi_acct,
environ={'REQUEST_METHOD': 'DELETE', 'swift.authorize': authorize})
with patch('swift.common.utils.Timestamp.now', return_value=now):
status, _, body = self.call_slo(req)
self.assertEqual('204 No Content', status)
self.assertEqual(b'', body)
self.assertEqual(self.app.calls, [
('GET',
'/v1/%s/\xe2\x98\x83/same-container?'
'multipart-manifest=get' % wsgi_acct),
('HEAD', '/v1/%s' % wsgi_acct),
('HEAD', '/v1/%s/\xe2\x98\x83' % wsgi_acct),
('UPDATE',
'/v1/.expiring_objects/%s'
'?async=yes&multipart-manifest=delete' % exp_obj_cont),
('DELETE',
'/v1/%s/\xe2\x98\x83/same-container'
'?async=yes&multipart-manifest=delete' % wsgi_acct),
])
self.assertEqual(authorize_calls, [
('GET', None), # Original GET
('DELETE', 'same write'), # Only need one auth check
('DELETE', None), # Final DELETE
])
for header, expected in (
('Content-Type', 'application/json'),
('X-Backend-Storage-Policy-Index', '0'),
('X-Backend-Allow-Private-Methods', 'True'),
):
self.assertIn(header, self.app.calls_with_headers[-2].headers)
value = self.app.calls_with_headers[-2].headers[header]
msg = 'Expected %s header to be %r, not %r'
self.assertEqual(value, expected, msg % (header, expected, value))
self.assertEqual(json.loads(self.app.req_bodies[-2]), [
{'content_type': 'application/async-deleted',
'created_at': now.internal,
'deleted': 0,
'etag': 'd41d8cd98f00b204e9800998ecf8427e',
'name': u'%s-%s/\N{SNOWMAN}/b_2' % (now.internal, unicode_acct),
'size': 0,
'storage_policy_index': 0},
{'content_type': 'application/async-deleted',
'created_at': now.internal,
'deleted': 0,
'etag': 'd41d8cd98f00b204e9800998ecf8427e',
'name': u'%s-%s/\N{SNOWMAN}/c_3' % (now.internal, unicode_acct),
'size': 0,
'storage_policy_index': 0},
])
def test_handle_async_delete_nested(self):
self.slo.allow_async_delete = True
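        # Async delete refuses manifests whose segments are themselves
        # large objects (sub-manifests).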
req = Request.blank(
'/v1/AUTH_test/deltest/manifest-with-submanifest' +
'?async=on&multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE'})
status, _, body = self.call_slo(req)
self.assertEqual('400 Bad Request', status)
self.assertEqual(b'No segments may be large objects.', body)
self.assertEqual(self.app.calls, [
('GET', '/v1/AUTH_test/deltest/' +
'manifest-with-submanifest?multipart-manifest=get')])
def test_handle_async_delete_too_many_containers(self):
self.slo.allow_async_delete = True
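        # Async delete requires every segment to live in a single container.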
self.app.register(
'GET', '/v1/AUTH_test/deltest/man',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/cont1/a_1', 'hash': 'a', 'bytes': '1'},
{'name': '/cont2/b_2', 'hash': 'b', 'bytes': '2'}]).
encode('ascii'))
req = Request.blank(
'/v1/AUTH_test/deltest/man?async=on&multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE'})
status, _, body = self.call_slo(req)
self.assertEqual('400 Bad Request', status)
expected = b'All segments must be in one container. Found segments in '
self.assertEqual(expected, body[:len(expected)])
self.assertEqual(self.app.calls, [
('GET', '/v1/AUTH_test/deltest/man?multipart-manifest=get')])
class TestSloHeadOldManifest(SloTestCase):
slo_etag = md5hex("seg01-hashseg02-hash")
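    # "Old" manifests may lack the X-Object-Sysmeta-Slo-Etag/-Size sysmeta;
    # when they do, a HEAD must fall back to a GET of the manifest body
    # (see manifest_has_sysmeta in setUp).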
def setUp(self):
super(TestSloHeadOldManifest, self).setUp()
manifest_json = json.dumps([
{'name': '/gettest/seg01',
'bytes': '100',
'hash': 'seg01-hash',
'content_type': 'text/plain',
'last_modified': '2013-11-19T11:33:45.137446'},
{'name': '/gettest/seg02',
'bytes': '200',
'hash': 'seg02-hash',
'content_type': 'text/plain',
'last_modified': '2013-11-19T11:33:45.137447'}])
self.manifest_json_etag = md5hex(manifest_json)
manifest_headers = {
'Content-Length': str(len(manifest_json)),
'Content-Type': 'test/data',
'X-Static-Large-Object': 'true',
'X-Object-Sysmeta-Artisanal-Etag': 'bespoke',
'Etag': self.manifest_json_etag}
manifest_headers.update(getattr(self, 'extra_manifest_headers', {}))
self.manifest_has_sysmeta = all(h in manifest_headers for h in (
'X-Object-Sysmeta-Slo-Etag', 'X-Object-Sysmeta-Slo-Size'))
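        # If the manifest carries SLO sysmeta (Etag and Size), a HEAD can be
        # answered from those headers alone; otherwise SLO must follow up
        # with a GET of the manifest body.  The tests below expect that
        # extra GET only when the sysmeta is absent.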
self.app.register(
'GET', '/v1/AUTH_test/headtest/man',
swob.HTTPOk, manifest_headers, manifest_json.encode('ascii'))
def test_etag_is_hash_of_segment_etags(self):
req = Request.blank(
'/v1/AUTH_test/headtest/man',
environ={'REQUEST_METHOD': 'HEAD'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('X-Manifest-Etag', self.manifest_json_etag), headers)
self.assertIn(('Content-Length', '300'), headers)
self.assertIn(('Content-Type', 'test/data'), headers)
self.assertEqual(body, b'') # it's a HEAD request, after all
expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
if not self.manifest_has_sysmeta:
expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
self.assertEqual(self.app.calls, expected_app_calls)
def test_if_none_match_etag_matching(self):
req = Request.blank(
'/v1/AUTH_test/headtest/man',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': self.slo_etag})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '304 Not Modified')
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('Content-Length', '0'), headers)
self.assertIn(('Content-Type', 'test/data'), headers)
expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
if not self.manifest_has_sysmeta:
expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
self.assertEqual(self.app.calls, expected_app_calls)
def test_if_match_etag_not_matching(self):
req = Request.blank(
'/v1/AUTH_test/headtest/man',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': 'zzz'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '412 Precondition Failed')
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('Content-Length', '0'), headers)
self.assertIn(('Content-Type', 'test/data'), headers)
expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
if not self.manifest_has_sysmeta:
expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
self.assertEqual(self.app.calls, expected_app_calls)
def test_if_none_match_etag_matching_with_override(self):
req = Request.blank(
'/v1/AUTH_test/headtest/man',
environ={'REQUEST_METHOD': 'HEAD'},
headers={
'If-None-Match': 'bespoke',
'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Artisanal-Etag'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '304 Not Modified')
# We *are not* responsible for replacing the etag; whoever set
# x-backend-etag-is-at is responsible
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('Content-Length', '0'), headers)
self.assertIn(('Content-Type', 'test/data'), headers)
expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
if not self.manifest_has_sysmeta:
expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
self.assertEqual(self.app.calls, expected_app_calls)
def test_if_match_etag_not_matching_with_override(self):
req = Request.blank(
'/v1/AUTH_test/headtest/man',
environ={'REQUEST_METHOD': 'HEAD'},
headers={
'If-Match': self.slo_etag,
'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Artisanal-Etag'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '412 Precondition Failed')
# We *are not* responsible for replacing the etag; whoever set
# x-backend-etag-is-at is responsible
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('Content-Length', '0'), headers)
self.assertIn(('Content-Type', 'test/data'), headers)
expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
if not self.manifest_has_sysmeta:
expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
self.assertEqual(self.app.calls, expected_app_calls)
class TestSloHeadManifest(TestSloHeadOldManifest):
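    # Re-runs the TestSloHeadOldManifest tests, but with SLO sysmeta present
    # on the manifest, so a follow-up GET of the manifest is never needed.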
def setUp(self):
self.extra_manifest_headers = {
'X-Object-Sysmeta-Slo-Etag': self.slo_etag,
'X-Object-Sysmeta-Slo-Size': '300',
}
super(TestSloHeadManifest, self).setUp()
class TestSloGetRawManifest(SloTestCase):
def setUp(self):
super(TestSloGetRawManifest, self).setUp()
_bc_manifest_json = json.dumps(
[{'name': '/gettest/b_10', 'hash': md5hex('b' * 10), 'bytes': '10',
'content_type': 'text/plain',
'last_modified': '1970-01-01T00:00:00.000000'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15), 'bytes': '15',
'content_type': 'text/plain',
'last_modified': '1970-01-01T00:00:00.000000'},
{'name': '/gettest/d_10',
'hash': md5hex(md5hex("e" * 5) + md5hex("f" * 5)), 'bytes': '10',
'content_type': 'application/json',
'sub_slo': True,
'last_modified': '1970-01-01T00:00:00.000000'}])
self.bc_etag = md5hex(_bc_manifest_json)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-bc',
# proxy obj controller removes swift_bytes from content-type
swob.HTTPOk, {'Content-Type': 'text/plain',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Plant': 'Ficus',
'Etag': md5hex(_bc_manifest_json)},
_bc_manifest_json)
_bc_manifest_json_ranges = json.dumps(
[{'name': '/gettest/b_10', 'hash': md5hex('b' * 10), 'bytes': '10',
'last_modified': '1970-01-01T00:00:00.000000',
'content_type': 'text/plain', 'range': '1-99'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15), 'bytes': '15',
'last_modified': '1970-01-01T00:00:00.000000',
'content_type': 'text/plain', 'range': '100-200'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-bc-r',
# proxy obj controller removes swift_bytes from content-type
swob.HTTPOk, {'Content-Type': 'text/plain',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Plant': 'Ficus',
'Etag': md5hex(_bc_manifest_json_ranges)},
_bc_manifest_json_ranges)
def test_get_raw_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-bc'
'?multipart-manifest=get&format=raw',
environ={'REQUEST_METHOD': 'GET',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
expected_body = json.dumps([
{'etag': md5hex('b' * 10), 'size_bytes': '10',
'path': '/gettest/b_10'},
{'etag': md5hex('c' * 15), 'size_bytes': '15',
'path': '/gettest/c_15'},
{'etag': md5hex(md5hex("e" * 5) + md5hex("f" * 5)),
'size_bytes': '10',
'path': '/gettest/d_10'}], sort_keys=True)
expected_etag = md5hex(expected_body)
if six.PY3:
expected_body = expected_body.encode('utf-8')
self.assertEqual(body, expected_body)
self.assertEqual(status, '200 OK')
self.assertTrue(('Etag', expected_etag) in headers, headers)
self.assertTrue(('X-Static-Large-Object', 'true') in headers, headers)
# raw format should return the actual manifest object content-type
self.assertIn(('Content-Type', 'text/plain'), headers)
try:
json.loads(body)
except ValueError:
self.fail("Invalid JSON in manifest GET: %r" % body)
def test_get_raw_manifest_passthrough_with_ranges(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-bc-r'
'?multipart-manifest=get&format=raw',
environ={'REQUEST_METHOD': 'GET',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
# raw format should return the actual manifest object content-type
self.assertIn(('Content-Type', 'text/plain'), headers)
try:
resp_data = json.loads(body)
except ValueError:
self.fail("Invalid JSON in manifest GET: %r" % body)
self.assertEqual(
resp_data,
[{'etag': md5hex('b' * 10), 'size_bytes': '10',
'path': '/gettest/b_10', 'range': '1-99'},
{'etag': md5hex('c' * 15), 'size_bytes': '15',
'path': '/gettest/c_15', 'range': '100-200'}],
body)
class TestSloGetManifest(SloTestCase):
def setUp(self):
super(TestSloGetManifest, self).setUp()
# some plain old objects
self.app.register(
'GET', '/v1/AUTH_test/gettest/a_5',
swob.HTTPOk, {'Content-Length': '5',
'Etag': md5hex('a' * 5)},
'a' * 5)
self.app.register(
'GET', '/v1/AUTH_test/gettest/b_10',
swob.HTTPOk, {'Content-Length': '10',
'Etag': md5hex('b' * 10)},
'b' * 10)
self.app.register(
'GET', '/v1/AUTH_test/gettest/c_15',
swob.HTTPOk, {'Content-Length': '15',
'Etag': md5hex('c' * 15)},
'c' * 15)
self.app.register(
'GET', '/v1/AUTH_test/gettest/d_20',
swob.HTTPOk, {'Content-Length': '20',
'Etag': md5hex('d' * 20)},
'd' * 20)
self.app.register(
'GET', '/v1/AUTH_test/gettest/e_25',
swob.HTTPOk, {'Content-Length': '25',
'Etag': md5hex('e' * 25)},
'e' * 25)
self.app.register(
'GET', '/v1/AUTH_test/gettest/f_30',
swob.HTTPOk, {'Content-Length': '30',
'Etag': md5hex('f' * 30)},
'f' * 30)
self.app.register(
'GET', '/v1/AUTH_test/gettest/g_35',
swob.HTTPOk, {'Content-Length': '35',
'Etag': md5hex('g' * 35)},
'g' * 35)
self.app.register(
'GET', '/v1/AUTH_test/gettest/h_40',
swob.HTTPOk, {'Content-Length': '40',
'Etag': md5hex('h' * 40)},
'h' * 40)
self.app.register(
'GET', '/v1/AUTH_test/gettest/i_45',
swob.HTTPOk, {'Content-Length': '45',
'Etag': md5hex('i' * 45)},
'i' * 45)
self.app.register(
'GET', '/v1/AUTH_test/gettest/j_50',
swob.HTTPOk, {'Content-Length': '50',
'Etag': md5hex('j' * 50)},
'j' * 50)
self.app.register(
'GET', '/v1/AUTH_test/gettest/k_55',
swob.HTTPOk, {'Content-Length': '55',
'Etag': md5hex('k' * 55)},
'k' * 55)
self.app.register(
'GET', '/v1/AUTH_test/gettest/l_60',
swob.HTTPOk, {'Content-Length': '60',
'Etag': md5hex('l' * 60)},
'l' * 60)
_bc_manifest_json = json.dumps(
[{'name': '/gettest/b_10', 'hash': md5hex('b' * 10), 'bytes': '10',
'content_type': 'text/plain'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15), 'bytes': '15',
'content_type': 'text/plain'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-bc',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Plant': 'Ficus',
'Etag': md5hex(_bc_manifest_json)},
_bc_manifest_json)
_abcd_manifest_json = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/manifest-bc', 'sub_slo': True,
'content_type': 'application/json',
'hash': md5hex(md5hex("b" * 10) + md5hex("c" * 15)),
'bytes': 25},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'}])
self.abcd_manifest_json_etag = md5hex(_abcd_manifest_json)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': self.abcd_manifest_json_etag},
_abcd_manifest_json)
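        # Expected large-object Etag for manifest-abcd: the md5 of the
        # concatenated segment etags (the sub-slo contributes its own
        # slo-etag).  Referenced by the GET/HEAD tests below.
        self.manifest_abcd_etag = md5hex(
            md5hex("a" * 5) + md5hex(md5hex("b" * 10) + md5hex("c" * 15)) +
            md5hex("d" * 20))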
# A submanifest segment is created using the response headers from a
# HEAD on the submanifest. That HEAD is passed through SLO which will
# modify the response content-length to be equal to the size of the
# submanifest's large object. The swift_bytes value appended to the
# submanifest's content-type will have been removed. So the sub-slo
# segment dict that is written to the parent manifest should have the
# correct bytes and content-type values. However, if somehow the
# submanifest HEAD response wasn't modified by SLO (maybe
# historically?) and we ended up with the parent manifest sub-slo entry
        # having swift_bytes appended to its content-type and the actual
# submanifest size in its bytes field, then SLO can cope, so we create
# a deviant manifest to verify that SLO can deal with it.
_abcd_manifest_json_alt = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/manifest-bc', 'sub_slo': True,
'content_type': 'application/json; swift_bytes=25',
'hash': md5hex(md5hex("b" * 10) + md5hex("c" * 15)),
'bytes': len(_bc_manifest_json)},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd-alt',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_abcd_manifest_json_alt)},
_abcd_manifest_json_alt)
_abcdefghijkl_manifest_json = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/b_10', 'hash': md5hex("b" * 10),
'content_type': 'text/plain', 'bytes': '10'},
{'name': '/gettest/c_15', 'hash': md5hex("c" * 15),
'content_type': 'text/plain', 'bytes': '15'},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'},
{'name': '/gettest/e_25', 'hash': md5hex("e" * 25),
'content_type': 'text/plain', 'bytes': '25'},
{'name': '/gettest/f_30', 'hash': md5hex("f" * 30),
'content_type': 'text/plain', 'bytes': '30'},
{'name': '/gettest/g_35', 'hash': md5hex("g" * 35),
'content_type': 'text/plain', 'bytes': '35'},
{'name': '/gettest/h_40', 'hash': md5hex("h" * 40),
'content_type': 'text/plain', 'bytes': '40'},
{'name': '/gettest/i_45', 'hash': md5hex("i" * 45),
'content_type': 'text/plain', 'bytes': '45'},
{'name': '/gettest/j_50', 'hash': md5hex("j" * 50),
'content_type': 'text/plain', 'bytes': '50'},
{'name': '/gettest/k_55', 'hash': md5hex("k" * 55),
'content_type': 'text/plain', 'bytes': '55'},
{'name': '/gettest/l_60', 'hash': md5hex("l" * 60),
'content_type': 'text/plain', 'bytes': '60'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcdefghijkl',
swob.HTTPOk, {
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_abcdefghijkl_manifest_json)},
_abcdefghijkl_manifest_json)
_bc_ranges_manifest_json = json.dumps(
[{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
'content_type': 'text/plain', 'bytes': '10',
'range': '4-7'},
{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
'content_type': 'text/plain', 'bytes': '10',
'range': '2-5'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
'content_type': 'text/plain', 'bytes': '15',
'range': '0-3'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
'content_type': 'text/plain', 'bytes': '15',
'range': '11-14'}])
self.bc_ranges_etag = md5hex(_bc_ranges_manifest_json)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-bc-ranges',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Plant': 'Ficus',
'Etag': self.bc_ranges_etag},
_bc_ranges_manifest_json)
_abcd_ranges_manifest_json = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5',
'range': '0-3'},
{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5',
'range': '1-4'},
{'name': '/gettest/manifest-bc-ranges', 'sub_slo': True,
'content_type': 'application/json',
'hash': self.bc_ranges_etag,
'bytes': 16,
'range': '8-15'},
{'name': '/gettest/manifest-bc-ranges', 'sub_slo': True,
'content_type': 'application/json',
'hash': self.bc_ranges_etag,
'bytes': len(_bc_ranges_manifest_json),
'range': '0-7'},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20',
'range': '0-3'},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20',
'range': '8-11'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_abcd_ranges_manifest_json)},
_abcd_ranges_manifest_json)
_abcd_subranges_manifest_json = json.dumps(
[{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '6-10'},
{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '31-31'},
{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '14-18'},
{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '0-0'},
{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '22-26'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd-subranges',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_abcd_subranges_manifest_json)},
_abcd_subranges_manifest_json)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-badjson',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Fish': 'Bass'},
"[not {json (at ++++all")
def tearDown(self):
self.assertEqual(self.app.unclosed_requests, {})
def test_get_manifest_passthrough(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-bc?multipart-manifest=get',
environ={'REQUEST_METHOD': 'GET',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertIn(
('Content-Type', 'application/json; charset=utf-8'), headers)
try:
resp_data = json.loads(body)
except ValueError:
self.fail("Invalid JSON in manifest GET: %r" % body)
self.assertEqual(
resp_data,
[{'hash': md5hex('b' * 10), 'bytes': '10', 'name': '/gettest/b_10',
'content_type': 'text/plain'},
{'hash': md5hex('c' * 15), 'bytes': '15', 'name': '/gettest/c_15',
'content_type': 'text/plain'}],
body)
self.assertIn(('Etag', md5hex(body)), headers)
def test_get_nonmanifest_passthrough(self):
req = Request.blank(
'/v1/AUTH_test/gettest/a_5',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertEqual(body, b'aaaaa')
def test_get_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-bc',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
manifest_etag = md5hex(md5hex("b" * 10) + md5hex("c" * 15))
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '25')
self.assertEqual(headers['Etag'], '"%s"' % manifest_etag)
self.assertEqual(headers['X-Object-Meta-Plant'], 'Ficus')
self.assertEqual(body, b'bbbbbbbbbbccccccccccccccc')
for _, _, hdrs in self.app.calls_with_headers[1:]:
ua = hdrs.get("User-Agent", "")
self.assertTrue("SLO MultipartGET" in ua)
self.assertFalse("SLO MultipartGET SLO MultipartGET" in ua)
# the first request goes through unaltered
first_ua = self.app.calls_with_headers[0][2].get("User-Agent")
self.assertFalse(
"SLO MultipartGET" in first_ua)
def test_get_manifest_repeated_segments(self):
_aabbccdd_manifest_json = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/b_10', 'hash': md5hex("b" * 10),
'content_type': 'text/plain', 'bytes': '10'},
{'name': '/gettest/b_10', 'hash': md5hex("b" * 10),
'content_type': 'text/plain', 'bytes': '10'},
{'name': '/gettest/c_15', 'hash': md5hex("c" * 15),
'content_type': 'text/plain', 'bytes': '15'},
{'name': '/gettest/c_15', 'hash': md5hex("c" * 15),
'content_type': 'text/plain', 'bytes': '15'},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-aabbccdd',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_aabbccdd_manifest_json)},
_aabbccdd_manifest_json)
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-aabbccdd',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(body, (
b'aaaaaaaaaabbbbbbbbbbbbbbbbbbbbcccccccccccccccccccccccccccccc'
b'dddddddddddddddddddddddddddddddddddddddd'))
self.assertEqual(self.app.calls, [
('GET', '/v1/AUTH_test/gettest/manifest-aabbccdd'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
self.assertEqual(ranges, [
None,
'bytes=0-4,0-4',
'bytes=0-9,0-9',
'bytes=0-14,0-14',
'bytes=0-19,0-19'])
def test_get_manifest_ratelimiting(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcdefghijkl',
environ={'REQUEST_METHOD': 'GET'})
the_time = [time.time()]
sleeps = []
def mock_time():
return the_time[0]
def mock_sleep(duration):
sleeps.append(duration)
the_time[0] += duration
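        # time.time and eventlet.sleep are patched so the rate-limiter's
        # sleeps are recorded (and the fake clock advanced) instead of
        # actually delaying the test.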
with patch('time.time', mock_time), \
patch('eventlet.sleep', mock_sleep), \
patch.object(self.slo, 'rate_limit_under_size', 999999999), \
patch.object(self.slo, 'rate_limit_after_segment', 0):
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK') # sanity check
self.assertEqual(sleeps, [2.0, 2.0, 2.0, 2.0, 2.0])
# give the client the first 4 segments without ratelimiting; we'll
# sleep less
del sleeps[:]
with patch('time.time', mock_time), \
patch('eventlet.sleep', mock_sleep), \
patch.object(self.slo, 'rate_limit_under_size', 999999999), \
patch.object(self.slo, 'rate_limit_after_segment', 4):
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK') # sanity check
self.assertEqual(sleeps, [2.0, 2.0, 2.0])
# ratelimit segments under 35 bytes; this affects a-f
del sleeps[:]
with patch('time.time', mock_time), \
patch('eventlet.sleep', mock_sleep), \
patch.object(self.slo, 'rate_limit_under_size', 35), \
patch.object(self.slo, 'rate_limit_after_segment', 0):
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK') # sanity check
self.assertEqual(sleeps, [2.0, 2.0])
# ratelimit segments under 36 bytes; this now affects a-g, netting
# us one more sleep than before
del sleeps[:]
with patch('time.time', mock_time), \
patch('eventlet.sleep', mock_sleep), \
patch.object(self.slo, 'rate_limit_under_size', 36), \
patch.object(self.slo, 'rate_limit_after_segment', 0):
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK') # sanity check
self.assertEqual(sleeps, [2.0, 2.0, 2.0])
def test_get_manifest_with_submanifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '50')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
self.assertEqual(headers['X-Manifest-Etag'],
self.abcd_manifest_json_etag)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
def test_get_manifest_with_submanifest_bytes_in_content_type(self):
        # verify correct content-length when the sub-slo entry in the
        # manifest has the large object's size appended to its content-type
        # as swift_bytes, and the submanifest's own JSON length in the
        # bytes field.
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd-alt',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '50')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
def test_range_get_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=3-17'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '15')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
self.assertEqual(body, b'aabbbbbbbbbbccc')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
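        # bytes=3-17 of the 50-byte object maps to the last two bytes of a_5
        # (bytes=3-), all of b_10 (no Range), and the first three bytes of
        # c_15 (bytes=0-2); the manifests themselves are re-fetched without
        # a Range.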
self.assertEqual(ranges, [
'bytes=3-17',
None,
None,
'bytes=3-',
None,
'bytes=0-2'])
ignore_range_headers = [
c[2].get('X-Backend-Ignore-Range-If-Metadata-Present')
for c in self.app.calls_with_headers]
self.assertEqual(ignore_range_headers, [
'X-Static-Large-Object',
None,
None,
None,
None,
None])
# we set swift.source for everything but the first request
self.assertIsNone(self.app.swift_sources[0])
self.assertEqual(self.app.swift_sources[1:],
['SLO'] * (len(self.app.swift_sources) - 1))
def test_multiple_ranges_get_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=3-17,20-24,35-999999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
ct, params = parse_content_type(headers['Content-Type'])
params = dict(params)
self.assertEqual(ct, 'multipart/byteranges')
boundary = params.get('boundary')
self.assertTrue(boundary is not None)
if six.PY3:
boundary = boundary.encode('utf-8')
self.assertEqual(len(body), int(headers['Content-Length']))
got_mime_docs = []
for mime_doc_fh in iter_multipart_mime_documents(
BytesIO(body), boundary):
headers = parse_mime_headers(mime_doc_fh)
body = mime_doc_fh.read()
got_mime_docs.append((headers, body))
self.assertEqual(len(got_mime_docs), 3)
first_range_headers = got_mime_docs[0][0]
first_range_body = got_mime_docs[0][1]
self.assertEqual(first_range_headers['Content-Range'],
'bytes 3-17/50')
self.assertEqual(first_range_headers['Content-Type'],
'application/json')
self.assertEqual(first_range_body, b'aabbbbbbbbbbccc')
second_range_headers = got_mime_docs[1][0]
second_range_body = got_mime_docs[1][1]
self.assertEqual(second_range_headers['Content-Range'],
'bytes 20-24/50')
self.assertEqual(second_range_headers['Content-Type'],
'application/json')
self.assertEqual(second_range_body, b'ccccc')
third_range_headers = got_mime_docs[2][0]
third_range_body = got_mime_docs[2][1]
self.assertEqual(third_range_headers['Content-Range'],
'bytes 35-49/50')
self.assertEqual(third_range_headers['Content-Type'],
'application/json')
self.assertEqual(third_range_body, b'ddddddddddddddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
self.assertEqual(ranges, [
'bytes=3-17,20-24,35-999999', # initial GET
None, # re-fetch top-level manifest
None, # fetch manifest-bc as sub-slo
'bytes=3-', # a_5
None, # b_10
'bytes=0-2,5-9', # c_15
'bytes=5-']) # d_20
# we set swift.source for everything but the first request
self.assertIsNone(self.app.swift_sources[0])
self.assertEqual(self.app.swift_sources[1:],
['SLO'] * (len(self.app.swift_sources) - 1))
def test_multiple_ranges_including_suffix_get_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=3-17,-21'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
ct, params = parse_content_type(headers['Content-Type'])
params = dict(params)
self.assertEqual(ct, 'multipart/byteranges')
boundary = params.get('boundary')
self.assertTrue(boundary is not None)
if six.PY3:
boundary = boundary.encode('utf-8')
got_mime_docs = []
for mime_doc_fh in iter_multipart_mime_documents(
BytesIO(body), boundary):
headers = parse_mime_headers(mime_doc_fh)
body = mime_doc_fh.read()
got_mime_docs.append((headers, body))
self.assertEqual(len(got_mime_docs), 2)
first_range_headers = got_mime_docs[0][0]
first_range_body = got_mime_docs[0][1]
self.assertEqual(first_range_headers['Content-Range'],
'bytes 3-17/50')
self.assertEqual(first_range_body, b'aabbbbbbbbbbccc')
second_range_headers = got_mime_docs[1][0]
second_range_body = got_mime_docs[1][1]
self.assertEqual(second_range_headers['Content-Range'],
'bytes 29-49/50')
self.assertEqual(second_range_body, b'cdddddddddddddddddddd')
def test_range_get_includes_whole_manifest(self):
# If the first range GET results in retrieval of the entire manifest
# body (which we can detect by looking at Content-Range), then we
# should not go make a second, non-ranged request just to retrieve the
# same bytes again.
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-999999999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
def test_range_get_beyond_manifest(self):
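        # The client's Range starts beyond the end of the manifest JSON
        # itself, so the first (ranged) GET of the manifest comes back 416
        # and SLO must re-fetch the manifest without a Range before it can
        # serve the requested bytes of the segment.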
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
self.app.register(
'GET', '/v1/AUTH_test/gettest/big_seg',
swob.HTTPOk, {'Content-Type': 'application/foo',
'Etag': big_etag}, big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/big_manifest',
swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'Etag': md5hex(big_manifest)},
big_manifest)
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
if six.PY3:
count_e = sum(1 if x == 'e' else 0
for x in body.decode('ascii', errors='replace'))
else:
count_e = sum(1 if x == 'e' else 0 for x in body)
self.assertEqual(count_e, 100000)
self.assertEqual(len(body) - count_e, 0)
self.assertEqual(
self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
('GET',
'/v1/AUTH_test/gettest/big_seg?multipart-manifest=get')])
def test_range_get_beyond_manifest_refetch_fails(self):
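        # Same ranged-GET-past-the-manifest scenario, but the un-ranged
        # refetch fails outright, so the client gets a 503.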
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPNotFound, {}, None)])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '503 Service Unavailable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_finds_old(self):
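        # The un-ranged refetch returns an *older* object (X-Backend-Timestamp
        # 1233 < 1234) than the manifest we already saw, which is
        # inconsistent, so respond 503.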
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1233'}, [b'small body'])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '503 Service Unavailable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_small_non_slo(self):
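        # The refetch finds a newer plain (non-SLO) object that is still too
        # small to satisfy the requested range, so the 416 stands.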
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1235'}, [b'small body'])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '416 Requested Range Not Satisfiable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_big_non_slo(self):
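        # The refetch (made without a Range) finds a newer plain object, so
        # its entire body is passed straight through as a 200.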
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1235'},
[b'x' * 1024 * 1024])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK') # NOT 416 or 206!
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(len(body), 1024 * 1024)
self.assertEqual(body, b'x' * 1024 * 1024)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_tombstone(self):
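        # The refetch finds a newer tombstone (404 with X-Backend-Timestamp
        # 1345), so the object is simply gone: respond 404.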
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPNotFound, {'X-Backend-Timestamp': '1345'}, None)])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '404 Not Found')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_bogus_content_range(self):
# Just a little paranoia; Swift currently sends back valid
# Content-Range headers, but if somehow someone sneaks an invalid one
# in there, we'll ignore it.
def content_range_breaker_factory(app):
def content_range_breaker(env, start_response):
req = swob.Request(env)
resp = req.get_response(app)
resp.headers['Content-Range'] = 'triscuits'
return resp(env, start_response)
return content_range_breaker
self.slo = slo.filter_factory({})(
content_range_breaker_factory(self.app))
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-999999999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
def test_range_get_manifest_on_segment_boundaries(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=5-29'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '25')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
self.assertEqual(body, b'bbbbbbbbbbccccccccccccccc')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get')])
headers = [c[2] for c in self.app.calls_with_headers]
self.assertEqual(headers[0].get('Range'), 'bytes=5-29')
self.assertIsNone(headers[1].get('Range'))
self.assertIsNone(headers[2].get('Range'))
self.assertIsNone(headers[3].get('Range'))
self.assertIsNone(headers[4].get('Range'))
def test_range_get_manifest_first_byte(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-0'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '1')
self.assertEqual(body, b'a')
# Make sure we don't get any objects we don't need, including
# submanifests.
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get')])
def test_range_get_manifest_sub_slo(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=25-30'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '6')
self.assertEqual(body, b'cccccd')
# Make sure we don't get any objects we don't need, including
# submanifests.
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
def test_range_get_manifest_overlapping_end(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=45-55'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '5')
self.assertEqual(body, b'ddddd')
def test_range_get_manifest_unsatisfiable(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100-200'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '416 Requested Range Not Satisfiable')
def test_get_segment_with_non_ascii_path(self):
segment_body = u"a møøse once bit my sister".encode("utf-8")
segment_etag = md5(segment_body, usedforsecurity=False).hexdigest()
if six.PY2:
path = u'/v1/AUTH_test/ünicode/öbject-segment'.encode('utf-8')
else:
path = str_to_wsgi(u'/v1/AUTH_test/ünicode/öbject-segment')
self.app.register(
'GET', path,
swob.HTTPOk, {'Content-Length': str(len(segment_body)),
'Etag': segment_etag},
segment_body)
manifest_json = json.dumps([{'name': u'/ünicode/öbject-segment',
'hash': segment_etag,
'content_type': 'text/plain',
'bytes': len(segment_body)}])
if six.PY2:
path = u'/v1/AUTH_test/ünicode/manifest'.encode('utf-8')
else:
path = str_to_wsgi(u'/v1/AUTH_test/ünicode/manifest')
self.app.register(
'GET', path,
swob.HTTPOk, {'Content-Type': 'application/json',
'Content-Length': str(len(manifest_json)),
'X-Static-Large-Object': 'true'},
manifest_json.encode('ascii'))
req = Request.blank(
str_to_wsgi('/v1/AUTH_test/ünicode/manifest'),
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(body, segment_body)
def test_get_range_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd-ranges',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '32')
self.assertEqual(headers['Content-Type'], 'application/json')
self.assertEqual(body, b'aaaaaaaaccccccccbbbbbbbbdddddddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
self.assertEqual(ranges, [
None,
None,
'bytes=0-3,1-',
'bytes=0-3,11-',
'bytes=4-7,2-5',
'bytes=0-3,8-11'])
# we set swift.source for everything but the first request
self.assertIsNone(self.app.swift_sources[0])
self.assertEqual(
self.app.swift_sources[1:],
['SLO'] * (len(self.app.swift_sources) - 1)
)
self.assertEqual(md5hex(''.join([
md5hex('a' * 5), ':0-3;',
md5hex('a' * 5), ':1-4;',
self.bc_ranges_etag, ':8-15;',
self.bc_ranges_etag, ':0-7;',
md5hex('d' * 20), ':0-3;',
md5hex('d' * 20), ':8-11;',
])), headers['Etag'].strip('"'))
def test_get_subrange_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd-subranges',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '17')
self.assertEqual(headers['Content-Type'], 'application/json')
self.assertEqual(body, b'aacccdccbbbabbddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd-subranges'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
self.assertEqual(ranges, [
None,
None,
None,
'bytes=3-',
'bytes=0-2',
'bytes=11-11',
'bytes=13-',
'bytes=4-6',
'bytes=0-0',
'bytes=4-5',
'bytes=0-2'])
# we set swift.source for everything but the first request
self.assertIsNone(self.app.swift_sources[0])
self.assertEqual(self.app.swift_sources[1:],
['SLO'] * (len(self.app.swift_sources) - 1))
def test_range_get_range_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd-ranges',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=7-26'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '20')
self.assertEqual(headers['Content-Type'], 'application/json')
self.assertIn('Etag', headers)
self.assertEqual(body, b'accccccccbbbbbbbbddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
self.assertEqual(ranges, [
'bytes=7-26',
None,
None,
'bytes=4-',
'bytes=0-3,11-',
'bytes=4-7,2-5',
'bytes=0-2'])
# we set swift.source for everything but the first request
self.assertIsNone(self.app.swift_sources[0])
self.assertEqual(self.app.swift_sources[1:],
['SLO'] * (len(self.app.swift_sources) - 1))
def test_range_get_subrange_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd-subranges',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=4-12'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '9')
self.assertEqual(headers['Content-Type'], 'application/json')
self.assertEqual(body, b'cdccbbbab')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd-subranges'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd-subranges'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
self.assertEqual(ranges, [
'bytes=4-12',
None,
None,
None,
'bytes=2-2',
'bytes=11-11',
'bytes=13-',
'bytes=4-6',
'bytes=0-0',
'bytes=4-4'])
# we set swift.source for everything but the first request
self.assertIsNone(self.app.swift_sources[0])
self.assertEqual(self.app.swift_sources[1:],
['SLO'] * (len(self.app.swift_sources) - 1))
def test_range_get_includes_whole_range_manifest(self):
# If the first range GET results in retrieval of the entire manifest
# body (which we can detect by looking at Content-Range), then we
# should not go make a second, non-ranged request just to retrieve the
# same bytes again.
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd-ranges',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-999999999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '32')
self.assertEqual(headers['Content-Type'], 'application/json')
self.assertEqual(body, b'aaaaaaaaccccccccbbbbbbbbdddddddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
self.assertEqual(ranges, [
'bytes=0-999999999',
None,
'bytes=0-3,1-',
'bytes=0-3,11-',
'bytes=4-7,2-5',
'bytes=0-3,8-11'])
# we set swift.source for everything but the first request
self.assertIsNone(self.app.swift_sources[0])
self.assertEqual(self.app.swift_sources[1:],
['SLO'] * (len(self.app.swift_sources) - 1))
def test_get_bogus_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-badjson',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '0')
self.assertEqual(headers['X-Object-Meta-Fish'], 'Bass')
self.assertEqual(body, b'')
def _do_test_generator_closure(self, leaks):
# Test that the SLO WSGI iterable closes its internal .app_iter when
# it receives a close() message.
#
# This is sufficient to fix a memory leak. The memory leak arises
        # due to cyclic references involving a running generator; a running
        # generator can prevent the GC from collecting the cycle, in the
        # same way that an object with a defined __del__ does.
#
# There are other ways to break the cycle and fix the memory leak as
# well; calling .close() on the generator is sufficient, but not
# necessary. However, having this test is better than nothing for
# preventing regressions.
class LeakTracker(object):
def __init__(self, inner_iter):
leaks[0] += 1
self.inner_iter = iter(inner_iter)
def __iter__(self):
return self
def next(self):
return next(self.inner_iter)
__next__ = next
def close(self):
leaks[0] -= 1
close_if_possible(self.inner_iter)
class LeakTrackingSegmentedIterable(slo.SegmentedIterable):
def _internal_iter(self, *a, **kw):
it = super(
LeakTrackingSegmentedIterable, self)._internal_iter(
*a, **kw)
return LeakTracker(it)
status = [None]
headers = [None]
def start_response(s, h, ei=None):
status[0] = s
headers[0] = h
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET',
'HTTP_ACCEPT': 'application/json'})
# can't self.call_slo() here since we don't want to consume the
# whole body
with patch.object(slo, 'SegmentedIterable',
LeakTrackingSegmentedIterable):
app_resp = self.slo(req.environ, start_response)
self.assertEqual(status[0], '200 OK') # sanity check
return app_resp
def test_generator_closure(self):
leaks = [0]
app_resp = self._do_test_generator_closure(leaks)
body_iter = iter(app_resp)
chunk = next(body_iter)
self.assertEqual(chunk, b'aaaaa') # sanity check
app_resp.close()
self.assertEqual(0, leaks[0])
def test_generator_closure_iter_app_resp(self):
# verify that the result of iter(app_resp) has a close method that
# closes app_resp
leaks = [0]
app_resp = self._do_test_generator_closure(leaks)
body_iter = iter(app_resp)
chunk = next(body_iter)
self.assertEqual(chunk, b'aaaaa') # sanity check
close_method = getattr(body_iter, 'close', None)
self.assertIsNotNone(close_method)
self.assertTrue(callable(close_method))
close_method()
self.assertEqual(0, leaks[0])
def test_head_manifest_is_efficient(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'HEAD'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '50')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
self.assertEqual(headers['X-Manifest-Etag'],
self.abcd_manifest_json_etag)
self.assertEqual(body, b'')
# Note the lack of recursive descent into manifest-bc. We know the
# content-length from the outer manifest, so there's no need for any
# submanifest fetching here, but a naïve implementation might do it
# anyway.
self.assertEqual(self.app.calls, [
('HEAD', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd')])
def test_recursion_limit(self):
# man1 points to obj1 and man2, man2 points to obj2 and man3...
for i in range(20):
self.app.register('GET', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk, {'Content-Type': 'text/plain',
'Etag': md5hex('body%02d' % i)},
b'body%02d' % i)
manifest_json = json.dumps([{'name': '/gettest/obj20',
'hash': md5hex('body20'),
'content_type': 'text/plain',
'bytes': '6'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
submanifest_bytes = 6
for i in range(19, 0, -1):
manifest_data = [
{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%02d' % i),
'bytes': '6',
'content_type': 'text/plain'},
{'data': base64.b64encode(b'-' * 3).decode('ascii')},
{'name': '/gettest/man%d' % (i + 1),
'hash': 'man%d' % (i + 1),
'sub_slo': True,
'bytes': submanifest_bytes,
'content_type': 'application/json'}]
submanifest_bytes += 9
manifest_json = json.dumps(manifest_data)
self.app.register(
'GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
req = Request.blank(
'/v1/AUTH_test/gettest/man1',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
# we don't know at header-sending time that things are going to go
# wrong, so we end up with a 200 and a truncated body
self.assertEqual(status, '200 OK')
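        # each of the 19 wrapping manifests contributes a 6-byte object plus
        # 3 bytes of inline data (9 bytes apiece), plus the innermost 6-byte
        # obj20: 9 * 19 + 6 bytes in total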
self.assertEqual(headers['Content-Length'], str(9 * 19 + 6))
self.assertEqual(body, (
b'body01---body02---body03---body04---body05---'
b'body06---body07---body08---body09---body10---'))
# but the error shows up in logs
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
"While processing manifest '/v1/AUTH_test/gettest/man1', "
"max recursion depth was exceeded"
])
# make sure we didn't keep asking for segments
self.assertEqual(self.app.call_count, 20)
def test_sub_slo_recursion(self):
# man1 points to man2 and obj1, man2 points to man3 and obj2...
for i in range(11):
self.app.register('GET', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk, {'Content-Type': 'text/plain',
'Content-Length': '6',
'Etag': md5hex('body%02d' % i)},
b'body%02d' % i)
manifest_json = json.dumps([{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%2d' % i),
'content_type': 'text/plain',
'bytes': '6'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
self.app.register(
'HEAD', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk, {'Content-Length': '6',
'Etag': md5hex('body%2d' % i)},
None)
for i in range(9, 0, -1):
manifest_data = [
{'name': '/gettest/man%d' % (i + 1),
'hash': 'man%d' % (i + 1),
'sub_slo': True,
'bytes': (10 - i) * 6,
'content_type': 'application/json'},
{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%02d' % i),
'bytes': '6',
'content_type': 'text/plain'}]
manifest_json = json.dumps(manifest_data)
self.app.register(
'GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
req = Request.blank(
'/v1/AUTH_test/gettest/man1',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertEqual(body, (b'body10body09body08body07body06'
b'body05body04body03body02body01'))
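        # ten manifest GETs plus ten segment GETs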
self.assertEqual(self.app.call_count, 20)
def test_sub_slo_recursion_limit(self):
# man1 points to man2 and obj1, man2 points to man3 and obj2...
for i in range(12):
self.app.register('GET', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk,
{'Content-Type': 'text/plain',
'Content-Length': '6',
'Etag': md5hex('body%02d' % i)},
b'body%02d' % i)
manifest_json = json.dumps([{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%2d' % i),
'content_type': 'text/plain',
'bytes': '6'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
self.app.register(
'HEAD', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk, {'Content-Length': '6',
'Etag': md5hex('body%2d' % i)},
None)
for i in range(11, 0, -1):
manifest_data = [
{'name': '/gettest/man%d' % (i + 1),
'hash': 'man%d' % (i + 1),
'sub_slo': True,
'bytes': (12 - i) * 6,
'content_type': 'application/json'},
{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%02d' % i),
'bytes': '6',
'content_type': 'text/plain'}]
manifest_json = json.dumps(manifest_data)
self.app.register('GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk,
{'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
req = Request.blank(
'/v1/AUTH_test/gettest/man1',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '409 Conflict')
self.assertEqual(self.app.call_count, 10)
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
"While processing manifest '/v1/AUTH_test/gettest/man1', "
"max recursion depth was exceeded"
])
def test_get_with_if_modified_since(self):
# It's important not to pass the If-[Un]Modified-Since header to the
# proxy for segment or submanifest GET requests, as it may result in
# 304 Not Modified responses, and those don't contain any useful data.
req = swob.Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Modified-Since': 'Wed, 12 Feb 2014 22:24:52 GMT',
'If-Unmodified-Since': 'Thu, 13 Feb 2014 23:25:53 GMT'})
status, headers, body = self.call_slo(req)
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [])
for _, _, hdrs in self.app.calls_with_headers[1:]:
self.assertFalse('If-Modified-Since' in hdrs)
self.assertFalse('If-Unmodified-Since' in hdrs)
def test_error_fetching_segment(self):
self.app.register('GET', '/v1/AUTH_test/gettest/c_15',
swob.HTTPUnauthorized, {}, None)
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(b"aaaaabbbbbbbbbb", body)
self.assertEqual(self.app.unread_requests, {})
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'While processing manifest /v1/AUTH_test/gettest/manifest-abcd, '
'got 401 (<html><h1>Unauthorized</h1><p>This server could not '
'verif...) while retrieving /v1/AUTH_test/gettest/c_15'
])
self.assertEqual(self.app.calls, [
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
# This one has the error, and so is the last one we fetch.
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get')])
def test_error_fetching_submanifest(self):
self.app.register('GET', '/v1/AUTH_test/gettest/manifest-bc',
swob.HTTPUnauthorized, {}, None)
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual("200 OK", status)
self.assertEqual(b"aaaaa", body)
self.assertEqual(self.app.unread_requests, {})
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'while fetching /v1/AUTH_test/gettest/manifest-abcd, GET of '
'submanifest /v1/AUTH_test/gettest/manifest-bc failed with '
'status 401 (<html><h1>Unauthorized</h1><p>This server could '
'not verif...)'
])
self.assertEqual(self.app.calls, [
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
# This one has the error, and so is the last one we fetch.
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
# But we were looking ahead to see if we could combine ranges,
# so we still get the first segment out
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get')])
def test_error_fetching_first_segment_submanifest(self):
# This differs from the normal submanifest error because this one
# happens before we've actually sent any response body.
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-a',
swob.HTTPForbidden, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-manifest-a',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/manifest-a', 'sub_slo': True,
'content_type': 'application/json',
'hash': 'manifest-a',
'bytes': '12345'}]))
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-manifest-a',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('409 Conflict', status)
self.assertEqual(self.app.unread_requests, {})
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'while fetching /v1/AUTH_test/gettest/manifest-manifest-a, GET '
'of submanifest /v1/AUTH_test/gettest/manifest-a failed with '
'status 403 (<html><h1>Forbidden</h1><p>Access was denied to '
'this reso...)'
])
def test_invalid_json_submanifest(self):
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-bc',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Plant': 'Ficus'},
"[this {isn't (JSON")
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'aaaaa')
if six.PY2:
error = "No JSON object could be decoded"
else:
error = "Expecting value: line 1 column 2 (char 1)"
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'while fetching /v1/AUTH_test/gettest/manifest-abcd, '
'JSON-decoding of submanifest /v1/AUTH_test/gettest/manifest-bc '
'failed with %s' % error
])
def test_mismatched_etag(self):
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-a-b-badetag-c',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/a_5', 'hash': md5hex('a' * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/b_10', 'hash': 'wrong!',
'content_type': 'text/plain', 'bytes': '10'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
'content_type': 'text/plain', 'bytes': '15'}]))
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-a-b-badetag-c',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'aaaaa')
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'Object segment no longer valid: /v1/AUTH_test/gettest/b_10 '
'etag: 82136b4240d6ce4ea7d03e51469a393b != wrong! or 10 != 10.'
])
def test_mismatched_size(self):
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-a-b-badsize-c',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/a_5', 'hash': md5hex('a' * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
'content_type': 'text/plain', 'bytes': '999999'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
'content_type': 'text/plain', 'bytes': '15'}]))
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-a-b-badsize-c',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'aaaaa')
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'Object segment no longer valid: /v1/AUTH_test/gettest/b_10 '
'etag: 82136b4240d6ce4ea7d03e51469a393b != '
'82136b4240d6ce4ea7d03e51469a393b or 10 != 999999.'
])
def test_mismatched_checksum(self):
self.app.register(
'GET', '/v1/AUTH_test/gettest/a_5',
swob.HTTPOk, {'Content-Length': '5',
'Etag': md5hex('a' * 5)},
# this segment has invalid content
'x' * 5)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
'content_type': 'text/plain', 'bytes': '10'},
{'name': '/gettest/a_5', 'hash': md5hex('a' * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
'content_type': 'text/plain', 'bytes': '15'}]))
req = Request.blank('/v1/AUTH_test/gettest/manifest')
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, (b'b' * 10 + b'x' * 5))
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'Bad MD5 checksum for /v1/AUTH_test/gettest/a_5 as part of '
'/v1/AUTH_test/gettest/manifest: headers had '
'594f803b380a41396ed63dca39503542, but object MD5 was '
'actually fb0e22c79ac75679e9881e6ba183b354',
])
def test_mismatched_length(self):
self.app.register(
'GET', '/v1/AUTH_test/gettest/a_5',
swob.HTTPOk, {'Content-Length': '5',
'Etag': md5hex('a' * 5)},
# this segment comes up short
[b'a' * 4])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
'content_type': 'text/plain', 'bytes': '10'},
{'name': '/gettest/a_5', 'hash': md5hex('a' * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
'content_type': 'text/plain', 'bytes': '15'}]))
req = Request.blank('/v1/AUTH_test/gettest/manifest')
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, (b'b' * 10 + b'a' * 4))
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'Bad response length for /v1/AUTH_test/gettest/a_5 as part of '
'/v1/AUTH_test/gettest/manifest: headers had 5, but '
'response length was actually 4',
])
def test_first_segment_mismatched_etag(self):
self.app.register('GET', '/v1/AUTH_test/gettest/manifest-badetag',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/a_5',
'hash': 'wrong!',
'content_type': 'text/plain',
'bytes': '5'}]))
req = Request.blank('/v1/AUTH_test/gettest/manifest-badetag',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('409 Conflict', status)
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'Object segment no longer valid: /v1/AUTH_test/gettest/a_5 '
'etag: 594f803b380a41396ed63dca39503542 != wrong! or 5 != 5.'
])
def test_first_segment_mismatched_size(self):
self.app.register('GET', '/v1/AUTH_test/gettest/manifest-badsize',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/a_5',
'hash': md5hex('a' * 5),
'content_type': 'text/plain',
'bytes': '999999'}]))
req = Request.blank('/v1/AUTH_test/gettest/manifest-badsize',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('409 Conflict', status)
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'Object segment no longer valid: /v1/AUTH_test/gettest/a_5 '
'etag: 594f803b380a41396ed63dca39503542 != '
'594f803b380a41396ed63dca39503542 or 5 != 999999.'
])
@patch('swift.common.request_helpers.time')
def test_download_takes_too_long(self, mock_time):
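        # time.time() is mocked so that each segment read appears to take ten
        # hours; by the third segment the cumulative transfer time exceeds
        # the 86400 s (24 hour) limit, the remaining segment (d_20) is never
        # fetched, and the error can only be logged because the 200 response
        # header has already been sent.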
mock_time.time.side_effect = [
0, # start time
10 * 3600, # a_5
20 * 3600, # b_10
30 * 3600, # c_15, but then we time out
]
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'While processing manifest /v1/AUTH_test/gettest/manifest-abcd, '
'max LO GET time of 86400s exceeded'
])
self.assertEqual(self.app.calls, [
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get')])
def test_first_segment_not_exists(self):
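        # The very first segment 404s before any part of the response body
        # has been sent, so the failure can still be reported to the client:
        # the segment's 404 surfaces as a 409 Conflict on the manifest GET.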
self.app.register('GET', '/v1/AUTH_test/gettest/not_exists_obj',
swob.HTTPNotFound, {}, None)
self.app.register('GET', '/v1/AUTH_test/gettest/manifest-not-exists',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/not_exists_obj',
'hash': md5hex('not_exists_obj'),
'content_type': 'text/plain',
'bytes': '%d' % len('not_exists_obj')
}]))
req = Request.blank('/v1/AUTH_test/gettest/manifest-not-exists',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('409 Conflict', status)
self.assertEqual(self.app.unread_requests, {})
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'While processing manifest /v1/AUTH_test/gettest/'
'manifest-not-exists, got 404 (<html><h1>Not Found</h1><p>The '
'resource could not be foun...) while retrieving /v1/AUTH_test/'
'gettest/not_exists_obj'
])
def test_first_segment_not_available(self):
self.app.register('GET', '/v1/AUTH_test/gettest/not_avail_obj',
swob.HTTPServiceUnavailable, {}, None)
self.app.register('GET', '/v1/AUTH_test/gettest/manifest-not-avail',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/gettest/not_avail_obj',
'hash': md5hex('not_avail_obj'),
'content_type': 'text/plain',
'bytes': '%d' % len('not_avail_obj')
}]))
req = Request.blank('/v1/AUTH_test/gettest/manifest-not-avail',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('503 Service Unavailable', status)
self.assertEqual(self.app.unread_requests, {})
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
'While processing manifest /v1/AUTH_test/gettest/'
'manifest-not-avail, got 503 (<html><h1>Service Unavailable</h1>'
'<p>The server is curren...) while retrieving /v1/AUTH_test/'
'gettest/not_avail_obj'
])
self.assertIn(b'Service Unavailable', body)
def test_leading_data_segment(self):
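        # An SLO manifest may include inline 'data' segments: small
        # base64-encoded chunks stored in the manifest itself and spliced
        # into the response alongside the referenced objects. Here a literal
        # preamble precedes the single object segment.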
slo_etag = md5hex(
md5hex('preamble') +
md5hex('a' * 5)
)
preamble = base64.b64encode(b'preamble')
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-single-preamble',
swob.HTTPOk,
{
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'
},
json.dumps([{
'data': preamble.decode('ascii')
}, {
'name': '/gettest/a_5',
'hash': md5hex('a' * 5),
'content_type': 'text/plain',
'bytes': '5',
}])
)
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-preamble',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'preambleaaaaa')
self.assertIn(('Etag', '"%s"' % slo_etag), headers)
self.assertIn(('Content-Length', '13'), headers)
def test_trailing_data_segment(self):
slo_etag = md5hex(
md5hex('a' * 5) +
md5hex('postamble')
)
postamble = base64.b64encode(b'postamble')
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-single-postamble',
swob.HTTPOk,
{
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'
},
json.dumps([{
'name': '/gettest/a_5',
'hash': md5hex('a' * 5),
'content_type': 'text/plain',
'bytes': '5',
}, {
'data': postamble.decode('ascii')
}]).encode('ascii')
)
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-postamble',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'aaaaapostamble')
self.assertIn(('Etag', '"%s"' % slo_etag), headers)
self.assertIn(('Content-Length', '14'), headers)
def test_data_segment_sandwich(self):
slo_etag = md5hex(
md5hex('preamble') +
md5hex('a' * 5) +
md5hex('postamble')
)
preamble = base64.b64encode(b'preamble')
postamble = base64.b64encode(b'postamble')
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-single-prepostamble',
swob.HTTPOk,
{
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'
},
json.dumps([{
'data': preamble.decode('ascii'),
}, {
'name': '/gettest/a_5',
'hash': md5hex('a' * 5),
'content_type': 'text/plain',
'bytes': '5',
}, {
'data': postamble.decode('ascii')
}])
)
# Test the whole SLO
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'preambleaaaaapostamble')
self.assertIn(('Etag', '"%s"' % slo_etag), headers)
self.assertIn(('Content-Length', '22'), headers)
# Test complete preamble only
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-7'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'preamble')
# Test range within preamble only
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=1-5'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'reamb')
# Test complete postamble only
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=13-21'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'postamble')
# Test partial pre and postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=4-16'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'mbleaaaaapost')
# Test partial preamble and first byte of data
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=1-8'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'reamblea')
# Test last byte of segment data and partial postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=12-16'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'apost')
def test_bunches_of_data_segments(self):
slo_etag = md5hex(
md5hex('ABCDEF') +
md5hex('a' * 5) +
md5hex('123456') +
md5hex('GHIJKL') +
md5hex('b' * 10) +
md5hex('7890@#')
)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-multi-prepostamble',
swob.HTTPOk,
{
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'
},
json.dumps([
{
'data': base64.b64encode(b'ABCDEF').decode('ascii')
},
{
'name': '/gettest/a_5',
'hash': md5hex('a' * 5),
'content_type': 'text/plain',
'bytes': '5',
},
{
'data': base64.b64encode(b'123456').decode('ascii')
},
{
'data': base64.b64encode(b'GHIJKL').decode('ascii')
},
{
'name': '/gettest/b_10',
'hash': md5hex('b' * 10),
'content_type': 'text/plain',
'bytes': '10',
},
{
'data': base64.b64encode(b'7890@#').decode('ascii')
}
])
)
# Test the whole SLO
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'ABCDEFaaaaa123456GHIJKLbbbbbbbbbb7890@#')
self.assertIn(('Etag', '"%s"' % slo_etag), headers)
self.assertIn(('Content-Length', '39'), headers)
        # Test last byte of first preamble to first byte of second postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=5-33'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'Faaaaa123456GHIJKLbbbbbbbbbb7')
# Test only second complete preamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=17-22'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'GHIJKL')
# Test only first complete postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=11-16'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'123456')
# Test only range within first postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=12-15'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'2345')
# Test only range within first postamble and second preamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=12-18'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'23456GH')
class TestSloConditionalGetOldManifest(SloTestCase):
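    # Conditional GET tests (If-Match / If-None-Match, with and without an
    # X-Backend-Etag-Is-At override) against a manifest that carries no SLO
    # sysmeta; TestSloConditionalGetNewManifest below re-runs the same tests
    # with X-Object-Sysmeta-Slo-Etag/-Size present on the manifest.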
slo_data = [
{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/manifest-bc', 'sub_slo': True,
'content_type': 'application/json',
'hash': md5hex(md5hex("b" * 10) + md5hex("c" * 15)),
'bytes': 25},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'}]
slo_etag = md5hex(''.join(seg['hash'] for seg in slo_data))
def setUp(self):
super(TestSloConditionalGetOldManifest, self).setUp()
# some plain old objects
self.app.register(
'GET', '/v1/AUTH_test/gettest/a_5',
swob.HTTPOk, {'Content-Length': '5',
'Etag': md5hex('a' * 5)},
b'a' * 5)
self.app.register(
'GET', '/v1/AUTH_test/gettest/b_10',
swob.HTTPOk, {'Content-Length': '10',
'Etag': md5hex('b' * 10)},
b'b' * 10)
self.app.register(
'GET', '/v1/AUTH_test/gettest/c_15',
swob.HTTPOk, {'Content-Length': '15',
'Etag': md5hex('c' * 15)},
b'c' * 15)
self.app.register(
'GET', '/v1/AUTH_test/gettest/d_20',
swob.HTTPOk, {'Content-Length': '20',
'Etag': md5hex('d' * 20)},
b'd' * 20)
_bc_manifest_json = json.dumps(
[{'name': '/gettest/b_10', 'hash': md5hex('b' * 10), 'bytes': '10',
'content_type': 'text/plain'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15), 'bytes': '15',
'content_type': 'text/plain'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-bc',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Plant': 'Ficus',
'Etag': md5hex(_bc_manifest_json)},
_bc_manifest_json)
_abcd_manifest_json = json.dumps(self.slo_data)
self.abcd_manifest_json_etag = md5hex(_abcd_manifest_json)
manifest_headers = {
'Content-Length': str(len(_abcd_manifest_json)),
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': self.abcd_manifest_json_etag,
'X-Object-Sysmeta-Custom-Etag': 'a custom etag'}
manifest_headers.update(getattr(self, 'extra_manifest_headers', {}))
self.manifest_has_sysmeta = all(h in manifest_headers for h in (
'X-Object-Sysmeta-Slo-Etag', 'X-Object-Sysmeta-Slo-Size'))
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd',
swob.HTTPOk, manifest_headers,
_abcd_manifest_json.encode('ascii'))
def test_if_none_match_matches(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': self.slo_etag})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '304 Not Modified')
self.assertIn(('Content-Length', '0'), headers)
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertEqual(body, b'')
expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
if not self.manifest_has_sysmeta:
# We *still* verify the first segment
expected_app_calls.extend([
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
])
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
'x-object-sysmeta-slo-etag')
def test_if_none_match_does_not_match(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': "not-%s" % self.slo_etag})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertIn(('Content-Length', '50'), headers)
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
expected_app_calls = [
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
]
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
'x-object-sysmeta-slo-etag')
def test_if_match_matches(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': self.slo_etag})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertIn(('Content-Length', '50'), headers)
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
if not self.manifest_has_sysmeta:
# Manifest never matches -> got back a 412; need to re-fetch
expected_app_calls.append(
('GET', '/v1/AUTH_test/gettest/manifest-abcd'))
expected_app_calls.extend([
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
])
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
'x-object-sysmeta-slo-etag')
def test_if_match_does_not_match(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': "not-%s" % self.slo_etag})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '412 Precondition Failed')
self.assertIn(('Content-Length', '0'), headers)
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertEqual(body, b'')
expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
if not self.manifest_has_sysmeta:
# We *still* verify the first segment
expected_app_calls.extend([
# Manifest never matches -> got back a 412; need to re-fetch
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
])
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
'x-object-sysmeta-slo-etag')
def test_if_none_match_matches_with_override(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '"a custom etag"',
'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Custom-Etag'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '304 Not Modified')
self.assertIn(('Content-Length', '0'), headers)
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('X-Object-Sysmeta-Custom-Etag', 'a custom etag'),
headers)
self.assertEqual(body, b'')
expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
if not self.manifest_has_sysmeta:
            # NB: no known middleware would have written a custom etag with
            # old-style manifests, but if one had, here's what would happen
expected_app_calls.extend([
# 304, so gotta refetch
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
# Since the "authoritative" etag didn't come from slo, we still
# verify the first segment
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
])
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(
self.app.headers[0].get('X-Backend-Etag-Is-At'),
'X-Object-Sysmeta-Custom-Etag,x-object-sysmeta-slo-etag')
def test_if_none_match_does_not_match_with_override(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': "%s" % self.slo_etag,
'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Custom-Etag'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertIn(('Content-Length', '50'), headers)
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('X-Object-Sysmeta-Custom-Etag', 'a custom etag'),
headers)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
expected_app_calls = [
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
]
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(
self.app.headers[0].get('X-Backend-Etag-Is-At'),
'X-Object-Sysmeta-Custom-Etag,x-object-sysmeta-slo-etag')
def test_if_match_matches_with_override(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '"a custom etag"',
'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Custom-Etag'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertIn(('Content-Length', '50'), headers)
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('X-Object-Sysmeta-Custom-Etag', 'a custom etag'),
headers)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
expected_app_calls = [
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
# Match on the override from left of us; no need to refetch
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
]
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(
self.app.headers[0].get('X-Backend-Etag-Is-At'),
'X-Object-Sysmeta-Custom-Etag,x-object-sysmeta-slo-etag')
def test_if_match_does_not_match_with_override(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': "%s" % self.slo_etag,
'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Custom-Etag'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '412 Precondition Failed')
self.assertIn(('Content-Length', '0'), headers)
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
self.assertIn(('X-Object-Sysmeta-Custom-Etag', 'a custom etag'),
headers)
self.assertEqual(body, b'')
expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
if not self.manifest_has_sysmeta:
            # NB: no known middleware would have written a custom etag with
            # old-style manifests, but if one had, here's what would happen
expected_app_calls.extend([
# Manifest never matches -> got back a 412; need to re-fetch
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
# We *still* verify the first segment, even though we'll 412
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
])
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(
self.app.headers[0].get('X-Backend-Etag-Is-At'),
'X-Object-Sysmeta-Custom-Etag,x-object-sysmeta-slo-etag')
def test_if_match_matches_and_range(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': self.slo_etag,
'Range': 'bytes=3-6'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '206 Partial Content')
self.assertIn(('Content-Length', '4'), headers)
self.assertIn(('Etag', '"%s"' % self.manifest_abcd_etag), headers)
self.assertEqual(body, b'aabb')
expected_app_calls = [
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            # Needed to re-fetch because of the Range request (and, for old
            # manifests, because of the 412)
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
]
self.assertEqual(self.app.calls, expected_app_calls)
self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
'x-object-sysmeta-slo-etag')
def test_if_match_matches_passthrough(self):
# first fetch and stash the manifest etag
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd?multipart-manifest=get',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
headers = HeaderKeyDict(headers)
self.assertEqual('application/json; charset=utf-8',
headers['Content-Type'])
manifest_etag = headers['Etag']
# now use it as a condition and expect to match
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd?multipart-manifest=get',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': manifest_etag})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
headers = HeaderKeyDict(headers)
self.assertEqual(manifest_etag, headers['Etag'])
expected_app_calls = [
('GET',
'/v1/AUTH_test/gettest/manifest-abcd?multipart-manifest=get')] * 2
self.assertEqual(self.app.calls, expected_app_calls)
self.assertNotIn('X-Backend-Etag-Is-At', self.app.headers[0])
self.assertNotIn('X-Backend-Etag-Is-At', self.app.headers[1])
def test_range_resume_download(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=20-'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(body, b'ccccccccccdddddddddddddddddddd')
class TestSloConditionalGetNewManifest(TestSloConditionalGetOldManifest):
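    # Re-runs the conditional GET tests above, but setUp adds the SLO etag
    # and size as manifest sysmeta, so conditions can be checked without
    # refetching the manifest or verifying its first segment.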
def setUp(self):
self.extra_manifest_headers = {
'X-Object-Sysmeta-Slo-Etag': self.slo_etag,
'X-Object-Sysmeta-Slo-Size': '50',
}
super(TestSloConditionalGetNewManifest, self).setUp()
class TestSloBulkDeleter(unittest.TestCase):
def test_reused_logger(self):
slo_mware = slo.filter_factory({})('fake app')
self.assertTrue(slo_mware.logger is slo_mware.bulk_deleter.logger)
def test_passes_through_concurrency(self):
slo_mware = slo.filter_factory({'delete_concurrency': 5})('fake app')
self.assertEqual(5, slo_mware.bulk_deleter.delete_concurrency)
def test_uses_big_max_deletes(self):
slo_mware = slo.filter_factory(
{'max_manifest_segments': 123456789})('fake app')
self.assertGreaterEqual(
slo_mware.bulk_deleter.max_deletes_per_request,
123456789)
class TestSwiftInfo(unittest.TestCase):
def setUp(self):
utils._swift_info = {}
utils._swift_admin_info = {}
def test_registered_defaults(self):
mware = slo.filter_factory({})('have to pass in an app')
swift_info = utils.get_swift_info()
self.assertTrue('slo' in swift_info)
self.assertEqual(swift_info['slo'].get('max_manifest_segments'),
mware.max_manifest_segments)
self.assertEqual(swift_info['slo'].get('min_segment_size'), 1)
self.assertEqual(swift_info['slo'].get('max_manifest_size'),
mware.max_manifest_size)
self.assertIs(swift_info['slo'].get('allow_async_delete'), False)
self.assertEqual(1000, mware.max_manifest_segments)
self.assertEqual(8388608, mware.max_manifest_size)
self.assertEqual(1048576, mware.rate_limit_under_size)
self.assertEqual(10, mware.rate_limit_after_segment)
self.assertEqual(1, mware.rate_limit_segments_per_sec)
self.assertEqual(10, mware.yield_frequency)
self.assertEqual(2, mware.concurrency)
self.assertEqual(2, mware.bulk_deleter.delete_concurrency)
self.assertIs(False, mware.allow_async_delete)
def test_registered_non_defaults(self):
conf = dict(
max_manifest_segments=500, max_manifest_size=1048576,
rate_limit_under_size=2097152, rate_limit_after_segment=20,
rate_limit_segments_per_sec=2, yield_frequency=5, concurrency=1,
delete_concurrency=3, allow_async_delete='y')
mware = slo.filter_factory(conf)('have to pass in an app')
swift_info = utils.get_swift_info()
self.assertTrue('slo' in swift_info)
self.assertEqual(swift_info['slo'].get('max_manifest_segments'), 500)
self.assertEqual(swift_info['slo'].get('min_segment_size'), 1)
self.assertEqual(swift_info['slo'].get('max_manifest_size'), 1048576)
self.assertIs(swift_info['slo'].get('allow_async_delete'), True)
self.assertEqual(500, mware.max_manifest_segments)
self.assertEqual(1048576, mware.max_manifest_size)
self.assertEqual(2097152, mware.rate_limit_under_size)
self.assertEqual(20, mware.rate_limit_after_segment)
self.assertEqual(2, mware.rate_limit_segments_per_sec)
self.assertEqual(5, mware.yield_frequency)
self.assertEqual(1, mware.concurrency)
self.assertEqual(3, mware.bulk_deleter.delete_concurrency)
self.assertIs(True, mware.allow_async_delete)
if __name__ == '__main__':
unittest.main()
| 45.030092
| 79
| 0.551552
|
cd4bf30354610443f76723021f503d144c6e6615
| 22,440
|
py
|
Python
|
dapr/proto/common/v1/common_pb2.py
|
karishma-chawla/python-sdk
|
15e018d48418cca5e1660c8afe37403b91f6298d
|
[
"MIT"
] | null | null | null |
dapr/proto/common/v1/common_pb2.py
|
karishma-chawla/python-sdk
|
15e018d48418cca5e1660c8afe37403b91f6298d
|
[
"MIT"
] | null | null | null |
dapr/proto/common/v1/common_pb2.py
|
karishma-chawla/python-sdk
|
15e018d48418cca5e1660c8afe37403b91f6298d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: dapr/proto/common/v1/common.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='dapr/proto/common/v1/common.proto',
package='dapr.proto.common.v1',
syntax='proto3',
serialized_options=b'\n\nio.dapr.v1B\014CommonProtosZ/github.com/dapr/dapr/pkg/proto/common/v1;common\252\002\033Dapr.Client.Autogen.Grpc.v1',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n!dapr/proto/common/v1/common.proto\x12\x14\x64\x61pr.proto.common.v1\x1a\x19google/protobuf/any.proto\"\xaf\x02\n\rHTTPExtension\x12\x36\n\x04verb\x18\x01 \x01(\x0e\x32(.dapr.proto.common.v1.HTTPExtension.Verb\x12I\n\x0bquerystring\x18\x02 \x03(\x0b\x32\x34.dapr.proto.common.v1.HTTPExtension.QuerystringEntry\x1a\x32\n\x10QuerystringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"g\n\x04Verb\x12\x08\n\x04NONE\x10\x00\x12\x07\n\x03GET\x10\x01\x12\x08\n\x04HEAD\x10\x02\x12\x08\n\x04POST\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\x0b\n\x07\x43ONNECT\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x12\t\n\x05TRACE\x10\x08\"\x96\x01\n\rInvokeRequest\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\"\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12;\n\x0ehttp_extension\x18\x04 \x01(\x0b\x32#.dapr.proto.common.v1.HTTPExtension\"J\n\x0eInvokeResponse\x12\"\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\t\"\xdc\x01\n\tStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12?\n\x08metadata\x18\x04 \x03(\x0b\x32-.dapr.proto.common.v1.StateItem.MetadataEntry\x12\x33\n\x07options\x18\x05 \x01(\x0b\x32\".dapr.proto.common.v1.StateOptions\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xef\x02\n\x0cStateOptions\x12H\n\x0b\x63oncurrency\x18\x01 \x01(\x0e\x32\x33.dapr.proto.common.v1.StateOptions.StateConcurrency\x12H\n\x0b\x63onsistency\x18\x02 \x01(\x0e\x32\x33.dapr.proto.common.v1.StateOptions.StateConsistency\"h\n\x10StateConcurrency\x12\x1b\n\x17\x43ONCURRENCY_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x43ONCURRENCY_FIRST_WRITE\x10\x01\x12\x1a\n\x16\x43ONCURRENCY_LAST_WRITE\x10\x02\"a\n\x10StateConsistency\x12\x1b\n\x17\x43ONSISTENCY_UNSPECIFIED\x10\x00\x12\x18\n\x14\x43ONSISTENCY_EVENTUAL\x10\x01\x12\x16\n\x12\x43ONSISTENCY_STRONG\x10\x02\x42i\n\nio.dapr.v1B\x0c\x43ommonProtosZ/github.com/dapr/dapr/pkg/proto/common/v1;common\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3'
,
dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_HTTPEXTENSION_VERB = _descriptor.EnumDescriptor(
name='Verb',
full_name='dapr.proto.common.v1.HTTPExtension.Verb',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GET', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='HEAD', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='POST', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PUT', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DELETE', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONNECT', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OPTIONS', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRACE', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=287,
serialized_end=390,
)
_sym_db.RegisterEnumDescriptor(_HTTPEXTENSION_VERB)
_STATEOPTIONS_STATECONCURRENCY = _descriptor.EnumDescriptor(
name='StateConcurrency',
full_name='dapr.proto.common.v1.StateOptions.StateConcurrency',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='CONCURRENCY_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONCURRENCY_FIRST_WRITE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONCURRENCY_LAST_WRITE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1009,
serialized_end=1113,
)
_sym_db.RegisterEnumDescriptor(_STATEOPTIONS_STATECONCURRENCY)
_STATEOPTIONS_STATECONSISTENCY = _descriptor.EnumDescriptor(
name='StateConsistency',
full_name='dapr.proto.common.v1.StateOptions.StateConsistency',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='CONSISTENCY_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONSISTENCY_EVENTUAL', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONSISTENCY_STRONG', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1115,
serialized_end=1212,
)
_sym_db.RegisterEnumDescriptor(_STATEOPTIONS_STATECONSISTENCY)
_HTTPEXTENSION_QUERYSTRINGENTRY = _descriptor.Descriptor(
name='QuerystringEntry',
full_name='dapr.proto.common.v1.HTTPExtension.QuerystringEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='dapr.proto.common.v1.HTTPExtension.QuerystringEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='dapr.proto.common.v1.HTTPExtension.QuerystringEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=235,
serialized_end=285,
)
_HTTPEXTENSION = _descriptor.Descriptor(
name='HTTPExtension',
full_name='dapr.proto.common.v1.HTTPExtension',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='verb', full_name='dapr.proto.common.v1.HTTPExtension.verb', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='querystring', full_name='dapr.proto.common.v1.HTTPExtension.querystring', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_HTTPEXTENSION_QUERYSTRINGENTRY, ],
enum_types=[
_HTTPEXTENSION_VERB,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=87,
serialized_end=390,
)
_INVOKEREQUEST = _descriptor.Descriptor(
name='InvokeRequest',
full_name='dapr.proto.common.v1.InvokeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='method', full_name='dapr.proto.common.v1.InvokeRequest.method', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='dapr.proto.common.v1.InvokeRequest.data', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content_type', full_name='dapr.proto.common.v1.InvokeRequest.content_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='http_extension', full_name='dapr.proto.common.v1.InvokeRequest.http_extension', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=393,
serialized_end=543,
)
_INVOKERESPONSE = _descriptor.Descriptor(
name='InvokeResponse',
full_name='dapr.proto.common.v1.InvokeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data', full_name='dapr.proto.common.v1.InvokeResponse.data', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content_type', full_name='dapr.proto.common.v1.InvokeResponse.content_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=545,
serialized_end=619,
)
_STATEITEM_METADATAENTRY = _descriptor.Descriptor(
name='MetadataEntry',
full_name='dapr.proto.common.v1.StateItem.MetadataEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='dapr.proto.common.v1.StateItem.MetadataEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='dapr.proto.common.v1.StateItem.MetadataEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=795,
serialized_end=842,
)
_STATEITEM = _descriptor.Descriptor(
name='StateItem',
full_name='dapr.proto.common.v1.StateItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='dapr.proto.common.v1.StateItem.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='dapr.proto.common.v1.StateItem.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='etag', full_name='dapr.proto.common.v1.StateItem.etag', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='dapr.proto.common.v1.StateItem.metadata', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='options', full_name='dapr.proto.common.v1.StateItem.options', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_STATEITEM_METADATAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=622,
serialized_end=842,
)
_STATEOPTIONS = _descriptor.Descriptor(
name='StateOptions',
full_name='dapr.proto.common.v1.StateOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='concurrency', full_name='dapr.proto.common.v1.StateOptions.concurrency', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='consistency', full_name='dapr.proto.common.v1.StateOptions.consistency', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_STATEOPTIONS_STATECONCURRENCY,
_STATEOPTIONS_STATECONSISTENCY,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=845,
serialized_end=1212,
)
_HTTPEXTENSION_QUERYSTRINGENTRY.containing_type = _HTTPEXTENSION
_HTTPEXTENSION.fields_by_name['verb'].enum_type = _HTTPEXTENSION_VERB
_HTTPEXTENSION.fields_by_name['querystring'].message_type = _HTTPEXTENSION_QUERYSTRINGENTRY
_HTTPEXTENSION_VERB.containing_type = _HTTPEXTENSION
_INVOKEREQUEST.fields_by_name['data'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_INVOKEREQUEST.fields_by_name['http_extension'].message_type = _HTTPEXTENSION
_INVOKERESPONSE.fields_by_name['data'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_STATEITEM_METADATAENTRY.containing_type = _STATEITEM
_STATEITEM.fields_by_name['metadata'].message_type = _STATEITEM_METADATAENTRY
_STATEITEM.fields_by_name['options'].message_type = _STATEOPTIONS
_STATEOPTIONS.fields_by_name['concurrency'].enum_type = _STATEOPTIONS_STATECONCURRENCY
_STATEOPTIONS.fields_by_name['consistency'].enum_type = _STATEOPTIONS_STATECONSISTENCY
_STATEOPTIONS_STATECONCURRENCY.containing_type = _STATEOPTIONS
_STATEOPTIONS_STATECONSISTENCY.containing_type = _STATEOPTIONS
DESCRIPTOR.message_types_by_name['HTTPExtension'] = _HTTPEXTENSION
DESCRIPTOR.message_types_by_name['InvokeRequest'] = _INVOKEREQUEST
DESCRIPTOR.message_types_by_name['InvokeResponse'] = _INVOKERESPONSE
DESCRIPTOR.message_types_by_name['StateItem'] = _STATEITEM
DESCRIPTOR.message_types_by_name['StateOptions'] = _STATEOPTIONS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
HTTPExtension = _reflection.GeneratedProtocolMessageType('HTTPExtension', (_message.Message,), {
'QuerystringEntry' : _reflection.GeneratedProtocolMessageType('QuerystringEntry', (_message.Message,), {
'DESCRIPTOR' : _HTTPEXTENSION_QUERYSTRINGENTRY,
'__module__' : 'dapr.proto.common.v1.common_pb2'
# @@protoc_insertion_point(class_scope:dapr.proto.common.v1.HTTPExtension.QuerystringEntry)
})
,
'DESCRIPTOR' : _HTTPEXTENSION,
'__module__' : 'dapr.proto.common.v1.common_pb2'
# @@protoc_insertion_point(class_scope:dapr.proto.common.v1.HTTPExtension)
})
_sym_db.RegisterMessage(HTTPExtension)
_sym_db.RegisterMessage(HTTPExtension.QuerystringEntry)
InvokeRequest = _reflection.GeneratedProtocolMessageType('InvokeRequest', (_message.Message,), {
'DESCRIPTOR' : _INVOKEREQUEST,
'__module__' : 'dapr.proto.common.v1.common_pb2'
# @@protoc_insertion_point(class_scope:dapr.proto.common.v1.InvokeRequest)
})
_sym_db.RegisterMessage(InvokeRequest)
InvokeResponse = _reflection.GeneratedProtocolMessageType('InvokeResponse', (_message.Message,), {
'DESCRIPTOR' : _INVOKERESPONSE,
'__module__' : 'dapr.proto.common.v1.common_pb2'
# @@protoc_insertion_point(class_scope:dapr.proto.common.v1.InvokeResponse)
})
_sym_db.RegisterMessage(InvokeResponse)
StateItem = _reflection.GeneratedProtocolMessageType('StateItem', (_message.Message,), {
'MetadataEntry' : _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), {
'DESCRIPTOR' : _STATEITEM_METADATAENTRY,
'__module__' : 'dapr.proto.common.v1.common_pb2'
# @@protoc_insertion_point(class_scope:dapr.proto.common.v1.StateItem.MetadataEntry)
})
,
'DESCRIPTOR' : _STATEITEM,
'__module__' : 'dapr.proto.common.v1.common_pb2'
# @@protoc_insertion_point(class_scope:dapr.proto.common.v1.StateItem)
})
_sym_db.RegisterMessage(StateItem)
_sym_db.RegisterMessage(StateItem.MetadataEntry)
StateOptions = _reflection.GeneratedProtocolMessageType('StateOptions', (_message.Message,), {
'DESCRIPTOR' : _STATEOPTIONS,
'__module__' : 'dapr.proto.common.v1.common_pb2'
# @@protoc_insertion_point(class_scope:dapr.proto.common.v1.StateOptions)
})
_sym_db.RegisterMessage(StateOptions)
DESCRIPTOR._options = None
_HTTPEXTENSION_QUERYSTRINGENTRY._options = None
_STATEITEM_METADATAENTRY._options = None
# @@protoc_insertion_point(module_scope)
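# --- usage sketch (not generated code; illustrative only) ---
# The snippet below only touches fields visible in the descriptors above
# ('verb', 'querystring', 'data', 'http_extension'); the module alias
# `common_pb2` and the numeric enum value used for `verb` are assumptions.
#
# from dapr.proto.common.v1 import common_pb2
# from google.protobuf.any_pb2 import Any
#
# ext = common_pb2.HTTPExtension()
# ext.verb = 1                              # assumed enum value for GET
# ext.querystring['traceid'] = 'abc123'
#
# payload = Any(value=b'{"hello": "world"}')
# request = common_pb2.InvokeRequest(data=payload, http_extension=ext)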
| 41.943925
| 2,193
| 0.759626
|
0fcdb18342ad85935516ab6ef51e170df7a5e493
| 947
|
py
|
Python
|
Server.py
|
wiemesprit/m-v
|
01df78f470b737adb7818152e52ba79b6a6e2b0b
|
[
"MIT"
] | null | null | null |
Server.py
|
wiemesprit/m-v
|
01df78f470b737adb7818152e52ba79b6a6e2b0b
|
[
"MIT"
] | null | null | null |
Server.py
|
wiemesprit/m-v
|
01df78f470b737adb7818152e52ba79b6a6e2b0b
|
[
"MIT"
] | 1
|
2020-10-21T18:49:20.000Z
|
2020-10-21T18:49:20.000Z
|
from flask import Flask,request
import base64
import io
from PIL import Image
from main import ExecuteProcess
from ExtractInfoFromDiploma import ExecuteDiplomaProcess
app = Flask(__name__)
@app.route("/")
def hello_world():
return "Hello World"
@app.route('/getimage', methods=['POST'])
def handle_form():
    # decode the base64 image posted in the 'image' form field and save it to disk
    payload = request.form.to_dict(flat=False)
im_b64 = payload['image'][0]
im_binary = base64.b64decode(im_b64)
buf = io.BytesIO(im_binary)
img = Image.open(buf)
img.save("Uploads/permis.PNG")
return ExecuteProcess()
@app.route('/getdiploma', methods=['POST'])
def handle_form_diploma():
payload = request.form.to_dict(flat=False)
im_b64 = payload['image'][0]
im_binary = base64.b64decode(im_b64)
buf = io.BytesIO(im_binary)
img = Image.open(buf)
img.save("Uploads/diplome1.jpg")
return ExecuteDiplomaProcess()
if __name__ == "__main__":
app.run(host='0.0.0.0',port=5000)
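# --- client sketch (illustrative, not part of the server) ---
# Assumes the server above is running locally on port 5000 and that the
# `requests` library is installed; 'permis.png' is a hypothetical file name.
#
# import base64
# import requests
#
# with open('permis.png', 'rb') as f:
#     encoded = base64.b64encode(f.read()).decode('utf-8')
# response = requests.post('http://localhost:5000/getimage', data={'image': encoded})
# print(response.text)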
| 24.282051
| 56
| 0.702218
|
afb78181904e06155d08a5254b62c1f46175bb4d
| 8,135
|
py
|
Python
|
spampy/email_processor.py
|
abdullahselek/spampy
|
2bf3650fb0d8f60a9f544a910f2ff8c64c30e765
|
[
"MIT"
] | 26
|
2018-06-27T12:06:43.000Z
|
2022-01-30T23:18:21.000Z
|
spampy/email_processor.py
|
abdullahselek/spampy
|
2bf3650fb0d8f60a9f544a910f2ff8c64c30e765
|
[
"MIT"
] | 3
|
2018-12-05T09:02:05.000Z
|
2019-05-13T20:07:52.000Z
|
spampy/email_processor.py
|
abdullahselek/spampy
|
2bf3650fb0d8f60a9f544a910f2ff8c64c30e765
|
[
"MIT"
] | 10
|
2018-06-29T09:50:23.000Z
|
2020-10-07T16:32:36.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import nltk
import os
import numpy as np
import codecs
import multiprocessing as mp
from collections import Counter
from typing import Dict, List, Tuple
def preprocess(email: str) -> str:
"""
Preprocess (simplifies) raw email.
Args:
email (str):
Raw e-mail
Returns:
Processed (simplified) email
"""
# Make e-mail lower case
email = email.lower()
# Strip html tags
email = re.sub("<[^<>]+>", " ", email)
# Any numbers get replaced with the string 'number'
email = re.sub("[0-9]+", "number", email)
# Any word starting with http or https:// replaced with 'httpaddr'
email = re.sub(r"(http|https)://[^\s]*", "httpaddr", email)
# Strings with "@" in the middle are considered emails --> 'emailaddr'
email = re.sub(r"[^\s]+@[^\s]+", "emailaddr", email)
# The '$' sign gets replaced with 'dollar'
email = re.sub("[$]+", "dollar", email)
return email
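# Illustrative behaviour of `preprocess` (indicative, not asserted by the module):
#   preprocess("Win $1000 at http://spam.example.com now")
#   -> "win dollarnumber at httpaddr now"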
def create_tokenlist(email: str) -> List:
"""
Tokenizes it, creates a list of tokens in the e-mail.
Args:
email (str):
Raw e-mail
Returns:
Ordered list of tokens in the e-mail.
"""
# use NLTK porter stemmer
stemmer = nltk.stem.porter.PorterStemmer()
email = preprocess(email)
# Split the e-mail into single words by ' ', '@', '$', '/', ...
tokens = re.split(
r"[ \@\$\/\#\.\-\:\&\*\+\=\[\]\?\!\(\)\{\}\,\'\"\>\_\<\;\%]", email
)
# Loop over each word and use a stemmer to shorten it,
tokenlist = []
for token in tokens:
# Remove any non alphanumeric characters
token = re.sub("[^a-zA-Z0-9]", "", token)
# Use the Porter stemmer to stem the word
stemmed = stemmer.stem(token)
# Pass empty tokens
if not len(token):
continue
# Save a list of all unique stemmed words
tokenlist.append(stemmed)
return tokenlist
def get_vocablary_dict(
path: str = "spampy/datasets", filename: str = "vocablary.txt"
) -> Dict:
"""
Add vocablary text file content into a dictionary.
Args:
path (str):
Vocablary file folder path.
filename (str):
Vocablary file name.
Returns:
Vocablary dict.
"""
vocablary_dict = {}
with open(os.path.join(path, filename), "r") as f:
for line in f:
(val, key) = line.split()
vocablary_dict[int(val)] = key
return vocablary_dict
def get_vocablary_indices(email: str, vocablary_dict: Dict) -> List:
"""
Returns a list of indices (location) of each stemmed word in email.
Args:
email (str):
E-mail.
vocablary_dict (dict):
Vocablary dictionary created by `get_vocablary_dict`.
Returns:
Indices list.
"""
    tokenlist = create_tokenlist(email)
    # vocablary_dict maps index -> word, so invert it before looking up tokens
    word_to_index = {word: index for index, word in vocablary_dict.items()}
    index_list = [
        word_to_index[token] for token in tokenlist if token in word_to_index
    ]
    return index_list
def feature_vector_from_email(email: str, vocablary_dict: Dict) -> Dict:
"""
Returns a vector of shape (n,1) with a size of the vocablary_dict.
If the vocab word with index == 1 is in the email, first element in
this vector is 1, 0 otherwise.
Args:
email (str):
E-mail.
vocablary_dict (dict):
Vocablary dictionary created by `get_vocablary_dict`.
"""
n = len(vocablary_dict)
result = np.zeros((n, 1))
vocablary_indices = get_vocablary_indices(email, vocablary_dict)
for index in vocablary_indices:
result[index] = 1
return result
def listdir(directory: str) -> List:
"""
A specialized version of os.listdir() that ignores files that
start with a leading period.
Especially dismissing .DS_STORE s.
"""
filelist = os.listdir(directory)
return [x for x in filelist if not (x.startswith("."))]
def enron_processor(emails_dir: str, return_dict: Dict) -> Dict:
"""
A function which processes .txt email files into lists
and returns in a dictionary.
Args:
emails_dir (str):
Root folders for emails.
return_dict (dict):
Shared dict for processed datas.
"""
all_words = []
dirs = [os.path.join(emails_dir, f) for f in listdir(emails_dir)]
for d in dirs:
emails = [os.path.join(d, f) for f in listdir(d)]
for mail in emails:
with codecs.open(mail, "r", encoding="utf-8", errors="ignore") as m:
for line in m:
words = line.split()
all_words += words
dictionary = Counter(all_words)
list_to_remove = list(dictionary.keys())
return_dict["all_words"] = dictionary
return_dict["list_to_remove"] = list_to_remove
def create_enron_dictionary(root_dir: str = "spampy/datasets/enron") -> Dict:
"""
A function which create a dictionary from enron dataset.
Uses multiple process.
Args:
root_dir (str):
Root folders for enron dataset.
"""
manager = mp.Manager()
return_dict = manager.dict()
jobs = []
emails_dirs = [os.path.join(root_dir, f) for f in listdir(root_dir)]
for emails_dir in emails_dirs:
p = mp.Process(target=enron_processor, args=(emails_dir, return_dict))
jobs.append(p)
p.start()
for proc in jobs:
proc.join()
dictionary = return_dict["all_words"]
list_to_remove = return_dict["list_to_remove"]
    for item in list_to_remove:
        if not item.isalpha():
            del dictionary[item]
        elif len(item) == 1:
            del dictionary[item]
dictionary = dictionary.most_common(3000)
np.save("dict_enron.npy", dictionary)
return dictionary
def features_processor(emails_dir: str, return_dict: Dict) -> Dict:
"""
A function which processes data features into lists
and returns in a dictionary.
Args:
emails_dir (str):
Root folders for emails.
return_dict (dict):
Shared dict for processed datas.
"""
features_matrix = return_dict["features_matrix"]
train_labels = return_dict["train_labels"]
docID = 0
enron_dict = return_dict["enron_dict"]
dirs = [os.path.join(emails_dir, f) for f in os.listdir(emails_dir)]
for d in dirs:
emails = [os.path.join(d, f) for f in os.listdir(d)]
for mail in emails:
with open(mail) as m:
all_words = []
for line in m:
words = line.split()
all_words += words
for word in all_words:
wordID = 0
for i, d in enumerate(enron_dict):
                    if d[0] == word:  # compare against the current word, not the literal "word"
wordID = i
features_matrix[docID, wordID] = all_words.count(word)
train_labels[docID] = int(mail.split(".")[-2] == "spam")
docID = docID + 1
return_dict["features_matrix"] = features_matrix
return_dict["train_labels"] = train_labels
def extract_enron_features(root_dir: str = "spampy/datasets/enron") -> Tuple:
"""
A function creates features and labels from enron dataset.
Uses multiple process and returns in a tuple.
Args:
root_dir (str):
Root folders for enron dataset.
"""
enron_dict = create_enron_dictionary(root_dir)
manager = mp.Manager()
return_dict = manager.dict()
return_dict["enron_dict"] = enron_dict
features_matrix = np.zeros((33716, 3000))
train_labels = np.zeros(33716)
return_dict["features_matrix"] = features_matrix
return_dict["train_labels"] = train_labels
jobs = []
emails_dirs = [os.path.join(root_dir, f) for f in os.listdir(root_dir)]
for emails_dir in emails_dirs:
p = mp.Process(target=features_processor, args=(emails_dir, return_dict))
jobs.append(p)
p.start()
for proc in jobs:
proc.join()
features_matrix = return_dict["features_matrix"]
train_labels = return_dict["train_labels"]
return np.array(features_matrix), np.array(train_labels)
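# --- usage sketch ---
# A minimal, illustrative driver; it assumes the vocabulary file shipped at
# `spampy/datasets/vocablary.txt` is present and is not part of the module API.
if __name__ == "__main__":
    sample = "Buy now for only $100! Visit http://example.com or email sales@example.com"
    vocab = get_vocablary_dict()
    indices = get_vocablary_indices(sample, vocab)
    # how many tokens of the sample e-mail appear in the vocabulary
    print(len(indices))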
| 29.689781
| 82
| 0.608851
|
cc231815b5fbfd2f7e67e30f458c6438441ed313
| 5,717
|
py
|
Python
|
feature_engineering.py
|
renan-vieira/Data-Mining-2019.2
|
de6049e4a4386ac1c0d32d918b8db131a074fce9
|
[
"MIT"
] | null | null | null |
feature_engineering.py
|
renan-vieira/Data-Mining-2019.2
|
de6049e4a4386ac1c0d32d918b8db131a074fce9
|
[
"MIT"
] | null | null | null |
feature_engineering.py
|
renan-vieira/Data-Mining-2019.2
|
de6049e4a4386ac1c0d32d918b8db131a074fce9
|
[
"MIT"
] | null | null | null |
#=========================================
# DATA MINING PROJECT
# feature engineering
#
# @claudioalvesmonteiro
#=========================================
# import packages
import pandas as pd
from utils_functions import count_porcent, createFaixa, findUTI, categoryToDummyThreshold
import numpy as np
# import data
arq = pd.ExcelFile('data/raw_data_v0.xlsx')
cadastro = arq.parse(0)
internacao = arq.parse(1)
amb = arq.parse(2)
#==================================
# DATA CLEANING AND FEATURE ENGINEERING
#==================================
#================ CADASTRO (patient records) ===============#
#------------ cast integration code to string
amb['Cod Integração'] = [str(x) for x in amb['Cod Integração']]
cadastro['Cod Integração'] = [str(x) for x in cadastro['Cod Integração']]
internacao.dropna(subset=['Cod Integração'], inplace=True)
internacao['Cod Integração'] = [str(int(x)) for x in internacao['Cod Integração']]
#------------ neighborhood [downtown and rural area]
cadastro['bairro_centro'] = [1 if x == 'CENTRO' or x == 'CENTRO\r\n' else 0 for x in cadastro['Bairro']]
cadastro['bairro_zona_rural'] = [1 if x == 'ZONA RURAL' or x == 'ZONA RURAL\r\n' else 0 for x in cadastro['Bairro']]
#------------ city of Salvador
cadastro['cidade_salvador'] = [1 if x == 'SALVADOR' else 0 for x in cadastro['Cidade']]
#------------ sex
cadastro['sexo_feminino'] = [1 if x == 'F' else 0 for x in cadastro['Sexo']]
#------------ age
cadastro['faixa_etaria'] = createFaixa(cadastro['Idade'])
#------------ height [missing flag]
cadastro['altura_faltante'] = [1 if pd.isnull(x) else 0 for x in cadastro['Altura']]
#------------ weight
# compute median weight by age group X sex
media_peso = cadastro[['Peso', 'faixa_etaria', 'Sexo']].groupby(['Sexo', 'faixa_etaria']).median()
media_peso.columns = ['peso_faixa']
media_peso.reset_index(inplace=True)
# merge with the cadastro base
cadastro = cadastro.merge(media_peso, on =['Sexo', 'faixa_etaria'])
# impute missing weights
cadastro['peso'] = [cadastro['peso_faixa'][i] if pd.isnull(cadastro['Peso'][i]) else cadastro['Peso'][i] for i in range(len(cadastro))]
#================ AMBULATORIO (outpatient clinic) ===============#
#------------ drop duplicate outpatient procedure records
amb = amb.drop_duplicates(['Cod Integração', 'PROCEDIMENTO_COD'])
#------------ number of procedures per patient
# count procedures and rename columns
qpro = count_porcent(amb[['Cod Integração', 'PROCEDIMENTO_COD']], 'Cod Integração').drop('porcent', axis=1)
qpro.columns = ['Cod Integração', 'numero_procedimentos']
# merge with cadastro data [KEEPS ONLY CASES SEEN AT THE OUTPATIENT CLINIC]
cadastro = cadastro.merge(qpro, on='Cod Integração')
#------------ outpatient diagnoses
# apply function to generate dummies with > 1% representation in the dataset
cadastro = categoryToDummyThreshold(cadastro, amb, 'CID', 1)
#------------ outpatient procedures
# apply function to generate dummies with > 1% representation in the dataset
cadastro = categoryToDummyThreshold(cadastro, amb, 'PROCEDIMENTO_COD', 1)
# remove duplicated patients [created when the same patient has multiple procedure values]
cadastro = cadastro.drop_duplicates('Cod Integração')
#================ INTERNACAO (hospitalization) ===============#
#------------ target [1 if admitted to the ICU, 0 otherwise] [internacao table]
# capture the units the patient went through
internacao_uti = pd.DataFrame(internacao[['Cod Integração', 'Unidade']].groupby(['Cod Integração', 'Unidade']).size())
internacao_uti.reset_index(inplace=True)
# filter ICU units
internacao_uti['UTI'] = findUTI(internacao_uti['Unidade'])
internacao_uti = internacao_uti[internacao_uti['UTI'] == 1]
# remove duplicates and select columns
internacao_uti.drop_duplicates('Cod Integração', inplace=True)
internacao_uti.drop(['Unidade', 0], axis=1, inplace=True)
# merge with the cadastro base
dataset = cadastro.merge(internacao_uti, on='Cod Integração', how='left')
# assign 0 to nulls in the TARGET
dataset['UTI'] = [0 if pd.isnull(x) else x for x in dataset['UTI'] ]
#===========================
# EXPORT DATA
#===========================
#------------ select columns
dataset = dataset[['UTI','Cod Integração', 'Idade',
'bairro_centro', 'bairro_zona_rural',
'cidade_salvador', 'sexo_feminino', 'faixa_etaria', 'altura_faltante',
'peso_faixa', 'peso', 'numero_procedimentos', 'SIG_CID', 'CID_C64',
'CID_C710', 'CID_C830', 'CID_C910', 'CID_C920', 'CID_D613', 'CID_G409',
'CID_I500', 'CID_J158', 'CID_J159', 'CID_J189', 'CID_N00', 'CID_N048',
'CID_OUTROS', 'CID_Q210', 'CID_Q211', 'CID_Q213', 'CID_Z001',
'SIG_PROCEDIMENTO_COD', 'PROCEDIMENTO_COD_11112050',
'PROCEDIMENTO_COD_11112190', 'PROCEDIMENTO_COD_13051032',
'PROCEDIMENTO_COD_13051040', 'PROCEDIMENTO_COD_1358',
'PROCEDIMENTO_COD_1701001S', 'PROCEDIMENTO_COD_17011043',
'PROCEDIMENTO_COD_17016045', 'PROCEDIMENTO_COD_17018030',
'PROCEDIMENTO_COD_17023041', 'PROCEDIMENTO_COD_17034035',
'PROCEDIMENTO_COD_17039045', 'PROCEDIMENTO_COD_17049040',
'PROCEDIMENTO_COD_17055040', 'PROCEDIMENTO_COD_17056047',
'PROCEDIMENTO_COD_17059046', 'PROCEDIMENTO_COD_17064040',
'PROCEDIMENTO_COD_20202014', 'PROCEDIMENTO_COD_20203008',
'PROCEDIMENTO_COD_28011376', 'PROCEDIMENTO_COD_6067',
'PROCEDIMENTO_COD_CONMAERE', 'PROCEDIMENTO_COD_OUTROS',
'PROCEDIMENTO_COD_S255', 'PROCEDIMENTO_COD_S571',
'PROCEDIMENTO_COD_SADT', 'PROCEDIMENTO_COD_SH', 'PROCEDIMENTO_COD_SP']]
#------------ save dataset
dataset.to_csv('data/preprocessed_data.csv', index=False)
print('\nData saved with the following features:', dataset.columns)
| 40.835714
| 135
| 0.67378
|
325d053ce5923914ef87c633e2f022c1cca91594
| 4,054
|
py
|
Python
|
tests/test_account.py
|
vivialconnect/vivialconnect-python
|
a7deaae0ffd470c681810e6d23c90bd7a0bbde11
|
[
"MIT"
] | 1
|
2017-03-06T05:46:31.000Z
|
2017-03-06T05:46:31.000Z
|
tests/test_account.py
|
vivialconnect/vivialconnect-python
|
a7deaae0ffd470c681810e6d23c90bd7a0bbde11
|
[
"MIT"
] | 3
|
2017-04-11T14:08:48.000Z
|
2017-08-30T15:25:43.000Z
|
tests/test_account.py
|
vivialconnect/vivialconnect-python
|
a7deaae0ffd470c681810e6d23c90bd7a0bbde11
|
[
"MIT"
] | 2
|
2017-05-04T19:26:38.000Z
|
2017-06-21T19:39:07.000Z
|
import unittest
import vivialconnect
from tests.common import BaseTestCase
from tests.common import HTTMock
from vivialconnect.resources.account import Transaction
class AccountTest(BaseTestCase):
def test_get_account(self):
with HTTMock(
self.response_content,
body=self.load_fixture("account/account"),
headers={"Content-type": "application/json"},
):
account = vivialconnect.Account.find(6242736)
self.assertEqual("Vivial Connect", account.company_name)
def test_update_account(self):
with HTTMock(
self.response_content,
body=self.load_fixture("account/account"),
headers={"Content-type": "application/json"},
):
account = vivialconnect.Account.find(6242736)
with HTTMock(
self.response_content,
body=self.load_fixture("account/account"),
headers={"Content-type": "application/json"},
):
account.save()
def test_get_accounts(self):
with HTTMock(
self.response_content,
body=self.load_fixture("account/accounts"),
headers={"Content-type": "application/json"},
):
accounts = vivialconnect.Account.find()
self.assertEqual(3, len(accounts))
def test_count_accounts(self):
with HTTMock(
self.response_content,
body=self.load_fixture("account/count"),
headers={"Content-type": "application/json"},
):
count = vivialconnect.Account.count()
self.assertEqual(3, count)
def test_get_transactions(self):
with HTTMock(
self.response_content,
body=self.load_fixture("transaction/transactions"),
headers={"Content-type": "application/json"},
):
transactions = Transaction.find()
assert transactions is not None
assert len(transactions) > 0
def test_get_transaction_by_id(self):
with HTTMock(
self.response_content,
body=self.load_fixture("transaction/transaction-by-id"),
headers={"Content-type": "application/json"},
):
transaction_id = 3957836
transaction = Transaction.find(id_=transaction_id)
assert transaction is not None
assert transaction.id == transaction_id
def test_get_transactions_using_dates(self):
with HTTMock(
self.response_content,
body=self.load_fixture("transaction/transactions"),
headers={"Content-type": "application/json"},
):
transactions = Transaction.find(
start_time="2019-12-14T19:00:31Z", end_time="2020-12-14T19:00:31Z"
)
assert transactions is not None
assert len(transactions) > 0
def test_get_transactions_by_type(self):
with HTTMock(
self.response_content,
body=self.load_fixture("transaction/transactions-by-type"),
headers={"Content-type": "application/json"},
):
transactions = Transaction.find(transaction_type="number_purchase")
assert transactions is not None
assert len(transactions) > 0
for transaction in transactions:
assert transaction.transaction_type.startswith("number_purchase")
def test_transactions_with_params(self):
with HTTMock(
self.response_content,
body=self.load_fixture("transaction/transactions-with-params"),
headers={"Content-type": "application/json"},
):
transactions = Transaction.find(
transaction_type="number_purchase", limit=10, page=1
)
assert transactions is not None
assert len(transactions) <= 10
for transaction in transactions:
assert transaction.transaction_type.startswith("number_purchase")
if __name__ == "__main__":
unittest.main()
| 34.355932
| 82
| 0.609521
|
51ee6622ded72325ba257a7a3905eff143c179e7
| 1,941
|
py
|
Python
|
methods/__init__.py
|
rickgroen/cov-weighting
|
64c296679cd37e724a03c6dc107606f7048aec96
|
[
"MIT"
] | 26
|
2021-01-05T07:10:31.000Z
|
2022-03-23T06:31:00.000Z
|
methods/__init__.py
|
rickgroen/cov-weighting
|
64c296679cd37e724a03c6dc107606f7048aec96
|
[
"MIT"
] | 6
|
2021-04-12T16:27:11.000Z
|
2022-02-09T07:00:15.000Z
|
methods/__init__.py
|
rickgroen/cov-weighting
|
64c296679cd37e724a03c6dc107606f7048aec96
|
[
"MIT"
] | 7
|
2021-03-08T09:28:05.000Z
|
2022-02-23T07:39:29.000Z
|
import re
import importlib
import torch.optim as optim
from methods.base_method import BaseMethod
def find_method_using_name(method_name):
# Given the option --method [METHOD],
# the file "networks/{}_method.py" will be imported.
method_name = re.sub('-', '', method_name)
method_filename = "methods." + method_name + "_method"
method_lib = importlib.import_module(method_filename)
# In the file, the class called [MethodName]Method() will
# be instantiated. It has to be a subclass of BaseMethod, and it is case-insensitive.
method = None
target_model_name = method_name.replace('_', '') + 'method'
for name, cls in method_lib.__dict__.items():
if name.lower() == target_model_name.lower() and issubclass(cls, BaseMethod):
method = cls
if method is None:
        error_string = "No method class with name {} was found in {}.py.".format(target_model_name, method_filename)
raise ImportError(error_string)
return method
def create_method(args, loader):
model = find_method_using_name(args.method)
instance = model(args, loader)
print("Method {} with name {} was created".format(instance.__class__.__name__, instance.name))
return instance
def get_optimizer(params, args):
""" Gets and optimizer according to args.optimizer.
For now use a set of hard-coded parameters for each optimizer.
"""
which_optimizer = args.optimizer
if which_optimizer == 'adam':
optimizer = optim.Adam(params, lr=args.learning_rate)
elif which_optimizer == 'sgd':
optimizer = optim.SGD(params, lr=args.learning_rate, momentum=0.9, weight_decay=1e-4, nesterov=True)
elif which_optimizer == 'rmsprop':
optimizer = optim.RMSprop(params, lr=args.learning_rate, momentum=0.9, weight_decay=1e-4)
else:
raise NotImplementedError('{} has not been implemented'.format(which_optimizer))
return optimizer
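# --- usage sketch (illustrative; the argument values below are assumptions) ---
# `find_method_using_name('cov-weighting')` would import
# `methods.covweighting_method` and return its `CovWeightingMethod` class,
# assuming such a module/class exists in this package. `get_optimizer` only
# needs an iterable of parameters and an args object:
#
# from argparse import Namespace
# import torch.nn as nn
#
# args = Namespace(optimizer='adam', learning_rate=1e-4)
# model = nn.Linear(4, 2)
# optimizer = get_optimizer(model.parameters(), args)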
| 38.82
| 116
| 0.7017
|
65b113cd4746120366b512bf10d09611c2525d0f
| 397
|
py
|
Python
|
src/concurrency/server/client.py
|
Python-Masterclass/Concurrency
|
d1902afefd74fa31bebef2c8d205260e746fc525
|
[
"MIT"
] | null | null | null |
src/concurrency/server/client.py
|
Python-Masterclass/Concurrency
|
d1902afefd74fa31bebef2c8d205260e746fc525
|
[
"MIT"
] | null | null | null |
src/concurrency/server/client.py
|
Python-Masterclass/Concurrency
|
d1902afefd74fa31bebef2c8d205260e746fc525
|
[
"MIT"
] | null | null | null |
import socket
if __name__ == "__main__":
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 25000))
print("Connected to server")
while True:
        n = input().strip()  # trim whitespace; an empty line ends the session
if not n:
break
sock.send(n.encode("utf-8"))
response = sock.recv(100)
print(f"{response.decode('utf-8').strip()}")
| 28.357143
| 60
| 0.589421
|
5e3898ed9375022265bfd1a7d111388f1ee8a17e
| 473
|
py
|
Python
|
setup.py
|
anbasile/protest-clef-2020
|
44c4db7ee563a79155a1fc975046283bb46efe76
|
[
"Apache-2.0"
] | 2
|
2020-06-16T15:02:14.000Z
|
2021-06-24T17:49:12.000Z
|
setup.py
|
anbasile/protest-clef-2020
|
44c4db7ee563a79155a1fc975046283bb46efe76
|
[
"Apache-2.0"
] | 5
|
2020-12-30T09:12:39.000Z
|
2022-02-10T02:31:46.000Z
|
setup.py
|
anbasile/protest-clef-2020
|
44c4db7ee563a79155a1fc975046283bb46efe76
|
[
"Apache-2.0"
] | null | null | null |
from distutils.core import setup
setup(
name='protesta',
version='0.0.1',
description='Detect protest events in news articles',
author='Angelo Basile',
author_email='me@angelobasile.it',
install_requires=[
"nlp>=0.4",
"numpy<1.19",
"tensorflow-addons",
"tensorflow==2.4.0",
"torch",
"transformers==3.5",
"typer>=0.3.1",
],
entry_points={"console_scripts": ["protesta=protesta:app"]})
| 24.894737
| 64
| 0.58351
|
f6720ba3ddde470952740ce3b5355832f214b1d4
| 2,305
|
py
|
Python
|
openstack/network/v2/pool_member.py
|
wangrui1121/huaweicloud-sdk-python
|
240abe00288760115d1791012d4e3c4592d77ad1
|
[
"Apache-2.0"
] | 43
|
2018-12-19T08:39:15.000Z
|
2021-07-21T02:45:43.000Z
|
openstack/network/v2/pool_member.py
|
wangrui1121/huaweicloud-sdk-python
|
240abe00288760115d1791012d4e3c4592d77ad1
|
[
"Apache-2.0"
] | 11
|
2019-03-17T13:28:56.000Z
|
2020-09-23T23:57:50.000Z
|
openstack/network/v2/pool_member.py
|
wangrui1121/huaweicloud-sdk-python
|
240abe00288760115d1791012d4e3c4592d77ad1
|
[
"Apache-2.0"
] | 47
|
2018-12-19T05:14:25.000Z
|
2022-03-19T15:28:30.000Z
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may
# # not use this file except in compliance with the License. You may obtain
# # a copy of the License at
# #
# # http://www.apache.org/licenses/LICENSE-2.0
# #
# # Unless required by applicable law or agreed to in writing, software
# # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# # License for the specific language governing permissions and limitations
# # under the License.
#
# from openstack.network import network_service
# from openstack import resource2 as resource
#
#
# class PoolMember(resource.Resource):
# resource_key = 'member'
# resources_key = 'members'
# base_path = '/lbaas/pools/%(pool_id)s/members'
# service = network_service.NetworkService()
#
# # capabilities
# allow_create = True
# allow_get = True
# allow_update = True
# allow_delete = True
# allow_list = True
#
# _query_mapping = resource.QueryParameters(
# 'address', 'name', 'protocol_port', 'subnet_id', 'weight',
# is_admin_state_up='admin_state_up',
# project_id='tenant_id',
# )
#
# # Properties
# #: The ID of the owning pool
# pool_id = resource.URI('pool_id')
# #: The IP address of the pool member.
# address = resource.Body('address')
# #: The administrative state of the pool member, which is up ``True`` or
# #: down ``False``. *Type: bool*
# is_admin_state_up = resource.Body('admin_state_up', type=bool)
# #: Name of the pool member.
# name = resource.Body('name')
# #: The ID of the project this pool member is associated with.
# project_id = resource.Body('tenant_id')
# #: The port on which the application is hosted.
# protocol_port = resource.Body('protocol_port', type=int)
# #: Subnet ID in which to access this pool member.
# subnet_id = resource.Body('subnet_id')
# #: A positive integer value that indicates the relative portion of traffic
# #: that this member should receive from the pool. For example, a member
# #: with a weight of 10 receives five times as much traffic as a member
# #: with weight of 2.
# weight = resource.Body('weight', type=int)
| 40.438596
| 80
| 0.674187
|
af9ae06e8445b6374dd9781eca267858bbea4986
| 2,874
|
py
|
Python
|
tests/trainer/flags/test_val_check_interval.py
|
prajakta0111/pytorch-lightning
|
3df02b880a6d145ff0aca24ea429c12c0d8f1181
|
[
"Apache-2.0"
] | 1
|
2021-08-05T01:45:26.000Z
|
2021-08-05T01:45:26.000Z
|
tests/trainer/flags/test_val_check_interval.py
|
prajakta0111/pytorch-lightning
|
3df02b880a6d145ff0aca24ea429c12c0d8f1181
|
[
"Apache-2.0"
] | 1
|
2021-03-01T17:32:12.000Z
|
2021-03-01T17:32:12.000Z
|
tests/trainer/flags/test_val_check_interval.py
|
prajakta0111/pytorch-lightning
|
3df02b880a6d145ff0aca24ea429c12c0d8f1181
|
[
"Apache-2.0"
] | 1
|
2020-10-18T10:32:31.000Z
|
2020-10-18T10:32:31.000Z
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from pytorch_lightning.trainer import Trainer
from tests.helpers import BoringModel
@pytest.mark.parametrize('max_epochs', [1, 2, 3])
def test_val_check_interval_1(tmpdir, max_epochs):
class TestModel(BoringModel):
def __init__(self):
super().__init__()
self.train_epoch_calls = 0
self.val_epoch_calls = 0
def on_train_epoch_start(self) -> None:
self.train_epoch_calls += 1
def on_validation_epoch_start(self) -> None:
if not self.trainer.running_sanity_check:
self.val_epoch_calls += 1
model = TestModel()
trainer = Trainer(
max_epochs=max_epochs,
val_check_interval=1.0,
logger=False,
)
trainer.fit(model)
assert model.val_epoch_calls == max_epochs
@pytest.mark.parametrize('max_epochs', [1, 2, 3])
def test_val_check_interval_quarter(tmpdir, max_epochs):
class TestModel(BoringModel):
def __init__(self):
super().__init__()
self.train_epoch_calls = 0
self.val_epoch_calls = 0
def on_train_epoch_start(self) -> None:
self.train_epoch_calls += 1
def on_validation_epoch_start(self) -> None:
if not self.trainer.running_sanity_check:
self.val_epoch_calls += 1
model = TestModel()
trainer = Trainer(
max_epochs=max_epochs,
val_check_interval=0.25,
logger=False,
)
trainer.fit(model)
assert model.val_epoch_calls == max_epochs * 4
@pytest.mark.parametrize('max_epochs', [1, 2, 3])
def test_val_check_interval_third(tmpdir, max_epochs):
class TestModel(BoringModel):
def __init__(self):
super().__init__()
self.train_epoch_calls = 0
self.val_epoch_calls = 0
def on_train_epoch_start(self) -> None:
self.train_epoch_calls += 1
def on_validation_epoch_start(self) -> None:
if not self.trainer.running_sanity_check:
self.val_epoch_calls += 1
model = TestModel()
trainer = Trainer(
max_epochs=max_epochs,
val_check_interval=0.33,
logger=False,
)
trainer.fit(model)
assert model.val_epoch_calls == max_epochs * 3
| 28.176471
| 74
| 0.658316
|
e142829768654e3bba573b798b1caf9d738748c5
| 35,533
|
py
|
Python
|
hydrus/tests/functional/test_app.py
|
batcypher/hydrus-test
|
64b99504eefabbc3041f4771c0fd6deb189e699e
|
[
"MIT"
] | null | null | null |
hydrus/tests/functional/test_app.py
|
batcypher/hydrus-test
|
64b99504eefabbc3041f4771c0fd6deb189e699e
|
[
"MIT"
] | null | null | null |
hydrus/tests/functional/test_app.py
|
batcypher/hydrus-test
|
64b99504eefabbc3041f4771c0fd6deb189e699e
|
[
"MIT"
] | null | null | null |
"""
This file (test_app.py) contains the functional tests for checking if the
response format is proper. Run tests/unit/test_crud.py before running this.
"""
import json
import re
import uuid
import pytest
from hydra_python_core.doc_writer import HydraLink, DocUrl
from hydrus.tests.conftest import gen_dummy_object
from hydrus.utils import get_doc
# specify common fixture for all tests
@pytest.mark.usefixtures('init_db_for_app_tests')
class TestApp():
def test_Index(self, test_app_client, constants):
"""Test for the Index."""
HYDRUS_SERVER_URL = constants['HYDRUS_SERVER_URL']
API_NAME = constants['API_NAME']
response_get = test_app_client.get(f'/{API_NAME}')
endpoints = json.loads(response_get.data.decode('utf-8'))
response_post = test_app_client.post(f'/{API_NAME}', data=dict(foo='bar'))
response_put = test_app_client.put(f'/{API_NAME}', data=dict(foo='bar'))
response_delete = test_app_client.delete(f'/{API_NAME}')
assert '@context' in endpoints
assert endpoints['@id'] == f'{HYDRUS_SERVER_URL}{API_NAME}'
assert endpoints['@type'] == 'EntryPoint'
assert response_get.status_code == 200
assert response_post.status_code == 405
assert response_put.status_code == 405
assert response_delete.status_code == 405
def test_EntryPoint_context(self, test_app_client, constants):
"""Test for the EntryPoint context."""
API_NAME = constants['API_NAME']
response_get = test_app_client.get(f'/{API_NAME}/contexts/EntryPoint.jsonld')
response_get_data = json.loads(response_get.data.decode('utf-8'))
response_post = test_app_client.post(f'/{API_NAME}/contexts/EntryPoint.jsonld', data={})
response_delete = test_app_client.delete(f'/{API_NAME}/contexts/EntryPoint.jsonld')
assert response_get.status_code == 200
assert '@context' in response_get_data
assert response_post.status_code == 405
assert response_delete.status_code == 405
def test_Vocab(self, test_app_client, constants):
"""Test the vocab."""
API_NAME = constants['API_NAME']
HYDRUS_SERVER_URL = constants['HYDRUS_SERVER_URL']
vocab_route = get_doc().doc_name
response_get = test_app_client.get(f'/{API_NAME}/{vocab_route}#')
response_get_data = json.loads(response_get.data.decode('utf-8'))
assert '@context' in response_get_data
assert response_get_data['@type'] == 'ApiDocumentation'
assert response_get_data['@id'] == f'{HYDRUS_SERVER_URL}{API_NAME}/{vocab_route}'
assert response_get.status_code == 200
response_delete = test_app_client.delete(f'/{API_NAME}/{vocab_route}#')
assert response_delete.status_code == 405
response_put = test_app_client.put(
f'/{API_NAME}/{vocab_route}#', data=json.dumps(dict(foo='bar')))
assert response_put.status_code == 405
response_post = test_app_client.post(f'/{API_NAME}/{vocab_route}#',
data=json.dumps(dict(foo='bar')))
assert response_post.status_code == 405
def test_Collections_GET(self, test_app_client, constants, doc, init_db_for_app_tests):
"""Test GET on collection endpoints."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
response_get = test_app_client.get(endpoint['@id'])
assert response_get.status_code == 200
response_get_data = json.loads(response_get.data.decode('utf-8'))
assert '@context' in response_get_data
assert '@id' in response_get_data
assert '@type' in response_get_data
assert 'members' in response_get_data
# Check the item URI has the valid format, so it can be dereferenced
if len(response_get_data['members']) > 0:
for item in response_get_data['members']:
class_type = item['@type']
if class_type in doc.parsed_classes:
class_ = doc.parsed_classes[class_type]['class']
class_methods = [
x.method for x in class_.supportedOperation]
if 'GET' in class_methods:
item_response = test_app_client.get(
response_get_data['members'][0]['@id'])
assert item_response.status_code == 200
def test_pagination(self, test_app_client, constants, doc):
"""Test basic pagination"""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
response_get = test_app_client.get(endpoint['@id'])
assert response_get.status_code == 200
response_get_data = json.loads(
response_get.data.decode('utf-8'))
assert 'hydra:view' in response_get_data
assert 'hydra:first' in response_get_data['hydra:view']
assert 'hydra:last' in response_get_data['hydra:view']
if 'hydra:next' in response_get_data['hydra:view']:
response_next = test_app_client.get(
response_get_data['hydra:view']['hydra:next'])
assert response_next.status_code == 200
response_next_data = json.loads(response_next.data.decode('utf-8'))
assert 'hydra:previous' in response_next_data['hydra:view']
break
def test_Collections_PUT(self, test_app_client, constants, doc):
"""Test insert data to the collection."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
collection_name = '/'.join(endpoint['@id'].split(f'/{API_NAME}/')[1:])
collection = doc.collections[collection_name]['collection']
collection_methods = [x.method for x in collection.supportedOperation]
if 'PUT' in collection_methods:
dummy_object = gen_dummy_object(collection.name, doc)
good_response_put = test_app_client.put(endpoint['@id'],
data=json.dumps(dummy_object))
assert good_response_put.status_code == 201
def test_collection_object_GET(self, test_app_client, constants, doc):
"""Test GET of a given collection object using ID."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
collection_name = '/'.join(endpoint['@id'].split(f'/{API_NAME}/')[1:])
collection = doc.collections[collection_name]['collection']
collection_methods = [x.method for x in collection.supportedOperation]
if 'PUT' in collection_methods:
dummy_object = gen_dummy_object(collection.name, doc)
initial_put_response = test_app_client.put(
endpoint['@id'], data=json.dumps(dummy_object))
assert initial_put_response.status_code == 201
response = json.loads(initial_put_response.data.decode('utf-8'))
regex = r'(.*)ID (.{36})* (.*)'
matchObj = re.match(regex, response['description'])
assert matchObj is not None
id_ = matchObj.group(2)
if 'GET' in collection_methods:
get_response = test_app_client.get(f'{endpoint["@id"]}/{id_}')
assert get_response.status_code == 200
def test_collection_object_PUT(self, test_app_client, constants, doc):
"""Test PUT of a given collection object using ID."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
collection_name = '/'.join(endpoint['@id'].split(f'/{API_NAME}/')[1:])
collection = doc.collections[collection_name]['collection']
collection_methods = [x.method for x in collection.supportedOperation]
if 'PUT' in collection_methods:
dummy_object = gen_dummy_object(collection.name, doc)
initial_put_response = test_app_client.put(
endpoint['@id'], data=json.dumps(dummy_object))
assert initial_put_response.status_code == 201
def test_collection_object_POST(self, test_app_client, constants, doc, socketio):
"""Test POST of a given collection object using ID."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
collection_name = '/'.join(endpoint['@id'].split(f'/{API_NAME}/')[1:])
collection = doc.collections[collection_name]['collection']
collection_methods = [x.method for x in collection.supportedOperation]
dummy_object = gen_dummy_object(collection.name, doc)
initial_put_response = test_app_client.put(
endpoint['@id'], data=json.dumps(dummy_object))
assert initial_put_response.status_code == 201
response = json.loads(initial_put_response.data.decode('utf-8'))
regex = r'(.*)ID (.{36})* (.*)'
matchObj = re.match(regex, response['description'])
assert matchObj is not None
id_ = matchObj.group(2)
if 'POST' in collection_methods:
# members attribute should be writeable for POSTs
if collection.supportedProperty[0].write:
dummy_object = gen_dummy_object(collection.name, doc)
post_replace_response = test_app_client.post(f'{endpoint["@id"]}/{id_}',
data=json.dumps(dummy_object))
assert post_replace_response.status_code == 200
def test_collection_object_DELETE(self, test_app_client, constants, doc):
"""Test DELETE of a given collection object using ID."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
collection_name = '/'.join(
endpoint["@id"].split(f'/{API_NAME}/')[1:])
collection = doc.collections[collection_name]['collection']
collection_methods = [
x.method for x in collection.supportedOperation]
dummy_object = gen_dummy_object(collection.name, doc)
initial_put_response = test_app_client.put(endpoint["@id"],
data=json.dumps(dummy_object))
assert initial_put_response.status_code == 201
response = json.loads(initial_put_response.data.decode('utf-8'))
regex = r'(.*)ID (.{36})* (.*)'
matchObj = re.match(regex, response['description'])
assert matchObj is not None
id_ = matchObj.group(2)
if 'DELETE' in collection_methods:
delete_response = test_app_client.delete(
f'{endpoint["@id"]}/{id_}')
assert delete_response.status_code == 200
def test_object_PUT_at_id(self, test_app_client, constants, doc):
"""Create object in collection using PUT at specific ID."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
collection_name = '/'.join(
endpoint["@id"].split(f'/{API_NAME}/')[1:])
collection = doc.collections[collection_name]['collection']
collection_methods = [
x.method for x in collection.supportedOperation]
dummy_object = gen_dummy_object(collection.name, doc)
if 'PUT' in collection_methods:
dummy_object = gen_dummy_object(collection.name, doc)
put_response = test_app_client.put(f'{endpoint["@id"]}/{uuid.uuid4()}',
data=json.dumps(dummy_object))
assert put_response.status_code == 201
def test_object_PUT_at_ids(self, test_app_client, constants, doc):
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [x.method for x in class_.supportedOperation]
data_ = {'data': list()}
objects = list()
ids = ''
for index in range(3):
objects.append(gen_dummy_object(class_.title, doc))
                        ids += f'{uuid.uuid4()},'
data_['data'] = objects
if 'PUT' in class_methods:
put_response = test_app_client.put(f'{endpoints[endpoint]}/add/{ids}',
data=json.dumps(data_))
assert put_response.status_code == 201
def test_endpointClass_PUT(self, test_app_client, constants, doc):
"""Check non collection Class PUT."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(
endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [
x.method for x in class_.supportedOperation]
if 'PUT' in class_methods:
dummy_object = gen_dummy_object(class_.title, doc)
put_response = test_app_client.put(endpoints[endpoint],
data=json.dumps(dummy_object))
assert put_response.status_code == 201
def test_endpointClass_POST(self, test_app_client, constants, doc):
"""Check non collection Class POST."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [x.method for x in class_.supportedOperation]
if 'PUT' in class_methods:
# first insert a object which we will update later
dummy_object = gen_dummy_object(class_.title, doc)
initial_put_response = test_app_client.put(endpoints[endpoint],
data=json.dumps(dummy_object))
response = json.loads(
initial_put_response.data.decode('utf-8'))
regex = r'(.*)ID (.{36})* (.*)'
matchObj = re.match(regex, response['description'])
id_ = matchObj.group(2)
if 'POST' in class_methods:
dummy_object = gen_dummy_object(class_.title, doc)
post_response = test_app_client.post(f'{endpoints[endpoint]}/{id_}',
data=json.dumps(dummy_object))
assert post_response.status_code == 200
def test_endpointClass_DELETE(self, test_app_client, constants, doc):
"""Check non collection Class DELETE."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(
endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [x.method for x in class_.supportedOperation]
if 'PUT' in class_methods:
# first insert a object which we will update later
dummy_object = gen_dummy_object(class_.title, doc)
initial_put_response = test_app_client.put(endpoints[endpoint],
data=json.dumps(dummy_object))
response = json.loads(
initial_put_response.data.decode('utf-8'))
regex = r'(.*)ID (.{36})* (.*)'
matchObj = re.match(regex, response['description'])
id_ = matchObj.group(2)
if 'DELETE' in class_methods:
delete_response = test_app_client.delete(
f'{endpoints[endpoint]}/{id_}')
assert delete_response.status_code == 200
def test_endpointClass_GET(self, test_app_client, constants, doc):
"""Check non collection Class GET."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [x.method for x in class_.supportedOperation]
if 'GET' in class_methods:
response_get = test_app_client.get(endpoints[endpoint])
assert response_get.status_code == 405
def test_IriTemplate(self, test_app_client, constants, doc):
"""Test structure of IriTemplates attached to parsed classes"""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
expanded_base_url = DocUrl.doc_url
for endpoint in endpoints['collections']:
collection_name = '/'.join(endpoint["@id"].split(f'/{API_NAME}/')[1:])
collection = doc.collections[collection_name]['collection']
class_name = collection.manages["object"].split(expanded_base_url)[1]
response_get = test_app_client.get(endpoint["@id"])
assert response_get.status_code == 200
response_get_data = json.loads(response_get.data.decode('utf-8'))
assert 'search' in response_get_data
assert 'hydra:mapping' in response_get_data['search']
class_ = doc.parsed_classes[class_name]['class']
class_props = [x.prop for x in class_.supportedProperty]
for mapping in response_get_data['search']['hydra:mapping']:
prop = mapping['hydra:property']
prop_name = mapping['hydra:variable']
is_valid_class_prop = prop not in ['limit', 'offset', 'pageIndex']
# check if IRI property is for searching through a nested_class
# and not this class_
is_nested_class_prop = "[" in prop_name and "]" in prop_name
if is_valid_class_prop and not is_nested_class_prop:
assert prop in class_props
def test_client_controlled_pagination(self, test_app_client, constants, doc):
"""Test pagination controlled by test_app_client with help of pageIndex,
offset and limit parameters."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
response_get = test_app_client.get(endpoint["@id"])
assert response_get.status_code == 200
response_get_data = json.loads(response_get.data.decode('utf-8'))
assert 'search' in response_get_data
assert 'hydra:mapping' in response_get_data['search']
# Test with pageIndex and limit
params = {'pageIndex': 1, 'limit': 2}
response_for_page_param = test_app_client.get(
endpoint["@id"], query_string=params)
assert response_for_page_param.status_code == 200
response_for_page_param_data = json.loads(
response_for_page_param.data.decode('utf-8'))
assert 'hydra:first' in response_for_page_param_data['hydra:view']
assert 'hydra:last' in response_for_page_param_data['hydra:view']
if 'hydra:next' in response_for_page_param_data['hydra:view']:
hydra_next = response_for_page_param_data['hydra:view']['hydra:next']
assert 'pageIndex=2' in hydra_next
next_response = test_app_client.get(
response_for_page_param_data['hydra:view']['hydra:next'])
assert next_response.status_code == 200
next_response_data = json.loads(
next_response.data.decode('utf-8'))
assert 'hydra:previous' in next_response_data['hydra:view']
data = next_response_data['hydra:view']['hydra:previous']
assert 'pageIndex=1' in data
# Test with offset and limit
params = {'offset': 1, 'limit': 2}
response_for_offset_param = test_app_client.get(endpoint["@id"],
query_string=params)
assert response_for_offset_param.status_code == 200
response_for_offset_param_data = json.loads(
response_for_offset_param.data.decode('utf-8'))
data = response_for_offset_param_data['hydra:view']
assert 'hydra:first' in data
assert 'hydra:last' in data
if 'hydra:next' in data:
assert 'offset=3' in data['hydra:next']
next_response = test_app_client.get(data['hydra:next'])
assert next_response.status_code == 200
next_response_data = json.loads(next_response.data.decode('utf-8'))
assert 'hydra:previous' in next_response_data['hydra:view']
assert 'offset=1' in next_response_data['hydra:view']['hydra:previous']
def test_GET_for_nested_class(self, test_app_client, constants, doc):
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
collection_name = '/'.join(endpoint["@id"].split(f'/{API_NAME}/')[1:])
collection = doc.collections[collection_name]['collection']
class_methods = [x.method for x in collection.supportedOperation]
if 'GET' in class_methods:
response_get = test_app_client.get(endpoint["@id"])
assert response_get.status_code == 200
instance = response_get.json['members'][0]['@id']
instance_type = instance.split('/')[-2]
instance_class = doc.parsed_classes[instance_type]['class']
instance_methods = [x.method for x in instance_class.supportedOperation]
if 'GET' in instance_methods:
response_get_data = test_app_client.get(instance).json
assert '@context' in response_get_data
assert '@id' in response_get_data
assert '@type' in response_get_data
class_props = [x for x in collection.supportedProperty]
expanded_base_url = DocUrl.doc_url
for prop_name in class_props:
if not isinstance(prop_name.prop, HydraLink):
if expanded_base_url in prop_name.prop:
assert '@type' in response_get_data[prop_name.title]
def test_required_props(self, test_app_client, constants, doc):
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [x.method for x in class_.supportedOperation]
if 'PUT' in class_methods:
dummy_object = gen_dummy_object(class_.title, doc)
required_prop = ''
for prop in class_.supportedProperty:
if prop.required:
required_prop = prop.title
break
if required_prop:
del dummy_object[required_prop]
put_response = test_app_client.put(
endpoints[endpoint], data=json.dumps(dummy_object))
assert put_response.status_code == 400
def test_writeable_props(self, test_app_client, constants, doc):
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [x.method for x in class_.supportedOperation]
if 'PUT' in class_methods:
# first insert a object which we will update later
dummy_object = gen_dummy_object(class_.title, doc)
initial_put_response = test_app_client.put(endpoints[endpoint],
data=json.dumps(dummy_object))
response = json.loads(initial_put_response.data.decode('utf-8'))
regex = r'(.*)ID (.{36})* (.*)'
matchObj = re.match(regex, response['description'])
id_ = matchObj.group(2)
if 'POST' in class_methods:
dummy_object = gen_dummy_object(class_.title, doc)
# Test for writeable properties
post_response = test_app_client.post(
f'{endpoints[endpoint]}/{id_}', data=json.dumps(dummy_object))
assert post_response.status_code == 200
# Test for properties with writeable=False
non_writeable_prop = ''
for prop in class_.supportedProperty:
if prop.write is False:
non_writeable_prop = prop.title
break
if non_writeable_prop != '':
dummy_object[non_writeable_prop] = 'xyz'
post_response = test_app_client.post(
endpoints[endpoint], data=json.dumps(dummy_object))
assert post_response.status_code == 405
def test_readable_props(self, test_app_client, constants, doc):
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(
endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [
x.method for x in class_.supportedOperation]
if 'GET' in class_methods:
not_readable_prop = ''
for prop in class_.supportedProperty:
if prop.read is False:
not_readable_prop = prop.title
break
if not_readable_prop:
get_response = test_app_client.get(
endpoints[endpoint])
get_response_data = json.loads(
get_response.data.decode('utf-8'))
assert not_readable_prop not in get_response_data
def test_bad_objects(self, test_app_client, constants, doc):
"""Checks if bad objects are added or not."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(
endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name in doc.parsed_classes:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [
x.method for x in class_.supportedOperation]
if 'PUT' in class_methods:
bad_response_put = test_app_client.put(
endpoints[endpoint],
data=json.dumps(dict(foo='bar')))
assert bad_response_put.status_code == 400
def test_bad_requests(self, test_app_client, constants, doc):
"""Checks if bad requests are handled or not."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints:
if endpoint not in ['@context', '@id', '@type', 'collections']:
class_name = '/'.join(endpoints[endpoint].split(f'/{API_NAME}/')[1:])
if class_name not in doc.collections:
class_ = doc.parsed_classes[class_name]['class']
class_methods = [x.method for x in class_.supportedOperation]
dummy_object = gen_dummy_object(class_.title, doc)
if 'PUT' in class_methods:
initial_put_response = test_app_client.put(
endpoints[endpoint], data=json.dumps(dummy_object))
assert initial_put_response.status_code == 201
response = json.loads(initial_put_response.data.decode('utf-8'))
regex = r'(.*)ID (.{36})* (.*)'
matchObj = re.match(regex, response['description'])
assert matchObj is not None
id_ = matchObj.group(2)
if 'POST' not in class_methods:
dummy_object = gen_dummy_object(class_.title, doc)
post_replace_response = test_app_client.post(
f'{endpoints[endpoint]}/{id_}', data=json.dumps(dummy_object))
assert post_replace_response.status_code == 405
if 'DELETE' not in class_methods:
delete_response = test_app_client.delete(
f'{endpoints[endpoint]}/{id_}')
assert delete_response.status_code == 405
def test_Endpoints_Contexts(self, test_app_client, constants, doc):
"""Test all endpoints contexts are generated properly."""
API_NAME = constants['API_NAME']
index = test_app_client.get(f'/{API_NAME}')
assert index.status_code == 200
endpoints = json.loads(index.data.decode('utf-8'))
for endpoint in endpoints['collections']:
            response_get = test_app_client.get(endpoint['@id'])
assert response_get.status_code == 200
context = json.loads(response_get.data.decode('utf-8'))['@context']
response_context = test_app_client.get(context)
response_context_data = json.loads(
response_context.data.decode('utf-8'))
assert response_context.status_code == 200
assert '@context' in response_context_data
| 56.671451
| 97
| 0.570512
|
9b0af450efa2f2b8bc750e19b865be2d031c6d23
| 3,387
|
py
|
Python
|
web-syllabus/yasss/neutemplates/faculty.py
|
natederbinsky/neu-templates
|
5a7db9ca080470543c63a9a320f99af1713ce09d
|
[
"MIT"
] | 1
|
2018-08-31T21:30:38.000Z
|
2018-08-31T21:30:38.000Z
|
web-syllabus/yasss/neutemplates/faculty.py
|
natederbinsky/neu-templates
|
5a7db9ca080470543c63a9a320f99af1713ce09d
|
[
"MIT"
] | null | null | null |
web-syllabus/yasss/neutemplates/faculty.py
|
natederbinsky/neu-templates
|
5a7db9ca080470543c63a9a320f99af1713ce09d
|
[
"MIT"
] | 2
|
2017-10-05T19:58:12.000Z
|
2020-09-01T18:43:49.000Z
|
from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Tuple, Union
from typing_extensions import Literal
from enum import Enum
from os import path
from datetime import date
from yasss import gen
##
class Badge(Enum):
"""Types of badges."""
CUSTOM = 1
LINKEDIN = 2
GSCHOLAR = 3
GITHUB = 4
def make_custom_badge(url: str, img: str) -> Tuple[Badge, str, str]:
"""Produces a custom badge via a url and image."""
return (Badge.CUSTOM, url, img)
def make_linkedin(id: str) -> Tuple[Badge, str, str]:
"""Produces a LinkedIn badge given a user id."""
return (Badge.LINKEDIN, 'https://www.linkedin.com/in/{}'.format(id), 'img/_badges/linkedin.png')
def make_gscholar(id: str, lang: str='en') -> Tuple[Badge, str, str]:
"""Produces a Google Scholar badge given a user id."""
return (Badge.GSCHOLAR, 'https://scholar.google.com/citations?hl={}&user={}'.format(lang, id), 'img/_badges/gscholar.ico')
def make_github(id: str) -> Tuple[Badge, str, str]:
"""Produces a GitHub badge given a user id."""
return (Badge.GITHUB, 'https://github.com/{}'.format(id), 'img/_badges/github.png')
##
def build(site_dir: str, destination: str, pages: Iterable[str], resources: List[Union[str, Tuple[str, Callable[[str, str], bool]]]], nav: Mapping[str, str], home: str, personName: str, contactEmail: str, modDate: date, favicon: str, ganalytics: Optional[str]=None, badges: Iterable[Tuple[Badge, str, str]] = (), data: Mapping[str, Any]={}, globals: Mapping[str, Any]={}) -> bool:
"""Builds a site using the faculty template."""
# validate nav
for title, fname in nav.items():
if fname not in pages:
gen.eprint('Invalid navigation path: {} ({})'.format(fname, title))
return False
# validate home
if home not in nav:
gen.eprint('Invalid nav home: {}'.format(home))
return False
##
templ_name='faculty'
templ_resources = [
'css/bootstrap.min.css',
'css/faculty.css',
'fonts/glyphicons-halflings-regular.eot',
'fonts/glyphicons-halflings-regular.woff',
'fonts/glyphicons-halflings-regular.woff2',
'fonts/glyphicons-halflings-regular.svg',
'fonts/glyphicons-halflings-regular.ttf',
'js/bootstrap.min.js',
'js/jquery.min.js',
]
for badge in badges:
if badge[0] != Badge.CUSTOM:
templ_resources.append(badge[2])
else:
resources.append(badge[2])
templ_data: Dict[str, Any] = {
'name': personName,
'email': contactEmail,
'nav': nav,
'home': home,
'favicon': favicon,
'year': modDate.year,
'mod': modDate.strftime('%d %B %Y'),
'badges': badges,
'ganalytics': ganalytics
}
templ_data['name_small'] = templ_data['name'][:min(19, len(templ_data['name']))]
templ_globals = dict(globals)
templ_globals['nav'] = nav
##
return gen.build(
templ_name=templ_name,
templ_dir=path.join(path.dirname(path.abspath(__file__)), templ_name),
site_dir=site_dir,
destination=destination,
pages=pages,
templ_resources=templ_resources,
site_resources=resources + [favicon],
templ_data=templ_data,
site_data=data,
globals=templ_globals
)
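# Illustrative usage sketch (the site paths, names and ids below are hypothetical,
# not taken from this repository): shows how the badge helpers above feed the
# `badges` argument of build().
if __name__ == '__main__':
    demo_ok = build(
        site_dir='site-src', destination='public',
        pages=['index.html'], resources=[],
        nav={'Home': 'index.html'}, home='Home',
        personName='Jane Doe', contactEmail='j.doe@example.edu',
        modDate=date.today(), favicon='img/favicon.ico',
        badges=[make_github('janedoe'), make_gscholar('AbCdEfGhIjK')],
    )
    print('build succeeded?', demo_ok)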
| 29.710526
| 380
| 0.621494
|
53daf66c6d66a7bda906791502c3a9c09aca9a8a
| 8,627
|
py
|
Python
|
msticpy/data/uploaders/splunk_uploader.py
|
GiuseppeLaurenza/msticpy
|
37f96126b1e7ed06d3d140e340cdf86d6eee440b
|
[
"MIT"
] | 2
|
2020-11-03T05:56:10.000Z
|
2020-11-03T05:56:17.000Z
|
msticpy/data/uploaders/splunk_uploader.py
|
GiuseppeLaurenza/msticpy
|
37f96126b1e7ed06d3d140e340cdf86d6eee440b
|
[
"MIT"
] | 1
|
2021-06-02T15:24:59.000Z
|
2021-06-02T15:24:59.000Z
|
msticpy/data/uploaders/splunk_uploader.py
|
GiuseppeLaurenza/msticpy
|
37f96126b1e7ed06d3d140e340cdf86d6eee440b
|
[
"MIT"
] | 1
|
2022-02-06T18:56:15.000Z
|
2022-02-06T18:56:15.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""Splunk Uploader class."""
from pathlib import Path
from typing import Any
from tqdm.notebook import tqdm
import pandas as pd
from pandas.io.parsers import ParserError
from .uploader_base import UploaderBase
from ..._version import VERSION
from ..drivers.splunk_driver import SplunkDriver
from ...common.exceptions import MsticpyConnectionError, MsticpyUserError
__version__ = VERSION
__author__ = "Pete Bryan"
class SplunkUploader(UploaderBase):
"""Uploader class for Splunk."""
def __init__(self, username: str, host: str, password: str, **kwargs):
"""Initialize a Splunk Uploader instance."""
super().__init__()
self._kwargs = kwargs
self.workspace = host
self.workspace_secret = password
self.user = username
self.driver = SplunkDriver()
self.port = kwargs.get("port", 8089)
self._debug = kwargs.get("debug", False)
self._connect = kwargs.get("connect", True)
self.connected = False
if self._connect:
self.connect()
def connect(self):
"""Connect to Splunk host."""
self.driver.connect(
host=self.workspace,
username=self.user,
password=self.workspace_secret,
port=self.port,
)
self.connected = True
def _post_data(
self,
data: pd.DataFrame,
index_name: str,
table_name: Any,
host: str = None,
**kwargs,
):
"""
Write data to the Splunk instance connected to.
Parameters
----------
data : pd.DataFrame
Data to upload.
index_name : str
Name of the Splunk Index to add data to.
table_name : str
The Splunk sourcetype that the data will be uploaded to.
host : str, optional
The hostname associated with the uploaded data, by default "Upload".
"""
if not self.connected:
raise MsticpyConnectionError(
"Splunk host not connected, please call .connect before proceding.",
title="Splunk host not connected",
)
if not host:
host = "Upload"
create_idx = kwargs.get("create_index", False)
index = self._load_index(index_name, create_idx)
progress = tqdm(total=len(data.index), desc="Rows", position=0)
for row in data.iterrows():
row_data = row[1].to_csv()
try:
row_data.encode(encoding="latin-1")
except UnicodeEncodeError:
row_data = row_data.encode(encoding="utf-8")
index.submit(row_data, sourcetype=table_name, host=host)
progress.update(1)
progress.close()
if self._debug is True:
print("Upload complete")
# pylint: disable=arguments-differ
def upload_df( # type: ignore
self,
data: pd.DataFrame,
table_name: str,
index_name: str,
create_index: bool = False,
**kwargs,
):
"""
Upload a Pandas DataFrame to Splunk.
Parameters
----------
data : pd.DataFrame
Data to upload.
table_name : str
The Splunk sourcetype that the data will be uploaded to.
index_name : str
Name of the Splunk Index to add data to.
host : str, optional
Host name to upload data with, default will be 'Upload'
create_index : bool, optional
Set this to true to create the index if it doesn't already exist. Default is False.
"""
host = kwargs.get("host", None)
if not isinstance(data, pd.DataFrame):
raise MsticpyUserError(
"Data must be in Pandas DataFrame format.",
title="incorrect data format",
)
self._post_data(
data=data,
table_name=table_name,
index_name=index_name,
create_index=create_index,
host=host,
)
def upload_file( # type: ignore
self,
file_path: str,
index_name: str,
table_name: str = None,
delim: str = ",",
create_index=False,
**kwargs,
):
"""
Upload a separated-value file to Splunk.
Parameters
----------
file_path : str
Path to the file to upload.
index_name : str
Name of the Splunk Index to add data to.
table_name : str, optional
The Splunk sourcetype that the data will be uploaded to; if not set, the file name will be used.
delim : str, optional
Separator value in file, by default ","
host : str, optional
Host name to upload data with, default will be 'Upload'
create_index : bool, optional
Set this to true to create the index if it doesn't already exist. Default is False.
"""
host = kwargs.get("host", None)
path = Path(file_path)
try:
data = pd.read_csv(path, delimiter=delim)
except (ParserError, UnicodeDecodeError) as parse_err:
raise MsticpyUserError(
"The file specified is not a seperated value file.",
"Incorrect file type.",
) from parse_err
if not table_name:
table_name = path.stem
self._post_data(
data=data,
table_name=table_name,
index_name=index_name,
host=host,
create_index=create_index,
)
def upload_folder( # type: ignore
self,
folder_path: str,
index_name: str,
table_name: str = None,
delim: str = ",",
create_index=False,
**kwargs,
):
"""
Upload all files in a folder to Splunk.
Parameters
----------
folder_path : str
Path to folder to upload.
index_name : str
Name of the Splunk index to add data to; if it doesn't exist it will be created.
table_name : str, optional
The Splunk sourcetype that the data will be uploaded to; if not set, the file name will be used.
delim : str, optional
Separator value in files, by default ","
host : str, optional
Host name to upload data with, default will be 'Upload'
create_index : bool, optional
Set this to true to create the index if it doesn't already exist. Default is False.
"""
host = kwargs.get("host", None)
glob_pat = kwargs.get("glob", "*")
t_name = bool(table_name)
input_files = list(Path(folder_path).glob(glob_pat))  # materialize the glob so counting it below doesn't exhaust the iterator
f_progress = tqdm(total=len(input_files), desc="Files", position=0)
for path in input_files:
try:
data = pd.read_csv(path, delimiter=delim)
except (ParserError, UnicodeDecodeError) as parse_err:
raise MsticpyUserError(
"The file specified is not a seperated value file.",
title="Incorrect file type.",
) from parse_err
if not t_name:
table_name = path.stem
self._post_data(
data=data,
table_name=table_name,
index_name=index_name,
host=host,
create_index=create_index,
)
f_progress.update(1)
if self._debug is True:
print(f"{str(path)} uploaded to {table_name}")
f_progress.close()
# pylint: enable=arguments-differ
def _check_index(self, index_name: str):
"""Check if index exists in Splunk host."""
service_list = [item.name for item in self.driver.service.indexes]
if index_name in service_list:
return True
return False
def _load_index(self, index_name, create: bool = True):
"""Load specified Index or create if it doesn't exist."""
if self._check_index(index_name):
return self.driver.service.indexes[index_name]
if not self._check_index(index_name) and create:
return self.driver.service.indexes.create(index_name)
raise MsticpyConnectionError("Index not present in Splunk host.")
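# Illustrative usage sketch (hypothetical host, credentials and file paths; not
# part of the original module): upload a DataFrame and a CSV file into a Splunk index.
if __name__ == "__main__":
    uploader = SplunkUploader(
        username="admin", host="splunk.example.com", password="changeme", port=8089
    )
    demo_df = pd.DataFrame({"user": ["alice", "bob"], "event": ["login", "logout"]})
    uploader.upload_df(demo_df, table_name="demo_events", index_name="demo", create_index=True)
    uploader.upload_file("./events.csv", index_name="demo", table_name="demo_csv")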
| 33.831373
| 100
| 0.562884
|
8602d5cd9894dc928638de4c81587d53bfe584b8
| 18
|
py
|
Python
|
hello.py
|
LuisFelipeUrena/lambdata-luife
|
cc46b1608f15511614387ee834263b8a5eb24d4e
|
[
"MIT"
] | null | null | null |
hello.py
|
LuisFelipeUrena/lambdata-luife
|
cc46b1608f15511614387ee834263b8a5eb24d4e
|
[
"MIT"
] | null | null | null |
hello.py
|
LuisFelipeUrena/lambdata-luife
|
cc46b1608f15511614387ee834263b8a5eb24d4e
|
[
"MIT"
] | null | null | null |
print('Hola guey')
| 18
| 18
| 0.722222
|
3c37faf6a7dca027dc2e1ba19bf8ef6899dc466c
| 2,325
|
py
|
Python
|
python/CZITest/CZI_Test_Problem1.py
|
sanjaymeena/ProgrammingProblems
|
22904d7c210606464cf35a1623309da98e55475c
|
[
"Apache-2.0"
] | null | null | null |
python/CZITest/CZI_Test_Problem1.py
|
sanjaymeena/ProgrammingProblems
|
22904d7c210606464cf35a1623309da98e55475c
|
[
"Apache-2.0"
] | null | null | null |
python/CZITest/CZI_Test_Problem1.py
|
sanjaymeena/ProgrammingProblems
|
22904d7c210606464cf35a1623309da98e55475c
|
[
"Apache-2.0"
] | 1
|
2021-09-26T14:59:26.000Z
|
2021-09-26T14:59:26.000Z
|
# coding: utf-8
# ``
# Task 1: Identifying common words between documents
# For this task, you need to generate a matrix, where each entry contains the number of unique
# common tokens (words) between each pair of documents. The output should include no headers for
# rows or columns. The matrix should be symmetric, and follow the numbering conventions of the files.
# The diagonal entries would be the count of unique terms in each document.
# For example, the first number on the first line is the number of unique terms in 001.txt, the second
# number on the second line is the number of unique terms in 002.txt, etc.
# Similarly, the first element on the second line would be the number of unique terms that appear in
# both 001.txt and 002.txt, the 23rd number on the 16th line is the number of unique common terms
# between 023.txt and 016.txt, etc.
# ``
# In[52]:
import collections
import re
import sys
import glob
import os
import numpy as np
np.set_printoptions(linewidth=120)
# In[53]:
def tokenize(string):
return re.findall(r'\w+',string.lower())
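# Tiny illustrative check of the pairwise-overlap idea described above, using
# toy strings rather than files from ./data/ (not part of the original script):
_doc_a = set(tokenize("The cat sat on the mat"))
_doc_b = set(tokenize("the dog sat"))
assert len(_doc_a & _doc_b) == 2              # common unique tokens: {'the', 'sat'}
assert len(_doc_a) == 5 and len(_doc_b) == 3  # the diagonal entries for these two docs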
# In[54]:
data_folder='./data/'
content_dict = {}
total_content=[]
for content in os.listdir(data_folder):  # iterate over the files in the data folder
filepath=data_folder+ content
lines=[]
with open(filepath) as f:
lines = f.readlines()
#print lines
#print content
#print len(lines)
tokenized=[]
for line in lines :
words=tokenize(line)
string=' '.join(w for w in words)
tokenized.append(string)
tokenset=set(tokenized)
string=' '.join(token for token in tokenset)
f=int(content.replace('.txt', ''))
#print f
content_dict[f]=string
# In[55]:
# w1=content_dict[20].split()
# w2=content_dict[21].split()
# intersection = set(w1) & set(w2)
# print len(intersection)
# In[56]:
rows, columns = 100, 100
matrix = [[0 for x in range(rows)] for y in range(columns)]
for i in range(0,rows):
for j in range(0,columns):
w1=content_dict[i+1].split()
w2=content_dict[j+1].split()
intersection = set(w1) & set(w2)
matrix[i][j]=len(intersection)
# In[58]:
np.set_printoptions(linewidth=120)
print(np.matrix(matrix))
# test case
# vals=set(content_dict[1].split())
# print len(vals)
# print matrix[0][0]
# In[ ]:
| 23.019802
| 102
| 0.67914
|
60f309d68869fbc7e660f8ca89f4fa7b8007caf3
| 376
|
py
|
Python
|
py/setup.py
|
svrotter/stabilizer
|
96732cbea115efaa2012b69fc23fa18f95a56298
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
py/setup.py
|
svrotter/stabilizer
|
96732cbea115efaa2012b69fc23fa18f95a56298
|
[
"Apache-2.0",
"MIT"
] | 1
|
2022-01-20T09:20:43.000Z
|
2022-01-20T09:20:43.000Z
|
py/setup.py
|
svrotter/stabilizer
|
96732cbea115efaa2012b69fc23fa18f95a56298
|
[
"Apache-2.0",
"MIT"
] | 1
|
2022-01-19T14:29:54.000Z
|
2022-01-19T14:29:54.000Z
|
from setuptools import setup, find_packages
setup(name="stabilizer",
packages=find_packages(),
version="0.1",
description="Stabilizer Utilities",
author="QUARTIQ GmbH",
license="MIT",
install_requires=[
"numpy",
"miniconf-mqtt@git+https://github.com/quartiq/miniconf@develop#subdirectory=py/miniconf-mqtt"
])
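# Illustrative install sketch (run from the py/ directory of this repository;
# the commands shown are a common setuptools convention, not taken from the project docs):
#   pip install .        # or `pip install -e .` for a development install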
| 28.923077
| 105
| 0.643617
|
9e2284b9c05ce537dad9057d6c30c9b6a3b9704a
| 20,881
|
py
|
Python
|
keras/engine/sequential.py
|
henrique/keras
|
b693bb84200d70aa736f2491ff83509fd1b1b6fb
|
[
"Apache-2.0"
] | null | null | null |
keras/engine/sequential.py
|
henrique/keras
|
b693bb84200d70aa736f2491ff83509fd1b1b6fb
|
[
"Apache-2.0"
] | null | null | null |
keras/engine/sequential.py
|
henrique/keras
|
b693bb84200d70aa736f2491ff83509fd1b1b6fb
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
"""Home of the `Sequential` model."""
import tensorflow.compat.v2 as tf
import copy
from keras import layers as layer_module
from keras.engine import base_layer
from keras.engine import functional
from keras.engine import input_layer
from keras.engine import training_utils
from keras.saving.saved_model import model_serialization
from keras.utils import generic_utils
from keras.utils import layer_utils
from keras.utils import tf_inspect
from keras.utils import tf_utils
from keras.utils import traceback_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
SINGLE_LAYER_OUTPUT_ERROR_MSG = ('All layers in a Sequential model should have '
'a single output tensor. For multi-output '
'layers, use the functional API.')
@keras_export('keras.Sequential', 'keras.models.Sequential')
class Sequential(functional.Functional):
"""`Sequential` groups a linear stack of layers into a `tf.keras.Model`.
`Sequential` provides training and inference features on this model.
Examples:
>>> # Optionally, the first layer can receive an `input_shape` argument:
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Dense(8, input_shape=(16,)))
>>> # Afterwards, we do automatic shape inference:
>>> model.add(tf.keras.layers.Dense(4))
>>> # This is identical to the following:
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.Input(shape=(16,)))
>>> model.add(tf.keras.layers.Dense(8))
>>> # Note that you can also omit the `input_shape` argument.
>>> # In that case the model doesn't have any weights until the first call
>>> # to a training/evaluation method (since it isn't yet built):
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Dense(8))
>>> model.add(tf.keras.layers.Dense(4))
>>> # model.weights not created yet
>>> # Whereas if you specify the input shape, the model gets built
>>> # continuously as you are adding layers:
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Dense(8, input_shape=(16,)))
>>> model.add(tf.keras.layers.Dense(4))
>>> len(model.weights)
4
>>> # When using the delayed-build pattern (no input shape specified), you can
>>> # choose to manually build your model by calling
>>> # `build(batch_input_shape)`:
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Dense(8))
>>> model.add(tf.keras.layers.Dense(4))
>>> model.build((None, 16))
>>> len(model.weights)
4
```python
# Note that when using the delayed-build pattern (no input shape specified),
# the model gets built the first time you call `fit`, `eval`, or `predict`,
# or the first time you call the model on some input data.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(8))
model.add(tf.keras.layers.Dense(1))
model.compile(optimizer='sgd', loss='mse')
# This builds the model for the first time:
model.fit(x, y, batch_size=32, epochs=10)
```
"""
@tf.__internal__.tracking.no_automatic_dependency_tracking
@traceback_utils.filter_traceback
def __init__(self, layers=None, name=None):
"""Creates a `Sequential` model instance.
Args:
layers: Optional list of layers to add to the model.
name: Optional name for the model.
"""
# Skip the init in FunctionalModel since model doesn't have input/output yet
super(functional.Functional, self).__init__( # pylint: disable=bad-super-call
name=name, autocast=False)
base_layer.keras_api_gauge.get_cell('Sequential').set(True)
self.supports_masking = True
self._compute_output_and_mask_jointly = True
self._auto_track_sub_layers = False
self._inferred_input_shape = None
self._has_explicit_input_shape = False
self._input_dtype = None
self._layer_call_argspecs = {}
self._created_nodes = set()
# Flag that indicates whether the sequential network topology has been
# created. It is False when there are no layers, or when the layers don't
# have an input shape.
self._graph_initialized = False
# Unfortunately some Sequential models using custom layers or FeatureColumn
# layers have multiple inputs. This is fundamentally incompatible with
# most of the Sequential API, and we have to disable a number of features
# for such models.
self._use_legacy_deferred_behavior = False
# Add to the model any layers passed to the constructor.
if layers:
if not isinstance(layers, (list, tuple)):
layers = [layers]
for layer in layers:
self.add(layer)
@property
def layers(self):
# Historically, `sequential.layers` only returns layers that were added
# via `add`, and omits the auto-generated `InputLayer` that comes at the
# bottom of the stack.
# `Trackable` manages the `_layers` attributes and does filtering
# over it.
layers = super(Sequential, self).layers
if layers and isinstance(layers[0], input_layer.InputLayer):
return layers[1:]
return layers[:]
@tf.__internal__.tracking.no_automatic_dependency_tracking
@traceback_utils.filter_traceback
def add(self, layer):
"""Adds a layer instance on top of the layer stack.
Args:
layer: layer instance.
Raises:
TypeError: If `layer` is not a layer instance.
ValueError: In case the `layer` argument does not
know its input shape.
ValueError: In case the `layer` argument has
multiple output tensors, or is already connected
somewhere else (forbidden in `Sequential` models).
"""
# If we are passed a Keras tensor created by keras.Input(), we can extract
# the input layer from its keras history and use that without any loss of
# generality.
if hasattr(layer, '_keras_history'):
origin_layer = layer._keras_history[0]
if isinstance(origin_layer, input_layer.InputLayer):
layer = origin_layer
if isinstance(layer, tf.Module):
if not isinstance(layer, base_layer.Layer):
layer = functional.ModuleWrapper(layer)
else:
raise TypeError('The added layer must be an instance of class Layer. '
f'Received: layer={layer} of type {type(layer)}.')
tf_utils.assert_no_legacy_layers([layer])
if not self._is_layer_name_unique(layer):
raise ValueError(
'All layers added to a Sequential model '
f'should have unique names. Name "{layer.name}" is already the name '
'of a layer in this model. Update the `name` argument '
'to pass a unique name.')
self.built = False
set_inputs = False
self._maybe_create_attribute('_self_tracked_trackables', [])
if not self._self_tracked_trackables:
if isinstance(layer, input_layer.InputLayer):
# Case where the user passes an Input or InputLayer layer via `add`.
set_inputs = True
else:
batch_shape, dtype = training_utils.get_input_shape_and_dtype(layer)
if batch_shape:
# Instantiate an input layer.
x = input_layer.Input(
batch_shape=batch_shape, dtype=dtype, name=layer.name + '_input')
# This will build the current layer
# and create the node connecting the current layer
# to the input layer we just created.
layer(x)
set_inputs = True
if set_inputs:
outputs = tf.nest.flatten(layer._inbound_nodes[-1].outputs)
if len(outputs) != 1:
raise ValueError(SINGLE_LAYER_OUTPUT_ERROR_MSG)
self.outputs = outputs
self.inputs = layer_utils.get_source_inputs(self.outputs[0])
self.built = True
self._has_explicit_input_shape = True
elif self.outputs:
# If the model is being built continuously on top of an input layer:
# refresh its output.
output_tensor = layer(self.outputs[0])
if len(tf.nest.flatten(output_tensor)) != 1:
raise ValueError(SINGLE_LAYER_OUTPUT_ERROR_MSG)
self.outputs = [output_tensor]
self.built = True
if set_inputs or self._graph_initialized:
self._init_graph_network(self.inputs, self.outputs)
self._graph_initialized = True
else:
self._self_tracked_trackables.append(layer)
self._handle_deferred_layer_dependencies([layer])
self._layer_call_argspecs[layer] = tf_inspect.getfullargspec(layer.call)
@tf.__internal__.tracking.no_automatic_dependency_tracking
@traceback_utils.filter_traceback
def pop(self):
"""Removes the last layer in the model.
Raises:
TypeError: if there are no layers in the model.
"""
if not self.layers:
raise TypeError('There are no layers in the model.')
layer = self._self_tracked_trackables.pop()
self._layer_call_argspecs.pop(layer)
if not self.layers:
self.outputs = None
self.inputs = None
self.built = False
self._inferred_input_shape = None
self._has_explicit_input_shape = False
self._graph_initialized = False
elif self._graph_initialized:
self.layers[-1]._outbound_nodes = []
self.outputs = [self.layers[-1].output]
self._init_graph_network(self.inputs, self.outputs)
self.built = True
@tf.__internal__.tracking.no_automatic_dependency_tracking
def _build_graph_network_for_inferred_shape(self,
input_shape,
input_dtype=None):
if input_shape is None or not self.layers:
return
if not tf.__internal__.tf2.enabled() or not tf.compat.v1.executing_eagerly_outside_functions():
# This behavior is disabled in V1 or when eager execution is disabled.
return
if (not self._has_explicit_input_shape and
not self._use_legacy_deferred_behavior):
# Determine whether the input shape is novel, i.e. whether the model
# should be rebuilt.
input_shape = tuple(input_shape)
if self._inferred_input_shape is None:
new_shape = input_shape
else:
new_shape = relax_input_shape(self._inferred_input_shape, input_shape)
if (new_shape is not None and new_shape != self._inferred_input_shape):
# A novel shape has been received: we need to rebuild the model.
# In case we are inside a graph function, we step out of it.
with tf.init_scope():
inputs = input_layer.Input(
batch_shape=new_shape,
dtype=input_dtype,
name=self.layers[0].name + '_input')
layer_input = inputs
created_nodes = set()
for layer in self.layers:
# Clear nodes previously created via this method. This prevents
# node accumulation and ensures that e.g. `layer.output` is
# always connected to `model.inputs`
# (this is important e.g. for the feature extraction use case).
# We don't just do `layer._inbound_nodes = []` in order
# not to break shared layers added to Sequential models (which is
# technically illegal as per the `add()` docstring,
# but wasn't previously disabled).
clear_previously_created_nodes(layer, self._created_nodes)
try:
# Create Functional API connection by calling the current layer
layer_output = layer(layer_input)
except: # pylint:disable=bare-except
# Functional API calls may fail for a number of reasons:
# 1) The layer may be buggy. In this case it will be easier for
# the user to debug if we fail on the first call on concrete data,
# instead of our own call on a symbolic input.
# 2) The layer is dynamic (graph-incompatible) and hasn't
# overridden `compute_output_shape`. In this case, it is
# impossible to build a graph network.
# 3) The layer is otherwise incompatible with the Functional API
# (e.g. this is the case for some probabilistic layers that rely
# on hacks and that do not return tensors).
# In all these cases, we should avoid creating a graph network
# (or we simply can't).
self._use_legacy_deferred_behavior = True
return
if len(tf.nest.flatten(layer_output)) != 1:
raise ValueError(SINGLE_LAYER_OUTPUT_ERROR_MSG)
# Keep track of nodes just created above
track_nodes_created_by_last_call(layer, created_nodes)
layer_input = layer_output
outputs = layer_output
self._created_nodes = created_nodes
try:
# Initialize a graph Network. This call will never fail for
# a stack of valid Keras layers.
# However some users have layers that are fundamentally incompatible
# with the Functional API, which do not return tensors. In this
# case, we fall back to the legacy deferred behavior.
# TODO(fchollet): consider raising here, as we should not be
# supporting such layers.
self._init_graph_network(inputs, outputs)
self._graph_initialized = True
except: # pylint:disable=bare-except
self._use_legacy_deferred_behavior = True
self._inferred_input_shape = new_shape
@generic_utils.default
def build(self, input_shape=None):
if self._graph_initialized:
self._init_graph_network(self.inputs, self.outputs)
else:
if input_shape is None:
raise ValueError('You must provide an `input_shape` argument.')
self._build_graph_network_for_inferred_shape(input_shape)
if not self.built:
input_shape = tuple(input_shape)
self._build_input_shape = input_shape
super(Sequential, self).build(input_shape)
self.built = True
def call(self, inputs, training=None, mask=None): # pylint: disable=redefined-outer-name
# If applicable, update the static input shape of the model.
if not self._has_explicit_input_shape:
if not tf.is_tensor(inputs) and not isinstance(
inputs, tf.Tensor):
# This is a Sequential with multiple inputs. This is technically an
# invalid use case of Sequential, but we tolerate it for backwards
# compatibility.
self._use_legacy_deferred_behavior = True
self._build_input_shape = tf.nest.map_structure(_get_shape_tuple, inputs)
if tf.__internal__.tf2.enabled():
logging.warning('Layers in a Sequential model should only have a '
'single input tensor, but we receive a %s input: %s'
'\nConsider rewriting this model with the Functional '
'API.' % (type(inputs), inputs))
else:
self._build_graph_network_for_inferred_shape(inputs.shape, inputs.dtype)
if self._graph_initialized:
if not self.built:
self._init_graph_network(self.inputs, self.outputs)
return super(Sequential, self).call(inputs, training=training, mask=mask)
outputs = inputs # handle the corner case where self.layers is empty
for layer in self.layers:
# During each iteration, `inputs` are the inputs to `layer`, and `outputs`
# are the outputs of `layer` applied to `inputs`. At the end of each
# iteration `inputs` is set to `outputs` to prepare for the next layer.
kwargs = {}
argspec = self._layer_call_argspecs[layer].args
if 'mask' in argspec:
kwargs['mask'] = mask
if 'training' in argspec:
kwargs['training'] = training
outputs = layer(inputs, **kwargs)
if len(tf.nest.flatten(outputs)) != 1:
raise ValueError(SINGLE_LAYER_OUTPUT_ERROR_MSG)
# `outputs` will be the inputs to the next layer.
inputs = outputs
mask = getattr(outputs, '_keras_mask', None)
return outputs
def compute_output_shape(self, input_shape):
shape = input_shape
for layer in self.layers:
shape = layer.compute_output_shape(shape)
return shape
def compute_mask(self, inputs, mask):
# TODO(omalleyt): b/123540974 This function is not really safe to call
# by itself because it will duplicate any updates and losses in graph
# mode by `call`ing the Layers again.
outputs = self.call(inputs, mask=mask) # pylint: disable=unexpected-keyword-arg
return getattr(outputs, '_keras_mask', None)
def get_config(self):
layer_configs = []
for layer in super(Sequential, self).layers:
# `super().layers` includes the InputLayer if available (it is filtered out
# of `self.layers`). Note that `self._self_tracked_trackables` is managed
# by the tracking infrastructure and should not be used.
layer_configs.append(generic_utils.serialize_keras_object(layer))
config = {
'name': self.name,
'layers': copy.deepcopy(layer_configs)
}
if not self._is_graph_network and self._build_input_shape is not None:
config['build_input_shape'] = self._build_input_shape
return config
@classmethod
def from_config(cls, config, custom_objects=None):
if 'name' in config:
name = config['name']
build_input_shape = config.get('build_input_shape')
layer_configs = config['layers']
else:
name = None
build_input_shape = None
layer_configs = config
model = cls(name=name)
for layer_config in layer_configs:
layer = layer_module.deserialize(layer_config,
custom_objects=custom_objects)
model.add(layer)
if (not model.inputs and build_input_shape and
isinstance(build_input_shape, (tuple, list))):
model.build(build_input_shape)
return model
@property
def input_spec(self):
if hasattr(self, '_manual_input_spec'):
return self._manual_input_spec
if self.layers and hasattr(self.layers[0], 'input_spec'):
return self.layers[0].input_spec
return None
@input_spec.setter
def input_spec(self, value):
self._manual_input_spec = value
@property
def _trackable_saved_model_saver(self):
return model_serialization.SequentialSavedModelSaver(self)
def _is_layer_name_unique(self, layer):
for ref_layer in self.layers:
if layer.name == ref_layer.name and ref_layer is not layer:
return False
return True
def _assert_weights_created(self):
if self._graph_initialized:
return
# When the graph has not been initialized, use the Model's implementation
# to check if the weights have been created.
super(functional.Functional, self)._assert_weights_created() # pylint: disable=bad-super-call
def _get_shape_tuple(t):
if hasattr(t, 'shape'):
shape = t.shape
if isinstance(shape, tuple):
return shape
if shape.rank is not None:
return tuple(shape.as_list())
return None
return None
def relax_input_shape(shape_1, shape_2):
if shape_1 is None or shape_2 is None:
return None
if len(shape_1) != len(shape_2):
return None
return tuple(None if d1 != d2 else d1 for d1, d2 in zip(shape_1, shape_2))
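# Small illustrative example of the relaxation rule above (not from the Keras
# test suite): dimensions that differ between two observed shapes become None.
assert relax_input_shape((None, 32), (None, 32)) == (None, 32)
assert relax_input_shape((None, 32), (None, 64)) == (None, None)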
def clear_previously_created_nodes(layer, created_nodes):
"""Remove nodes from `created_nodes` from the layer's inbound_nodes."""
for node in layer._inbound_nodes:
prev_layers = node.inbound_layers
for prev_layer in tf.nest.flatten(prev_layers):
prev_layer._outbound_nodes = [
n for n in prev_layer._outbound_nodes
if n not in created_nodes]
layer._inbound_nodes = [
n for n in layer._inbound_nodes if n not in created_nodes]
def track_nodes_created_by_last_call(layer, created_nodes):
"""Adds to `created_nodes` the nodes created by the last call to `layer`."""
if not layer._inbound_nodes:
return
created_nodes.add(layer._inbound_nodes[-1])
prev_layers = layer._inbound_nodes[-1].inbound_layers
for prev_layer in tf.nest.flatten(prev_layers):
if prev_layer._outbound_nodes:
created_nodes.add(prev_layer._outbound_nodes[-1])
| 40.624514
| 99
| 0.67741
|
173cf7292895c57965630146e0c6b40127317a99
| 9,943
|
py
|
Python
|
tests/zia/test_dlp.py
|
LetMeR00t/pyZscaler
|
6b8027a4f76fdc1f95321558251a91d954218d9f
|
[
"MIT"
] | 16
|
2021-07-09T00:20:31.000Z
|
2022-02-17T19:29:26.000Z
|
tests/zia/test_dlp.py
|
jacobgarder/pyZscaler
|
77cf2ff9d64b49b932ec251a35041bfc0abf1223
|
[
"MIT"
] | 62
|
2021-07-21T03:42:09.000Z
|
2022-03-18T09:08:20.000Z
|
tests/zia/test_dlp.py
|
jacobgarder/pyZscaler
|
77cf2ff9d64b49b932ec251a35041bfc0abf1223
|
[
"MIT"
] | 8
|
2021-09-11T08:14:53.000Z
|
2022-03-25T20:14:41.000Z
|
import pytest
import responses
from box import Box, BoxList
from responses import matchers
@pytest.fixture(name="dlp_dicts")
def dlp_dicts():
yield [
{
"id": 1,
"custom": True,
"customPhraseMatchType": "MATCH_ALL_CUSTOM_PHRASE_PATTERN_DICTIONARY",
"dictionaryType": "PATTERNS_AND_PHRASES",
"name": "test",
"nameL10nTag": False,
"description": "test",
"phrases": [
{"action": "PHRASE_COUNT_TYPE_ALL", "phrase": "test"},
{"action": "PHRASE_COUNT_TYPE_UNIQUE", "phrase": "test"},
],
"patterns": [
{"action": "PATTERN_COUNT_TYPE_ALL", "pattern": "test"},
{"action": "PATTERN_COUNT_TYPE_UNIQUE", "pattern": "test"},
],
},
{
"id": 2,
"custom": True,
"customPhraseMatchType": "MATCH_ANY_CUSTOM_PHRASE_PATTERN_DICTIONARY",
"dictionaryType": "PATTERNS_AND_PHRASES",
"name": "test",
"nameL10nTag": False,
"description": "test",
"phrases": [
{"action": "PHRASE_COUNT_TYPE_ALL", "phrase": "test"},
{"action": "PHRASE_COUNT_TYPE_UNIQUE", "phrase": "test"},
],
"patterns": [
{"action": "PATTERN_COUNT_TYPE_ALL", "pattern": "test"},
{"action": "PATTERN_COUNT_TYPE_UNIQUE", "pattern": "test"},
],
},
]
@responses.activate
def test_dlp_update_dict_all(zia, dlp_dicts):
responses.add(
method="GET",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries/1",
json=dlp_dicts[0],
status=200,
)
responses.add(
method="PUT",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries/1",
json={
"id": 1,
"custom": True,
"customPhraseMatchType": "MATCH_ALL_CUSTOM_PHRASE_PATTERN_DICTIONARY",
"dictionaryType": "PATTERNS_AND_PHRASES",
"name": "test_updated",
"nameL10nTag": False,
"description": "test",
"phrases": [{"action": "PHRASE_COUNT_TYPE_ALL", "phrase": "test_updated"}],
"patterns": [{"action": "PATTERN_COUNT_TYPE_ALL", "pattern": "test_updated"}],
},
status=200,
match=[
matchers.json_params_matcher(
{
"id": 1,
"custom": True,
"customPhraseMatchType": "MATCH_ALL_CUSTOM_PHRASE_PATTERN_DICTIONARY",
"dictionaryType": "PATTERNS_AND_PHRASES",
"name": "test_updated",
"nameL10nTag": False,
"description": "test",
"phrases": [{"action": "PHRASE_COUNT_TYPE_ALL", "phrase": "test_updated"}],
"patterns": [{"action": "PATTERN_COUNT_TYPE_ALL", "pattern": "test_updated"}],
}
)
],
)
resp = zia.dlp.update_dict(
"1",
name="test_updated",
match_type="all",
phrases=[
("all", "test_updated"),
],
patterns=[
("all", "test_updated"),
],
)
assert resp.name == "test_updated"
assert resp.phrases[0].phrase == "test_updated"
assert resp.patterns[0].pattern == "test_updated"
@responses.activate
def test_dlp_update_dict_any(zia, dlp_dicts):
responses.add(
method="GET",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries/1",
json=dlp_dicts[0],
status=200,
)
responses.add(
method="PUT",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries/1",
json={
"id": 1,
"custom": True,
"customPhraseMatchType": "MATCH_ANY_CUSTOM_PHRASE_PATTERN_DICTIONARY",
"dictionaryType": "PATTERNS_AND_PHRASES",
"name": "test_updated",
"nameL10nTag": False,
"description": "test",
"phrases": [{"action": "PHRASE_COUNT_TYPE_ALL", "phrase": "test_updated"}],
"patterns": [{"action": "PATTERN_COUNT_TYPE_ALL", "pattern": "test_updated"}],
},
status=200,
match=[
matchers.json_params_matcher(
{
"id": 1,
"custom": True,
"customPhraseMatchType": "MATCH_ANY_CUSTOM_PHRASE_PATTERN_DICTIONARY",
"dictionaryType": "PATTERNS_AND_PHRASES",
"name": "test_updated",
"nameL10nTag": False,
"description": "test",
"phrases": [{"action": "PHRASE_COUNT_TYPE_ALL", "phrase": "test_updated"}],
"patterns": [{"action": "PATTERN_COUNT_TYPE_ALL", "pattern": "test_updated"}],
}
)
],
)
resp = zia.dlp.update_dict(
"1",
name="test_updated",
match_type="any",
phrases=[
("all", "test_updated"),
],
patterns=[
("all", "test_updated"),
],
)
assert resp.name == "test_updated"
assert resp.phrases[0].phrase == "test_updated"
assert resp.patterns[0].pattern == "test_updated"
@responses.activate
def test_dlp_update_dict_error(zia, dlp_dicts):
responses.add(
method="GET",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries/1",
json=dlp_dicts[0],
status=200,
)
with pytest.raises(Exception) as e_info:
resp = zia.dlp.update_dict("1", match_type="test")
@responses.activate
def test_dlp_list_dicts(zia, dlp_dicts):
responses.add(
method="GET",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries",
json=dlp_dicts,
status=200,
)
resp = zia.dlp.list_dicts()
assert isinstance(resp, BoxList)
assert resp[0].id == 1
@responses.activate
def test_dlp_delete(zia):
responses.add(
method="DELETE",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries/1",
body="",
status=204,
)
resp = zia.dlp.delete_dict("1")
assert isinstance(resp, int)
assert resp == 204
@responses.activate
def test_dlp_get(zia, dlp_dicts):
responses.add(
method="GET",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries/1",
json=dlp_dicts[0],
status=200,
)
resp = zia.dlp.get_dict("1")
assert isinstance(resp, Box)
assert resp.id == 1
@responses.activate
def test_dlp_add_type_all(zia, dlp_dicts):
responses.add(
method="POST",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries",
json=dlp_dicts[0],
match=[
matchers.json_params_matcher(
{
"name": "test",
"dictionaryType": "PATTERNS_AND_PHRASES",
"description": "test",
"customPhraseMatchType": "MATCH_ALL_CUSTOM_PHRASE_PATTERN_DICTIONARY",
"patterns": [
{
"action": "PATTERN_COUNT_TYPE_ALL",
"pattern": "test",
}
],
"phrases": [
{
"action": "PHRASE_COUNT_TYPE_ALL",
"phrase": "test",
}
],
}
)
],
status=200,
)
resp = zia.dlp.add_dict(
name="test",
description="test",
match_type="all",
patterns=[("all", "test")],
phrases=[("all", "test")],
)
assert isinstance(resp, Box)
assert resp.id == 1
@responses.activate
def test_dlp_add_type_any(zia, dlp_dicts):
responses.add(
method="POST",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries",
json=dlp_dicts[0],
match=[
matchers.json_params_matcher(
{
"name": "test",
"dictionaryType": "PATTERNS_AND_PHRASES",
"description": "test",
"customPhraseMatchType": "MATCH_ANY_CUSTOM_PHRASE_PATTERN_DICTIONARY",
"patterns": [
{
"action": "PATTERN_COUNT_TYPE_ALL",
"pattern": "test",
}
],
"phrases": [
{
"action": "PHRASE_COUNT_TYPE_ALL",
"phrase": "test",
}
],
}
)
],
status=200,
)
resp = zia.dlp.add_dict(
name="test",
description="test",
match_type="any",
patterns=[("all", "test")],
phrases=[("all", "test")],
)
assert isinstance(resp, Box)
assert resp.id == 1
def test_dlp_add_error(zia):
with pytest.raises(Exception) as e_info:
resp = zia.dlp.add_dict(name="test", description="test", match_type="test")
@responses.activate
def test_dlp_validate_dict(zia):
api_response = {
"err_msg": "Valid regular expression",
"err_parameter": None,
"err_position": 0,
"err_suggestion": None,
"id_list": None,
"status": 0,
}
responses.add(
method="POST",
url="https://zsapi.zscaler.net/api/v1/dlpDictionaries/validateDlpPattern",
json=api_response,
match=[
matchers.json_params_matcher(
{
"data": "test",
}
)
],
status=200,
)
resp = zia.dlp.validate_dict("test")
assert isinstance(resp, Box)
assert resp.status == 0
| 29.858859
| 98
| 0.501056
|
3236f9896370e2f2e24503d813cde669862389a7
| 1,319
|
py
|
Python
|
ooobuild/dyn/awt/x_window_peer.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/dyn/awt/x_window_peer.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/dyn/awt/x_window_peer.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is an auto-generated file produced by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.awt
from typing import TYPE_CHECKING
from ooo.oenv.env_const import UNO_ENVIRONMENT, UNO_RUNTIME
_DYNAMIC = False
if (not TYPE_CHECKING) and UNO_RUNTIME and UNO_ENVIRONMENT:
_DYNAMIC = True
if not TYPE_CHECKING and _DYNAMIC:
from com.sun.star.awt import XWindowPeer as XWindowPeer
setattr(XWindowPeer, '__ooo_ns__', 'com.sun.star.awt')
setattr(XWindowPeer, '__ooo_full_ns__', 'com.sun.star.awt.XWindowPeer')
setattr(XWindowPeer, '__ooo_type_name__', 'interface')
else:
from ...lo.awt.x_window_peer import XWindowPeer as XWindowPeer
__all__ = ['XWindowPeer']
| 35.648649
| 75
| 0.761183
|
ae08c2524bec43e968f687e41ce97d1619b816d8
| 683
|
py
|
Python
|
detectron2/layers/rotated_boxes.py
|
Shun14/detectron2-ResNeSt
|
cda53a237199da3bbe7526d41c41b9d8df4c4814
|
[
"Apache-2.0"
] | 344
|
2020-04-18T18:33:33.000Z
|
2020-12-04T08:34:30.000Z
|
detectron2/layers/rotated_boxes.py
|
ZhanqiZhang66/detectron2
|
be0d7283297f6314c8e683e0d1ff80b668aa9f4a
|
[
"Apache-2.0"
] | 82
|
2020-01-29T23:48:32.000Z
|
2021-09-08T02:09:30.000Z
|
detectron2/layers/rotated_boxes.py
|
ZhanqiZhang66/detectron2
|
be0d7283297f6314c8e683e0d1ff80b668aa9f4a
|
[
"Apache-2.0"
] | 66
|
2020-04-20T08:30:49.000Z
|
2020-12-06T12:55:12.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from __future__ import absolute_import, division, print_function, unicode_literals
# import torch
from detectron2 import _C
def pairwise_iou_rotated(boxes1, boxes2):
"""
Return intersection-over-union (Jaccard index) of boxes.
Both sets of boxes are expected to be in
(x_center, y_center, width, height, angle) format.
Arguments:
boxes1 (Tensor[N, 5])
boxes2 (Tensor[M, 5])
Returns:
iou (Tensor[N, M]): the NxM matrix containing the pairwise
IoU values for every element in boxes1 and boxes2
"""
return _C.box_iou_rotated(boxes1, boxes2)
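# Illustrative smoke test (hypothetical box values; requires torch and the
# compiled detectron2 _C extension that this module already imports):
if __name__ == "__main__":
    import torch
    boxes1 = torch.tensor([[10.0, 10.0, 20.0, 10.0, 0.0]])   # (cx, cy, w, h, angle)
    boxes2 = torch.tensor([[10.0, 10.0, 20.0, 10.0, 45.0]])
    print(pairwise_iou_rotated(boxes1, boxes2))  # 1x1 matrix of pairwise IoU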
| 28.458333
| 82
| 0.695461
|
c2f923fc596e9504a12af90809039e2dcf0f7a2d
| 3,366
|
py
|
Python
|
SUAVE/SUAVE-2.5.0/trunk/SUAVE/Components/Lofted_Body.py
|
Vinicius-Tanigawa/Undergraduate-Research-Project
|
e92372f07882484b127d7affe305eeec2238b8a9
|
[
"MIT"
] | null | null | null |
SUAVE/SUAVE-2.5.0/trunk/SUAVE/Components/Lofted_Body.py
|
Vinicius-Tanigawa/Undergraduate-Research-Project
|
e92372f07882484b127d7affe305eeec2238b8a9
|
[
"MIT"
] | null | null | null |
SUAVE/SUAVE-2.5.0/trunk/SUAVE/Components/Lofted_Body.py
|
Vinicius-Tanigawa/Undergraduate-Research-Project
|
e92372f07882484b127d7affe305eeec2238b8a9
|
[
"MIT"
] | null | null | null |
## @ingroup Components
# Lofted_Body.py
#
# Created:
# Modified: Dec 2016, T. MacDonald
# May 2020, E. Botero
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
from .Component import Component
from .Physical_Component import Physical_Component
from SUAVE.Core import DataOrdered
# ------------------------------------------------------------
# Lofted Body
# ------------------------------------------------------------
## @ingroup Components
class Lofted_Body(Physical_Component):
def __defaults__(self):
"""This sets the default values.
Assumptions:
None
Source:
N/A
Inputs:
None
Outputs:
None
Properties Used:
None
"""
self.tag = 'Lofted_Body'
self.Segments = DataOrdered() # think edges
# ------------------------------------------------------------
# Segment
# ------------------------------------------------------------
## @ingroup Components
class Segment(Component):
""" A class that stubs out what a segment is
Assumptions:
None
Source:
None
"""
def __defaults__(self):
"""This sets the default values.
Assumptions:
None
Source:
N/A
Inputs:
None
Outputs:
None
Properties Used:
None
"""
self.tag = 'Segment'
self.prev = None
self.next = None # for connectivity
# ------------------------------------------------------------
# Section
# ------------------------------------------------------------
## @ingroup Components
class Section(Component):
""" A class that stubs out what a section is
Assumptions:
None
Source:
None
"""
def __defaults__(self):
"""This sets the default values.
Assumptions:
None
Source:
N/A
Inputs:
None
Outputs:
None
Properties Used:
None
"""
self.tag = 'Section'
self.prev = None
self.next = None
# ------------------------------------------------------------
# Containers
# ------------------------------------------------------------
## @ingroup Components
class Section_Container(Component.Container):
""" This does nothing
Assumptions:
None
Source:
None
"""
def get_children(self):
""" Returns the components that can go inside
Assumptions:
None
Source:
N/A
Inputs:
None
Outputs:
None
Properties Used:
N/A
"""
return []
# ------------------------------------------------------------
# Handle Linking
# ------------------------------------------------------------
Section.Container = Section_Container
Lofted_Body.Section = Section
Lofted_Body.Segment = Segment
| 20.155689
| 72
| 0.366013
|
b632a2227c3e59e6db0168ab3181ecd87d0a6b15
| 30,214
|
py
|
Python
|
ryzen/frequency.py
|
akhilguliani/daemon
|
5faae4fb303da563d661571d93d9c7a7e6a36fb0
|
[
"Apache-2.0"
] | null | null | null |
ryzen/frequency.py
|
akhilguliani/daemon
|
5faae4fb303da563d661571d93d9c7a7e6a36fb0
|
[
"Apache-2.0"
] | 1
|
2020-06-12T06:14:14.000Z
|
2020-06-12T06:14:14.000Z
|
ryzen/frequency.py
|
akhilguliani/daemon
|
5faae4fb303da563d661571d93d9c7a7e6a36fb0
|
[
"Apache-2.0"
] | 1
|
2019-12-10T10:54:19.000Z
|
2019-12-10T10:54:19.000Z
|
import psutil
import subprocess
import time
import math
from collections import Counter
from msr import update_pstate_freq, print_pstate_values, get_pstate_freqs
TDP = 95000
def get_freq_bounds(cpu=0):
bounds = [0, 0]
freq_file = open("/sys/devices/system/cpu/cpu%d/cpufreq/scaling_max_freq" % cpu, 'r')
bounds[1] = int(freq_file.read())
freq_file.close()
freq_file = open("/sys/devices/system/cpu/cpu%d/cpufreq/scaling_min_freq" % cpu, 'r')
bounds[0] = int(freq_file.read())
freq_file.close()
return bounds
def get_freq_bounds_ryzen(cpu=0):
bounds = [800000, 3400000]
return bounds
def set_gov_userspace():
# Add check for intel_cpufreq
driver_file = open("/sys/devices/system/cpu/cpu0/cpufreq/scaling_driver")
driver = driver_file.read()
driver_file.close()
if "cpufreq" in driver:
for i in range(psutil.cpu_count()):
gov_file = "/sys/devices/system/cpu/cpu%d/cpufreq/scaling_governor" % i
gfd = open(gov_file, 'w')
gfd.write("userspace")
gfd.close()
else:
print("Unspported Driver: please enable intel/acpi_cpufreq from kerenl cmdline")
def quantize(value):
from decimal import Decimal
ret = int(Decimal(value/25000).quantize(Decimal("1"))*25000)
if ret > 3400000:
return 3400000
if ret < 800000:
return 800000
return ret
def read_freq(cpu=0):
f_file = "/sys/devices/system/cpu/cpu%d/cpufreq/scaling_cur_freq" % cpu
freq_file = open(f_file)
ret_val = freq_file.read()
freq_file.close()
return str(quantize(int(ret_val)))
def read_freq_real(cpu=0):
""" Return Real frequency as reported to cpufreq"""
f_file = "/sys/devices/system/cpu/cpu%d/cpufreq/scaling_cur_freq" % cpu
freq_file = open(f_file)
ret_val = freq_file.read()
freq_file.close()
return int(ret_val)
def write_freq(val, cpu=0):
bounds = get_freq_bounds_ryzen()
if val <= bounds[1] and val >= bounds[0]:
# print("Changing Freq to ", str(val))
f_file = "/sys/devices/system/cpu/cpu%d/cpufreq/scaling_setspeed" % cpu
freq_file = open(f_file, 'w')
freq_file.write(str(val))
freq_file.close()
return
def ryzen_write_freq(val, bounds, cpu=0):
""" AMD Ryzen Specific write frequency function
This function is needed when we have more than two frequencies for the same P-state
"""
states = [2200000, 3000000, 3400000]
if val > bounds[0] and val <= bounds[1]:
write_freq(states[1], cpu)
elif val <= bounds[0] and val >= 400000:
write_freq(states[0], cpu)
elif val > bounds[1] and val <= bounds[2]:
write_freq(states[2], cpu)
return
def reset_pstates(base=False):
states = [3400000, 3000000, 800000]
if base:
states = [3400000, 3000000, 2200000]
for i,val in enumerate(states):
update_pstate_freq(val, i)
return
def update_write_freq(val, cpu=0, turbo=False, update=True):
""" AMD Ryzen Specific write frequency loop
Here we update the relevant P-State to match the val given
the h/w can override us
"""
max_freq = 3400000
if turbo:
max_freq = 3800000
states = [2200000, 3000000, 3400000]
#if val in states:
# write_freq(val, cpu)
if val > states[0] and val < states[1]:
# Update state 1 to mean val
if update:
update_pstate_freq(val, 1)
write_freq(states[1], cpu)
elif val <= states[0] and val >= 400000:
if update:
update_pstate_freq(val, 2)
write_freq(states[0], cpu)
elif val >= states[1] and val <= max_freq:
# Overclocking
if update:
update_pstate_freq(val, 0)
write_freq(states[-1], cpu)
return
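# Illustrative trace of the mapping implemented above (derived from the
# branches of update_write_freq, not from hardware measurements): asking for
# 2.6 GHz rewrites P-state 1 to 2.6 GHz and then selects the nominal 3.0 GHz
# state for the core, which the hardware now runs at the rewritten frequency.
#   update_write_freq(2600000, cpu=0)
#     -> update_pstate_freq(2600000, 1)
#     -> write_freq(3000000, 0)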
def set_to_max_freq(cpu=None):
""" Set all the cpus to max frequency"""
max_freq = get_freq_bounds_ryzen()[1]
if cpu is None:
for c in range(psutil.cpu_count()):
update_write_freq(max_freq, c)
else:
write_freq(max_freq, cpu)
return max_freq
def set_to_freq(freq, cpu=None):
""" Set all the cpus to max frequency"""
if cpu is None:
for c in range(psutil.cpu_count()):
update_write_freq(freq, c)
else:
write_freq(freq, cpu)
return freq
def set_seq_freqs(freq_seq, num_cores):
""" Set all the cpus to sequentially reducing frequencies
For Ryzen we only have three P-states """
if num_cores > psutil.cpu_count(logical=False):
# limit to max number of cores
num_cores = psutil.cpu_count(logical=False)
for c in range(num_cores):
curr_freq = freq_seq[c%len(freq_seq)]
write_freq(curr_freq, c*2)
write_freq(curr_freq, c*2+1)
return
def power_at_freq(in_freq):
# freq is represented as 8 = 800MHz; 42 = 4200MHz
bounds = get_freq_bounds_ryzen()
if in_freq <= bounds[1] and in_freq >= bounds[0]:
freq = in_freq/100000
elif in_freq < bounds[0]:
freq = 8
elif in_freq > bounds[1]:
freq = 34
#return (0.0211*(freq**2) - 0.4697*freq + 7.7535)*1000
return round((0.231)*freq - 0.85, 4)*1000
def freq_at_power(power):
#return int(-0.0773*((power/1000)**2)+ 3.7436*(power/1000) - 4.6404)*100000
return quantize((((power/1000)+0.85)*13/3)*100000)
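# Illustrative self-check of the linear power model above (the numbers follow
# from these formulas alone, not from hardware measurements): 3.4 GHz maps to
# about 7 W in the mW-scale units used here, and the inverse mapping lands back
# on the 3.4 GHz step after the 25 MHz quantization.
assert abs(power_at_freq(3400000) - 7004.0) < 1e-6
assert freq_at_power(7004.0) == 3400000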
def change_freq(target_power, cpu=0, increase=False, Update=True):
""" Update frequency based on target power and power model """
# power differential to freq reduction factor
new_freq = freq_at_power(target_power)
old_freq = int(read_freq())
# print(new_freq, old_freq, target_power)
if abs(old_freq - new_freq) <= 25000:
return old_freq
new_power = power_at_freq(new_freq)
if increase:
while new_power < target_power:
old_power = power_at_freq(new_freq)
new_freq = new_freq + 100000
new_power = power_at_freq(new_freq)
if new_power == old_power:
new_freq = new_freq - 100000
break
else:
while new_power > target_power:
old_power = power_at_freq(new_freq)
new_freq = new_freq - 100000
new_power = power_at_freq(new_freq)
# print(new_freq, old_freq, new_power)
if new_power == old_power:
new_freq = new_freq + 100000
break
# if new_freq < bounds[0]:
# new_freq = bounds[0]
print("change_freq:", new_freq, old_freq, new_power, target_power)
if new_freq < 400000:
new_freq = 400000
if new_freq > 3400000:
new_freq = 3400000
print("change_freq:", new_freq, old_freq, new_power, target_power)
# WARN: Hardcoded cpu numbers below
# update_write_freq(new_freq, cpu, update=Update)
# if (cpu % 2) == 0:
# update_write_freq(new_freq, cpu+1, update=Update)
# else:
# update_write_freq(new_freq, cpu-1, update=Update)
return new_freq
def change_freq_std(target_pwr, current_pwr, old_freq=None, cpu=0, increase=False, Update=True):
""" Update frequency based on target power and actulal power value """
# TODO: Fix this function - try to determine when to stop
# probably better to make this a class and track some of the variables and
# have a way of resetting
# power differential to freq reduction factor
new_freq = None
if old_freq is None:
new_freq = int(read_freq(cpu))
else:
new_freq = old_freq
power_diff = abs(current_pwr - target_pwr)
step = 25000
# Select the right step size
if power_diff < 300:
# too close; better to settle than oscillate
return new_freq
elif power_diff > 1500 and power_diff < 3000:
step = 100000
elif power_diff > 3000 and power_diff < 7000:
step = 200000
elif power_diff > 7000:
step = 500000
if increase:
new_freq = new_freq + step
else:
new_freq = new_freq - step
bounds = get_freq_bounds_ryzen()
# if new_freq < bounds[0]:
# new_freq = bounds[0]
if new_freq < 800000:
new_freq = 800000
if new_freq > bounds[1]:
new_freq = bounds[1]
print("ch_freq_std ", cpu, target_pwr, new_freq, increase, power_diff, Update)
# WARN: Hardcoded cpu numbers below
#write_freq(new_freq, cpu)
# update_write_freq(new_freq, cpu, update=Update)
# if (cpu % 2) == 0:
# update_write_freq(new_freq, cpu+1, update=Update)
# else:
# update_write_freq(new_freq, cpu-1, update=Update)
return new_freq
def get_new_freq(target_pwr, current_pwr, old_freq, increase=False):
""" Update frequency based on target power and actulal power value per core"""
bounds = get_freq_bounds_ryzen()
new_freq = old_freq
power_diff = abs(current_pwr - target_pwr)
step = 25000
direction = math.copysign(1, (target_pwr - current_pwr))
# Select the right step size
if power_diff < 100:
# too close; better to settle than oscillate
return new_freq
elif power_diff > 1000 and power_diff < 4000:
step = 100000
elif power_diff > 4000 and power_diff < 7000:
step = 200000
elif power_diff > 7000:
step = 1000000
new_freq = old_freq + direction*step
if new_freq >= bounds[1]-25000:
# at max value
new_freq = bounds[1]
elif new_freq <= bounds[0]+25000:
# at lowest
new_freq = bounds[0]
print("new_freq_calculator ", target_pwr, new_freq, increase, power_diff, step, direction)
return new_freq
def keep_limit(curr_power, limit, cpu=0, last_freq=None, first_limit=True, leader=False):
""" Follow the power limit """
new_limit = limit
old_freq = last_freq
if not first_limit:
# if curr_power - limit > 0 and new_limit > 1000:
# new_limit = new_limit - abs(curr_power - new_limit)/2
# #new_limit = new_limit - 1000
# elif curr_power - limit < 0 and new_limit > 1000:
# new_limit = new_limit + abs(curr_power - new_limit)/4
# #new_limit = new_limit + 1000
#
# # print("In keep_limit ", limit)
tolerance = 100
if curr_power - limit > tolerance:
# reduce frequency
old_freq = change_freq_std(new_limit, curr_power, last_freq, cpu, Update=leader)
elif limit - curr_power > tolerance:
# print("Increase")
old_freq = change_freq_std(new_limit, curr_power, last_freq, cpu, increase=True, Update=leader)
else:
# First Step
if curr_power > limit:
# reduce frequency
old_freq = change_freq(new_limit, cpu, Update=leader)
elif curr_power < limit:
# print("Increase")
old_freq = change_freq(new_limit, cpu, increase=True, Update=leader)
return old_freq
def set_per_core_freq_old(freq_list, cores):
""" Write Quantized Frequency Limits for given lists """
print("updating cores: ", cores, [quantize(f) for f in freq_list])
for i, core in enumerate(cores):
# print(i, core, quantize(freq_list[i]))
update_write_freq(quantize(freq_list[i]), core, update=True)
if core % 2 == 0:
write_freq(quantize(freq_list[i]), core+1)
else:
write_freq(quantize(freq_list[i]), core-1)
return
def set_per_core_freq(freq_list, cores, leaders=None):
""" Write Quantized Frequency Limits for given lists """
bounds = get_freq_bounds()
freqs = [quantize(f) for f in freq_list]
freqs_set = set()
if leaders != None:
# hard coding leader core concept
freqs_set = set([freqs[i] for i in leaders])
else:
freqs_set = set(freqs) # unique frequencies
    freq_dict = {0:set(), 1:set(), 2:set()} # requested frequencies per P-state bucket
    count_dict = {0:0, 1:0, 2:0} # distinct frequencies seen in each bucket
    need_sep = [False, False, False] # does a bucket need more than one P-state entry?
    # assign the requested frequencies to separate P-state buckets
for val in freqs_set:
if val <= bounds[0]:
count_dict[2] += 1
freq_dict[2].add(min(val,bounds[0]))
if count_dict[2] > 1:
need_sep[2] = True
elif val > bounds[0] and val <= 3000000:
count_dict[1] +=1
freq_dict[1].add(min(val,3000000))
if count_dict[1] > 1:
need_sep[1] = True
elif val > 3000000 and val <= bounds[1]:
count_dict[0] +=1
freq_dict[0].add(min(val,bounds[1]))
if count_dict[0] > 1:
need_sep[0] = True
print(need_sep)
print(count_dict)
print(freq_dict)
for key, value in freq_dict.items():
freq_dict[key] = set(value)
print(freq_dict)
    # None-skipping min/max helper adapted from https://stackoverflow.com/questions/6254871/python-minnone-x
skipNone = lambda fn : lambda *args : fn(val for val in args if val is not None)
# initialize bounds with original values
new_bounds = get_pstate_freqs()
up_freq = [None,None,None]
update_state = [False]*3
# Select three P_states
if need_sep[2]:
up_freq[2] = min(freq_dict[2])
up_freq[1] = max(freq_dict[2])
new_bounds = [skipNone(min)(up_freq[2], new_bounds[0]), skipNone(min)(up_freq[1],new_bounds[1]), skipNone(max)(up_freq[0],new_bounds[2])]
update_state[2] = True
update_state[1] = True
elif freq_dict[2] != set():
new_bounds[0] = freq_dict[2].pop()
if need_sep[1]:
up_freq[2] = min(freq_dict[1])
up_freq[1] = max(freq_dict[1])
new_bounds = [skipNone(min)(up_freq[2], new_bounds[0]), skipNone(min)(up_freq[1], new_bounds[1]), skipNone(max)(up_freq[0],new_bounds[2])]
update_state[2] = True
update_state[1] = True
elif freq_dict[1] != set():
new_bounds[1] = freq_dict[1].pop()
if need_sep[0]:
up_freq[1] = min(freq_dict[0])
up_freq[0] = max(freq_dict[0])
new_bounds = [skipNone(min)(up_freq[2], new_bounds[0]), skipNone(max)(up_freq[1],new_bounds[1]), skipNone(max)(up_freq[0],new_bounds[2])]
update_state[0] = True
update_state[1] = True
elif freq_dict[0] != set():
new_bounds[2] = freq_dict[0].pop()
# Update the P-States as needed
for state, freq in enumerate(new_bounds[::-1]):
if update_state[state]:
update_pstate_freq(freq, state)
# Finally write the appropriate freq values
print(new_bounds)
# print_pstate_values()
for i, core in enumerate(cores):
ryzen_write_freq(freqs[i], new_bounds, cpu=core)
if core % 2 == 0:
ryzen_write_freq(freqs[i], new_bounds, cpu=core+1)
else:
ryzen_write_freq(freqs[i], new_bounds, cpu=core-1)
return
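# --- Illustrative example (editor's sketch, not part of the original module) ---
# Shows how set_per_core_freq() is meant to be called: one target frequency in
# kHz per core, with optional leader indices selecting which entries drive the
# P-state planning. The core ids and frequencies below are made up.
def example_set_freqs():
    cores = [0, 2, 4, 6]
    freqs = [3400000, 2200000, 2200000, 800000]  # kHz, one entry per core
    # Plan P-states from entries 0 and 3 only, then write limits for every core
    # (and its SMT sibling, which set_per_core_freq() handles internally).
    set_per_core_freq(freqs, cores, leaders=[0, 3])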
def keep_limit_prop_freq(curr_power, limit, hi_freqs, low_freqs, hi_shares, low_shares, high_cores, low_cores, first_limit=False, lp_active=False):
""" Proportional frequency power controller adapted from skylake branch """
tolerance = 500
max_per_core = max(hi_freqs)
max_freq = 3400000
alpha = abs(limit-curr_power)/TDP
# print(limit)
if abs(curr_power - limit) < tolerance:
        # at the power limit; nothing to do
return False, hi_freqs, low_freqs
elif (limit - curr_power) > -1*tolerance:
# Below limit
# We have excess power
extra_freq = alpha * max_per_core
        # Distribute the excess power (as extra frequency) among the cores.
        # First, check whether the high-power apps are already at max frequency.
if not (hi_shares is None):
shares_per_core = [hi_shares[i] if not (math.isclose(hi_freqs[i],3400000,rel_tol=0.001)) else 0 for i in range(len(high_cores))]
sum_shares = sum(shares_per_core)
if not math.isclose(sum_shares, 0):
add_hi = [(s * extra_freq / sum_shares) for s in shares_per_core]
extra_freq = extra_freq - sum(add_hi)
hi_freqs = [ min(x+y, max_freq) for x,y in zip(add_hi, hi_freqs)]
set_per_core_freq(hi_freqs, high_cores)
max_per_core = max(hi_freqs)
if not first_limit:
if extra_freq > 200000 and lp_active:
if not (low_shares is None):
add_lo = [s * extra_freq for s in low_shares]
extra_freq = extra_freq - sum(add_lo)
low_freqs = [ min(x+y, max_per_core) for x,y in zip(add_lo, low_freqs)]
set_per_core_freq(low_freqs, low_cores)
return True, hi_freqs, low_freqs
return False, hi_freqs, low_freqs
elif (curr_power - limit) > tolerance:
# Above limit
# We have no excess power
# remove extra frequency from low power first
extra_freq = alpha * max_per_core
if lp_active and not(low_shares is None):
rem_lo = [s * extra_freq for s in low_shares]
extra_freq = extra_freq - sum(rem_lo)
low_freqs = [ y-x for x,y in zip(rem_lo, low_freqs)]
set_per_core_freq(low_freqs, low_cores)
# remove remaining frequency from hi power
if not (hi_shares is None):
shares_per_core = [hi_shares[i] if not (math.isclose(hi_freqs[i],800000,rel_tol=0.05)) else 0 for i in range(len(high_cores))]
sum_shares = sum(shares_per_core)
if not math.isclose(sum_shares, 0):
rem_hi = [(s * extra_freq)/sum_shares for s in shares_per_core]
extra_freq = extra_freq - sum(rem_hi)
hi_freqs = [ y-x for x,y in zip(rem_hi, hi_freqs)]
set_per_core_freq(hi_freqs, high_cores)
return False, hi_freqs, low_freqs
def prop_share_redist(shares,freqs,cores,extra_power,lims,power,max_per_core,first_limit=False, above=False):
""" Propshare redistribution formulae"""
shares_per_core = None
if above:
shares_per_core = [shares[i] if not (math.isclose(freqs[i]/1000,800, abs_tol=25)) else 0 for i in range(len(cores))]
else:
shares_per_core = [shares[i] if not (math.isclose(3400, freqs[i]/1000, abs_tol=25)) else 0 for i in range(len(cores))]
sum_shares = sum(shares_per_core)
if not above:
print("Below Limit", sum_shares)
else:
print("Above Limit", sum_shares)
if not math.isclose(sum_shares, 0):
add_hi = [(s * extra_power / sum_shares) for s in shares_per_core]
extra_power = extra_power - sum(add_hi)
if above:
lims = [max(y-x, 0) for x,y in zip(add_hi, lims)]
if first_limit:
freqs = [freq_at_power(l) for l in lims]
else:
freqs = [get_new_freq(l,a,f,increase=False) for l,a,f in zip(lims, power, freqs)]
else:
lims = [min(x+y, max_per_core) for x,y in zip(add_hi, lims)]
if first_limit:
freqs = [freq_at_power(l) for l in lims]
else:
freqs = [get_new_freq(l,a,f,increase=True) for l,a,f in zip(lims, power, freqs)]
return freqs,extra_power,lims
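# --- Illustrative example (editor's sketch, not part of the original module) ---
# Worked example of the proportional split used above (numbers are made up):
# each unsaturated core i receives shares[i] * extra_power / sum(shares), so
# shares of (2, 1, 1) and 4000 units of headroom give 2000/1000/1000. Cores
# already pinned at the 3.4 GHz ceiling (below the limit) or the 800 MHz floor
# (above the limit) get a zero share and are skipped.
def example_share_split(shares=(2, 1, 1), extra_power=4000):
    total = sum(shares)
    return [s * extra_power / total for s in shares]  # -> [2000.0, 1000.0, 1000.0]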
def keep_limit_prop_power(curr_power, limit, hi_lims, low_lims, hi_freqs, low_freqs,
hi_shares, low_shares, high_cores, low_cores, hi_power, low_power,
first_limit=False, lp_active=False, hi_lead=None, low_lead=None):
""" Proportional Core Power Power controller adapted from skylake branch
limit is package power limit; hi_lims and low_lims are per core limits
TODO: Extend the shares mechanism to low power apps"""
tolerance = 250
max_power = 10000
max_per_core = max(max(hi_lims), max(hi_power), max_power)
# max_freq = 3400000
alpha = abs(limit-curr_power)/TDP
# print(limit, curr_power)
if abs(curr_power - limit) < tolerance:
        # at the power limit; nothing to do
return False, hi_lims, low_lims, hi_freqs, low_freqs
elif (limit - curr_power) > -1*tolerance:
# Below limit
# We have excess power
extra_power = alpha * max_per_core
        # Distribute the excess power (as extra frequency) among the cores.
        # First, check whether the high-power apps are already at max frequency.
if not (hi_shares is None):
shares_per_core = [hi_shares[i] if not (math.isclose(3400, hi_freqs[i]/1000, abs_tol=25)) else 0 for i in range(len(high_cores))]
sum_shares = sum(shares_per_core)
print("Below Limit", sum_shares)
if not math.isclose(sum_shares, 0):
add_hi = [(s * extra_power / sum_shares) for s in shares_per_core]
extra_power = extra_power - sum(add_hi)
hi_lims = [min(x+y, max_per_core) for x,y in zip(add_hi, hi_lims)]
if first_limit:
hi_freqs = [freq_at_power(l) for l in hi_lims]
else:
hi_freqs = [get_new_freq(l,a,f,increase=True) for l,a,f in zip(hi_lims, hi_power, hi_freqs)]
set_per_core_freq(hi_freqs, high_cores, leaders=hi_lead)
# max_per_core = min(max(hi_lims), max(hi_power))
# detect saturation
# hi_lims = [y if (math.isclose(3400, f/1000, abs_tol=25)) or (math.isclose(f/1000,800,abs_tol=25)) else x for x,y,f in zip(hi_lims, hi_power, hi_freqs)]
# hi_lims = [y if (math.isclose(3400, f/1000, abs_tol=25)) else x for x,y,f in zip(hi_lims, hi_power, hi_freqs)]
if not first_limit:
print("Extra Power: ", extra_power)
if extra_power > 500:
if (not (low_shares is None)) and lp_active:
low_freqs, extra_power, low_lims = prop_share_redist(low_shares,low_freqs,low_cores,
extra_power,low_lims,low_power,max_per_core,
first_limit=first_limit, above=False)
set_per_core_freq(low_freqs, low_cores, leaders=low_lead)
return True, hi_lims, low_lims, hi_freqs, low_freqs
return False, hi_lims, low_lims, hi_freqs, low_freqs
elif (curr_power - limit) > tolerance:
# Above limit
# We have no excess power
# remove extra frequency from low power first
print("Above Limit")
extra_power = alpha * max_per_core
if lp_active and not(low_shares is None):
low_freqs, extra_power, low_lims = prop_share_redist(low_shares,low_freqs,low_cores,
extra_power,low_lims,low_power,max_per_core,
first_limit=first_limit, above=True)
set_per_core_freq(low_freqs, low_cores, leaders=low_lead)
# detect saturation
low_lims = [ y if (math.isclose(3400, f/1000,abs_tol=25)) else x for x,y,f in zip(low_lims, low_power, low_freqs)]
# remove remaining frequency from hi power
if not (hi_shares is None):
shares_per_core = [hi_shares[i] if not (math.isclose(hi_freqs[i]/1000,800, abs_tol=25)) else 0 for i in range(len(high_cores))]
sum_shares = sum(shares_per_core)
if not math.isclose(sum_shares, 0):
rem_hi = [(s * extra_power)/sum_shares for s in shares_per_core]
extra_power = extra_power - sum(rem_hi)
hi_lims = [ y-x for x,y in zip(rem_hi, hi_lims)]
if first_limit:
hi_freqs = [freq_at_power(l) for l in hi_lims]
else:
hi_freqs = [get_new_freq(l,a,f,increase=False) for l,a,f in zip(hi_lims, hi_power, hi_freqs)]
set_per_core_freq(hi_freqs, high_cores, leaders=hi_lead)
# detect saturation
hi_lims = [ y if (math.isclose(3400, f/1000,abs_tol=25)) else x for x,y,f in zip(hi_lims, hi_power, hi_freqs)]
print(hi_freqs)
return False, hi_lims, low_lims, hi_freqs, low_freqs
def freq_at_perf(perf):
"""
    Performance model: map a performance target (1-100) to a frequency in kHz.
    Can be made into a per-application function; we assume perfect (linear)
    performance scaling in our case.
"""
factor = 26262.63
const = 7.74e5
if perf <= 1:
return 800000
elif perf >= 100:
return 3400000
return quantize(factor*perf + const)
def perf_at_freq(freq):
"""
    Performance model: map a frequency in kHz to a performance value (1-100).
    Can be made into a per-application function; we assume perfect (linear)
    performance scaling in our case.
"""
factor = (3.81e-5)
const = -29.46
if freq >= 3400000:
return 100
elif freq <= 800000:
return 1
return int(factor*freq + const)
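# --- Illustrative example (editor's sketch, not part of the original module) ---
# The two linear models above are approximate inverses over the 800 MHz-3.4 GHz
# range (frequencies are in kHz, performance is a 1-100 scale).
def example_perf_model_roundtrip():
    for perf in (1, 25, 50, 75, 100):
        freq = freq_at_perf(perf)
        # perf_at_freq(freq) should land close to the original value, up to
        # quantization of the frequency step.
        print(perf, freq, perf_at_freq(freq))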
def get_new_freq_perf(target_perf, current_perf, old_freq, increase=False):
""" Update frequency based on target perf and actulal perf value per core"""
bounds = get_freq_bounds_ryzen()
new_freq = old_freq
perf_diff = abs(current_perf - target_perf)
step = 100000
direction = math.copysign(1, (target_perf - current_perf))
# Select the right step size
if perf_diff <= 1:
        # too close to the target; better to settle than oscillate
return new_freq
elif perf_diff > 1 and perf_diff <= 10:
step = 25000
elif perf_diff > 10 and perf_diff <= 50:
step = 200000
elif perf_diff > 50:
step = 400000
new_freq = old_freq + direction*step
if new_freq >= bounds[1]:
# at max value
new_freq = bounds[1]
elif new_freq <= bounds[0]:
# at lowest
new_freq = bounds[0]
print("new_freq_calculator ", target_perf, new_freq, increase, perf_diff, step, direction)
return new_freq
def keep_limit_prop_perf(curr_power, limit, hi_lims, low_lims, hi_freqs, low_freqs,
hi_shares, low_shares, high_cores, low_cores, hi_perf, low_perf,
first_limit=False, lp_active=False):
""" Proportional Core Power Power controller adapted from skylake branch
limit is package power limit; hi_lims and low_lims are per core limits
TODO: Extend the shares mechanism to low power apps"""
tolerance = 250
max_per_core = 100
min_per_core = 1
# max_freq = 3400000
alpha = abs(limit-curr_power)/TDP
if abs(curr_power - limit) < tolerance:
        # at the power limit; nothing to do
return False, hi_lims, low_lims, hi_freqs, low_freqs
elif (limit - curr_power) > -1*tolerance:
# Below limit
# We have excess power
extra_perf = alpha * max_per_core
        # Distribute the excess power (as extra performance) among the cores.
        # First, check whether the high-priority apps are already at max frequency.
if not (hi_shares is None):
shares_per_core = [hi_shares[i] if not (math.isclose(3400, hi_freqs[i]/1000, abs_tol=25)) else 0 for i in range(len(high_cores))]
sum_shares = sum(shares_per_core)
print("Below Limit", sum_shares)
if not math.isclose(sum_shares, 0):
add_hi = [(s * extra_perf / sum_shares) for s in shares_per_core]
extra_perf = extra_perf - sum(add_hi)
hi_lims = [min(x+y, max_per_core) for x,y in zip(add_hi, hi_lims)]
if first_limit:
hi_freqs = [freq_at_perf(l) for l in hi_lims]
else:
hi_freqs = [get_new_freq_perf(l,a,f,increase=True) for l,a,f in zip(hi_lims, hi_perf, hi_freqs)]
set_per_core_freq(hi_freqs, high_cores, leaders=[0,4])
# max_per_core = min(max(hi_lims), max(hi_power))
# detect saturation
# hi_lims = [y if (math.isclose(3400, f/1000, abs_tol=25)) or (math.isclose(f/1000,800,abs_tol=25)) else x for x,y,f in zip(hi_lims, hi_power, hi_freqs)]
# hi_lims = [y if (math.isclose(3400, f/1000, abs_tol=25)) else x for x,y,f in zip(hi_lims, hi_power, hi_freqs)]
if not first_limit:
if extra_perf > 100 and lp_active:
if not (low_shares is None):
add_lo = [s * extra_perf for s in low_shares]
extra_perf = extra_perf - sum(add_lo)
low_lims = [ min(x+y, max_per_core) for x,y in zip(add_lo, low_lims)]
low_freqs = [freq_at_perf(l) for l in low_lims]
set_per_core_freq(low_freqs, low_cores)
return True, hi_lims, low_lims, hi_freqs, low_freqs
return False, hi_lims, low_lims, hi_freqs, low_freqs
elif (curr_power - limit) > tolerance:
# Above limit
# We have no excess power
# remove extra frequency from low power first
# print("Above Limit")
extra_perf = alpha * max_per_core
print("Above Limit: ", extra_perf)
if lp_active and not(low_shares is None):
rem_lo = [s * extra_perf for s in low_shares]
extra_perf = extra_perf - sum(rem_lo)
low_lims = [ y-x for x,y in zip(rem_lo, low_lims)]
low_freqs = [freq_at_perf(l) for l in low_lims]
set_per_core_freq(low_freqs, low_cores)
# remove remaining frequency from hi power
if not (hi_shares is None):
shares_per_core = [hi_shares[i] if not (math.isclose(hi_freqs[i]/1000,800, abs_tol=100)) else 0 for i in range(len(high_cores))]
sum_shares = sum(shares_per_core)
if not math.isclose(sum_shares, 0):
rem_hi = [(s * extra_perf)/sum_shares for s in shares_per_core]
extra_perf = extra_perf - sum(rem_hi)
hi_lims = [ max(y-x, min_per_core) for x,y in zip(rem_hi, hi_lims)]
if first_limit:
hi_freqs = [freq_at_perf(l) for l in hi_lims]
else:
hi_freqs = [get_new_freq_perf(l,a,f,increase=False) for l,a,f in zip(hi_lims, hi_perf, hi_freqs)]
set_per_core_freq(hi_freqs, high_cores, leaders=[0,4])
# detect saturation
hi_lims = [ y if (math.isclose(3400, f/1000,abs_tol=25)) else x for x,y,f in zip(hi_lims, hi_perf, hi_freqs)]
print(hi_freqs)
return False, hi_lims, low_lims, hi_freqs, low_freqs
| 40.447122 | 170 | 0.615112 |
7184cd4e37e0ecfd3da303b30b6f6d3751ef1b50 | 8,656 | py | Python | overtime/selectors.py | Atwinenickson/lendsuphumanresourcemanagement | b46df164d59a4e94300376d679e07bd9a60d6343 | ["MIT", "Unlicense"] | 36 | 2019-11-26T11:46:32.000Z | 2022-02-17T13:18:18.000Z | overtime/selectors.py | Atwinenickson/lendsuphumanresourcemanagement | b46df164d59a4e94300376d679e07bd9a60d6343 | ["MIT", "Unlicense"] | 13 | 2020-02-14T09:30:16.000Z | 2022-03-12T00:58:09.000Z | overtime/selectors.py | Atwinenickson/lendsuphumanresourcemanagement | b46df164d59a4e94300376d679e07bd9a60d6343 | ["MIT", "Unlicense"] | 16 | 2019-06-14T12:11:29.000Z | 2022-02-14T15:16:07.000Z |
from django.contrib.auth import get_user_model
from employees.models import Employee
from ems_auth.models import SolitonUser
from organisation_details.selectors import get_team_instance, get_is_supervisor_in_team, \
get_is_hod_in_department
from overtime.models import OvertimeApplication, OvertimePlan, OvertimeSchedule
from payroll.selectors import get_current_month, get_current_year
User = get_user_model()
def get_overtime_application(id):
return OvertimeApplication.objects.get(pk=id)
def get_hod_pending_overtime_applications(hod_department):
# Get the pending overtime applications for the particular hod department
pending_applications = OvertimeApplication.objects.filter(status="Pending", HOD_approval="Pending",
supervisor_approval="Approved")
hod_pending_applications = []
for pending_application in pending_applications:
if pending_application.applicant.department == hod_department:
hod_pending_applications.append(pending_application)
return hod_pending_applications
def get_hod_approved_overtime_applications(user):
hod_department = user.solitonuser.employee.department
# Get the approved overtime applications for the particular hod department
approved_applications = OvertimeApplication.objects.filter(status="Pending", HOD_approval="Approved")
hod_pending_applications = []
for approved_application in approved_applications:
if approved_application.supervisee.department == hod_department:
hod_pending_applications.append(approved_application)
return hod_pending_applications
def get_cfo_pending_overtime_applications():
pending_applications = OvertimeApplication.objects.filter(status="Pending", cfo_approval="Pending",
HOD_approval="Approved")
return pending_applications
def get_ceo_pending_overtime_applications():
pending_applications = OvertimeApplication.objects.filter(status="Pending", cfo_approval="Approved",
ceo_approval="Pending")
return pending_applications
def get_approved_overtime_applications(employee: Employee):
approved_applications = OvertimeApplication.objects.filter(status="Approved", applicant=employee)
return approved_applications
def get_all_overtime_applications():
overtime_applications = OvertimeApplication.objects.all()
return overtime_applications
def get_supervisor_pending_overtime_applications(supervisor):
pending_applications = OvertimeApplication.objects.filter(supervisor_approval="Pending")
supervisor_team = get_team_instance(supervisor)
supervisor_pending_applications = []
for pending_application in pending_applications:
applicant = pending_application.applicant
applicant_team = get_team_instance(applicant)
        if applicant_team.id == supervisor_team.id:
supervisor_pending_applications.append(pending_application)
return supervisor_pending_applications
def get_hr_pending_overtime_applications():
pending_applications = OvertimeApplication.objects.filter(HR_approval="Pending", HOD_approval="Approved",
status="Pending")
return pending_applications
def get_hod_in_department(approver):
pass
def get_pending_overtime_applications(approver_user: User) -> dict:
"""Return a dictionary of pending applications per user role"""
hod_pending_applications = OvertimeApplication.objects.none()
hr_pending_applications = OvertimeApplication.objects.none()
cfo_pending_applications = OvertimeApplication.objects.none()
ceo_pending_applications = OvertimeApplication.objects.none()
supervisor_pending_applications = OvertimeApplication.objects.none()
is_supervisor = get_is_supervisor_in_team(approver_user)
is_hod = get_is_hod_in_department(approver_user)
if is_hod:
hod_department = approver_user.solitonuser.employee.department
hod_pending_applications = get_hod_pending_overtime_applications(hod_department)
if approver_user.is_hr:
hr_pending_applications = get_hr_pending_overtime_applications()
if approver_user.is_cfo:
cfo_pending_applications = get_cfo_pending_overtime_applications()
if approver_user.is_ceo:
ceo_pending_applications = get_ceo_pending_overtime_applications()
if is_supervisor:
supervisor = approver_user.solitonuser.employee
supervisor_pending_applications = get_supervisor_pending_overtime_applications(supervisor)
pending_applications = {
"supervisor_pending_applications": supervisor_pending_applications,
"hod_pending_applications": hod_pending_applications,
"hr_pending_applications": hr_pending_applications,
"cfo_pending_applications": cfo_pending_applications,
"ceo_pending_applications": ceo_pending_applications,
}
return pending_applications
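# --- Illustrative example (editor's sketch, not part of the original module) ---
# Shows how a view might consume the per-role dictionary returned by
# get_pending_overtime_applications(). "request" stands in for a Django
# request object; the context keys are made up for illustration.
def example_pending_applications_context(request):
    pending = get_pending_overtime_applications(request.user)
    return {
        "supervisor_pending": pending["supervisor_pending_applications"],
        "hod_pending": pending["hod_pending_applications"],
        "hr_pending": pending["hr_pending_applications"],
        "cfo_pending": pending["cfo_pending_applications"],
        "ceo_pending": pending["ceo_pending_applications"],
    }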
def get_recent_overtime_applications(limit, applicant):
return OvertimeApplication.objects.filter(applicant=applicant).order_by('-id')[:limit]
def get_all_overtime_plans():
overtime_plans = OvertimePlan.objects.all()
return overtime_plans
def get_most_recent_overtime_plans():
overtime_plans = OvertimePlan.objects.all().order_by('-id')
return overtime_plans
def get_overtime_plan(overtime_plan_id):
return OvertimePlan.objects.get(pk=overtime_plan_id)
def get_overtime_schedules(overtime_plan):
return OvertimeSchedule.objects.filter(overtime_plan=overtime_plan).order_by('-id')
def get_hr_pending_overtime_plans():
pending_overtime_plans = OvertimePlan.objects.filter(HR_approval="Pending",
status="Pending").order_by("-id")
return pending_overtime_plans
def get_ceo_pending_overtime_plans():
pending_overtime_plans = OvertimePlan.objects.filter(status="Pending", cfo_approval="Pending").order_by("-id")
return pending_overtime_plans
def get_pending_overtime_plans(approver):
"""Get pending overtime plans for each user"""
pending_overtime_plans = None
if approver.is_ceo:
pending_overtime_plans = get_ceo_pending_overtime_plans()
return pending_overtime_plans
def get_supervisor_users(applicant):
department = applicant.department
all_supervisor_users = User.objects.filter(is_supervisor=True)
users = []
for supervisor_user in all_supervisor_users:
if supervisor_user.solitonuser.employee.department == department:
users.append(supervisor_user)
return users
def get_supervisor_user(applicant):
team = get_team_instance(applicant)
supervisor = team.supervisor
supervisor_user = supervisor.solitonuser.user
return supervisor_user
def get_hod_users(applicant):
department = applicant.department
all_hod_users = User.objects.filter(is_hod=True)
users = []
for hod_user in all_hod_users:
if hod_user.solitonuser.employee.department == department:
users.append(hod_user)
return users
def get_hr_users():
all_hr_users = User.objects.filter(is_hr=True)
return all_hr_users
def get_cfo_users():
all_cfo_users = User.objects.filter(is_cfo=True)
return all_cfo_users
def get_ceo_users():
all_ceo_users = User.objects.filter(is_ceo=True)
return all_ceo_users
def get_is_overtime_approver(approver_user: User) -> bool:
is_supervisor = get_is_supervisor_in_team(approver_user)
is_hod = get_is_hod_in_department(approver_user)
return is_hod or approver_user.is_hr or approver_user.is_cfo or approver_user.is_ceo or is_supervisor
def get_approved_overtime_applications_in_current_month_and_year(applicant: Employee):
"""Pick applicant's overtime applications for the current month and year that are approved"""
current_month = get_current_month()
current_year = get_current_year()
overtime_applications = OvertimeApplication.objects.filter(date__month=current_month,
date__year=current_year,
applicant=applicant,
status="Approved")
return overtime_applications
def get_all_non_expired_overtime_applications():
# Only pending non expired applications can be expired
overtime_applications = OvertimeApplication.objects.filter(expired=False, status="Pending")
return overtime_applications
| 37.471861 | 114 | 0.748614 |
82885a54d5c5a4496fbb38b7582cd78a5821aed7 | 32,817 | py | Python | Adelphi Academic Calendar/skill/skill_env/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py | EnriqueGambra/Amazon-Alexa-Skill | 198ed51bef555eee006041fef0bcbf5c955142d5 | ["MIT"] | null | null | null | Adelphi Academic Calendar/skill/skill_env/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py | EnriqueGambra/Amazon-Alexa-Skill | 198ed51bef555eee006041fef0bcbf5c955142d5 | ["MIT"] | null | null | null | Adelphi Academic Calendar/skill/skill_env/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py | EnriqueGambra/Amazon-Alexa-Skill | 198ed51bef555eee006041fef0bcbf5c955142d5 | ["MIT"] | 1 | 2019-10-11T17:15:20.000Z | 2019-10-11T17:15:20.000Z |
"""
SecureTransport support for urllib3 via ctypes.
This makes platform-native TLS available to urllib3 users on macOS without the
use of a compiler. This is an important feature because the Python Package
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
this is to give macOS users an alternative solution to the problem, and that
solution is to use SecureTransport.
We use ctypes here because this solution must not require a compiler. That's
because pip is not allowed to require a compiler either.
This is not intended to be a seriously long-term solution to this problem.
The hope is that PEP 543 will eventually solve this issue for us, at which
point we can retire this contrib module. But in the short term, we need to
solve the impending tire fire that is Python on Mac without this kind of
contrib module. So...here we are.
To use this module, simply import and inject it::
import urllib3.contrib.securetransport
urllib3.contrib.securetransport.inject_into_urllib3()
Happy TLSing!
This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import threading
import weakref
from .. import util
from ._securetransport.bindings import (
Security, SecurityConst, CoreFoundation
)
from ._securetransport.low_level import (
_assert_no_error, _cert_array_from_pem, _temporary_keychain,
_load_client_cert_chain
)
try: # Platform-specific: Python 2
from socket import _fileobject
except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
# SNI always works
HAS_SNI = True
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
# This dictionary is used by the read callback to obtain a handle to the
# calling wrapped socket. This is a pretty silly approach, but for now it'll
# do. I feel like I should be able to smuggle a handle to the wrapped socket
# directly in the SSLConnectionRef, but for now this approach will work I
# guess.
#
# We need to lock around this structure for inserts, but we don't do it for
# reads/writes in the callbacks. The reasoning here goes as follows:
#
# 1. It is not possible to call into the callbacks before the dictionary is
# populated, so once in the callback the id must be in the dictionary.
# 2. The callbacks don't mutate the dictionary, they only read from it, and
# so cannot conflict with any of the insertions.
#
# This is good: if we had to lock in the callbacks we'd drastically slow down
# the performance of this code.
_connection_refs = weakref.WeakValueDictionary()
_connection_ref_lock = threading.Lock()
# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
# for no better reason than we need *a* limit, and this one is right there.
SSL_WRITE_BLOCKSIZE = 16384
# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
# individual cipher suites. We need to do this because this is how
# SecureTransport wants them.
CIPHER_SUITES = [
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_AES_256_GCM_SHA384,
SecurityConst.TLS_AES_128_GCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_AES_128_CCM_8_SHA256,
SecurityConst.TLS_AES_128_CCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
]
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.3. For everything else, we pin to that version.
# TLSv1 to 1.2 are supported on macOS 10.8+ and TLSv1.3 is macOS 10.13+
_protocol_to_min_max = {
util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocolMaxSupported),
}
if hasattr(ssl, "PROTOCOL_SSLv2"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2
)
if hasattr(ssl, "PROTOCOL_SSLv3"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3
)
if hasattr(ssl, "PROTOCOL_TLSv1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1
)
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11
)
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12
)
def inject_into_urllib3():
"""
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
"""
util.SSLContext = SecureTransportContext
util.ssl_.SSLContext = SecureTransportContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
util.IS_SECURETRANSPORT = True
util.ssl_.IS_SECURETRANSPORT = True
def extract_from_urllib3():
"""
Undo monkey-patching by :func:`inject_into_urllib3`.
"""
util.SSLContext = orig_util_SSLContext
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_SECURETRANSPORT = False
util.ssl_.IS_SECURETRANSPORT = False
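# --- Illustrative example (editor's sketch, not part of urllib3) ---
# inject_into_urllib3() and extract_from_urllib3() pair naturally into a
# context manager so the monkey-patching is always undone, even on error.
@contextlib.contextmanager
def securetransport_injected():
    inject_into_urllib3()
    try:
        yield
    finally:
        extract_from_urllib3()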
def _read_callback(connection_id, data_buffer, data_length_pointer):
"""
    SecureTransport read callback. This is called by ST to request that data
    be returned from the socket.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
requested_length = data_length_pointer[0]
timeout = wrapped_socket.gettimeout()
error = None
read_count = 0
try:
while read_count < requested_length:
if timeout is None or timeout >= 0:
if not util.wait_for_read(base_socket, timeout):
raise socket.error(errno.EAGAIN, 'timed out')
remaining = requested_length - read_count
buffer = (ctypes.c_char * remaining).from_address(
data_buffer + read_count
)
chunk_size = base_socket.recv_into(buffer, remaining)
read_count += chunk_size
if not chunk_size:
if not read_count:
return SecurityConst.errSSLClosedGraceful
break
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = read_count
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = read_count
if read_count != requested_length:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
def _write_callback(connection_id, data_buffer, data_length_pointer):
"""
    SecureTransport write callback. This is called by ST to request that data
    actually be sent on the network.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
bytes_to_write = data_length_pointer[0]
data = ctypes.string_at(data_buffer, bytes_to_write)
timeout = wrapped_socket.gettimeout()
error = None
sent = 0
try:
while sent < bytes_to_write:
if timeout is None or timeout >= 0:
if not util.wait_for_write(base_socket, timeout):
raise socket.error(errno.EAGAIN, 'timed out')
chunk_sent = base_socket.send(data)
sent += chunk_sent
# This has some needless copying here, but I'm not sure there's
                # much value in optimising this data path.
data = data[chunk_sent:]
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = sent
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = sent
if sent != bytes_to_write:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
# We need to keep these two objects references alive: if they get GC'd while
# in use then SecureTransport could attempt to call a function that is in freed
# memory. That would be...uh...bad. Yeah, that's the word. Bad.
_read_callback_pointer = Security.SSLReadFunc(_read_callback)
_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
class WrappedSocket(object):
"""
API-compatibility wrapper for Python's OpenSSL wrapped socket object.
Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
collector of PyPy.
"""
def __init__(self, socket):
self.socket = socket
self.context = None
self._makefile_refs = 0
self._closed = False
self._exception = None
self._keychain = None
self._keychain_dir = None
self._client_cert_chain = None
# We save off the previously-configured timeout and then set it to
# zero. This is done because we use select and friends to handle the
# timeouts, but if we leave the timeout set on the lower socket then
# Python will "kindly" call select on that socket again for us. Avoid
# that by forcing the timeout to zero.
self._timeout = self.socket.gettimeout()
self.socket.settimeout(0)
@contextlib.contextmanager
def _raise_on_error(self):
"""
A context manager that can be used to wrap calls that do I/O from
SecureTransport. If any of the I/O callbacks hit an exception, this
context manager will correctly propagate the exception after the fact.
This avoids silently swallowing those exceptions.
It also correctly forces the socket closed.
"""
self._exception = None
# We explicitly don't catch around this yield because in the unlikely
# event that an exception was hit in the block we don't want to swallow
# it.
yield
if self._exception is not None:
exception, self._exception = self._exception, None
self.close()
raise exception
def _set_ciphers(self):
"""
Sets up the allowed ciphers. By default this matches the set in
util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
custom and doesn't allow changing at this time, mostly because parsing
OpenSSL cipher strings is going to be a freaking nightmare.
"""
ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
result = Security.SSLSetEnabledCiphers(
self.context, ciphers, len(CIPHER_SUITES)
)
_assert_no_error(result)
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
        # We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
with open(trust_bundle, 'rb') as f:
trust_bundle = f.read()
cert_array = None
trust = Security.SecTrustRef()
try:
# Get a CFArray that contains the certs we want.
cert_array = _cert_array_from_pem(trust_bundle)
# Ok, now the hard part. We want to get the SecTrustRef that ST has
# created for this connection, shove our CAs into it, tell ST to
# ignore everything else it knows, and then ask if it can build a
# chain. This is a buuuunch of code.
result = Security.SSLCopyPeerTrust(
self.context, ctypes.byref(trust)
)
_assert_no_error(result)
if not trust:
raise ssl.SSLError("Failed to copy trust reference")
result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
_assert_no_error(result)
result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
_assert_no_error(result)
trust_result = Security.SecTrustResultType()
result = Security.SecTrustEvaluate(
trust, ctypes.byref(trust_result)
)
_assert_no_error(result)
finally:
if trust:
CoreFoundation.CFRelease(trust)
if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
# Ok, now we can look at what the result was.
successes = (
SecurityConst.kSecTrustResultUnspecified,
SecurityConst.kSecTrustResultProceed
)
if trust_result.value not in successes:
raise ssl.SSLError(
"certificate verify failed, error code: %d" %
trust_result.value
)
def handshake(self,
server_hostname,
verify,
trust_bundle,
min_version,
max_version,
client_cert,
client_key,
client_key_passphrase):
"""
Actually performs the TLS handshake. This is run automatically by
wrapped socket, and shouldn't be needed in user code.
"""
# First, we do the initial bits of connection setup. We need to create
# a context, set its I/O funcs, and set the connection reference.
self.context = Security.SSLCreateContext(
None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
)
result = Security.SSLSetIOFuncs(
self.context, _read_callback_pointer, _write_callback_pointer
)
_assert_no_error(result)
# Here we need to compute the handle to use. We do this by taking the
# id of self modulo 2**31 - 1. If this is already in the dictionary, we
# just keep incrementing by one until we find a free space.
with _connection_ref_lock:
handle = id(self) % 2147483647
while handle in _connection_refs:
handle = (handle + 1) % 2147483647
_connection_refs[handle] = self
result = Security.SSLSetConnection(self.context, handle)
_assert_no_error(result)
# If we have a server hostname, we should set that too.
if server_hostname:
if not isinstance(server_hostname, bytes):
server_hostname = server_hostname.encode('utf-8')
result = Security.SSLSetPeerDomainName(
self.context, server_hostname, len(server_hostname)
)
_assert_no_error(result)
# Setup the ciphers.
self._set_ciphers()
# Set the minimum and maximum TLS versions.
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
_assert_no_error(result)
# TLS 1.3 isn't necessarily enabled by the OS
# so we have to detect when we error out and try
# setting TLS 1.3 if it's allowed. kTLSProtocolMaxSupported
# was added in macOS 10.13 along with kTLSProtocol13.
result = Security.SSLSetProtocolVersionMax(self.context, max_version)
if result != 0 and max_version == SecurityConst.kTLSProtocolMaxSupported:
result = Security.SSLSetProtocolVersionMax(self.context, SecurityConst.kTLSProtocol12)
_assert_no_error(result)
# If there's a trust DB, we need to use it. We do that by telling
# SecureTransport to break on server auth. We also do that if we don't
# want to validate the certs at all: we just won't actually do any
# authing in that case.
if not verify or trust_bundle is not None:
result = Security.SSLSetSessionOption(
self.context,
SecurityConst.kSSLSessionOptionBreakOnServerAuth,
True
)
_assert_no_error(result)
# If there's a client cert, we need to use it.
if client_cert:
self._keychain, self._keychain_dir = _temporary_keychain()
self._client_cert_chain = _load_client_cert_chain(
self._keychain, client_cert, client_key
)
result = Security.SSLSetCertificate(
self.context, self._client_cert_chain
)
_assert_no_error(result)
while True:
with self._raise_on_error():
result = Security.SSLHandshake(self.context)
if result == SecurityConst.errSSLWouldBlock:
raise socket.timeout("handshake timed out")
elif result == SecurityConst.errSSLServerAuthCompleted:
self._custom_validate(verify, trust_bundle)
continue
else:
_assert_no_error(result)
break
def fileno(self):
return self.socket.fileno()
# Copy-pasted from Python 3.5 source code
def _decref_socketios(self):
if self._makefile_refs > 0:
self._makefile_refs -= 1
if self._closed:
self.close()
def recv(self, bufsiz):
buffer = ctypes.create_string_buffer(bufsiz)
bytes_read = self.recv_into(buffer, bufsiz)
data = buffer[:bytes_read]
return data
def recv_into(self, buffer, nbytes=None):
# Read short on EOF.
if self._closed:
return 0
if nbytes is None:
nbytes = len(buffer)
buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLRead(
self.context, buffer, nbytes, ctypes.byref(processed_bytes)
)
# There are some result codes that we want to treat as "not always
# errors". Specifically, those are errSSLWouldBlock,
# errSSLClosedGraceful, and errSSLClosedNoNotify.
if (result == SecurityConst.errSSLWouldBlock):
# If we didn't process any bytes, then this was just a time out.
# However, we can get errSSLWouldBlock in situations when we *did*
            # read some data, and in those cases we should just read "short"
# and return.
if processed_bytes.value == 0:
                # Timed out, no data read.
raise socket.timeout("recv timed out")
elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify):
# The remote peer has closed this connection. We should do so as
# well. Note that we don't actually return here because in
            # principle this could actually be fired along with return data.
# It's unlikely though.
self.close()
else:
_assert_no_error(result)
        # Ok, we read and probably succeeded. We should return whatever data
# was actually read.
return processed_bytes.value
def settimeout(self, timeout):
self._timeout = timeout
def gettimeout(self):
return self._timeout
def send(self, data):
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLWrite(
self.context, data, len(data), ctypes.byref(processed_bytes)
)
if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
# Timed out
raise socket.timeout("send timed out")
else:
_assert_no_error(result)
# We sent, and probably succeeded. Tell them how much we sent.
return processed_bytes.value
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
total_sent += sent
def shutdown(self):
with self._raise_on_error():
Security.SSLClose(self.context)
def close(self):
# TODO: should I do clean shutdown here? Do I have to?
if self._makefile_refs < 1:
self._closed = True
if self.context:
CoreFoundation.CFRelease(self.context)
self.context = None
if self._client_cert_chain:
CoreFoundation.CFRelease(self._client_cert_chain)
self._client_cert_chain = None
if self._keychain:
Security.SecKeychainDelete(self._keychain)
CoreFoundation.CFRelease(self._keychain)
shutil.rmtree(self._keychain_dir)
self._keychain = self._keychain_dir = None
return self.socket.close()
else:
self._makefile_refs -= 1
def getpeercert(self, binary_form=False):
# Urgh, annoying.
#
# Here's how we do this:
#
# 1. Call SSLCopyPeerTrust to get hold of the trust object for this
# connection.
# 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
# 3. To get the CN, call SecCertificateCopyCommonName and process that
# string so that it's of the appropriate type.
# 4. To get the SAN, we need to do something a bit more complex:
        # a. Call SecCertificateCopyValues to get the data, requesting
# kSecOIDSubjectAltName.
# b. Mess about with this dictionary to try to get the SANs out.
#
# This is gross. Really gross. It's going to be a few hundred LoC extra
# just to repeat something that SecureTransport can *already do*. So my
# operating assumption at this time is that what we want to do is
# instead to just flag to urllib3 that it shouldn't do its own hostname
# validation when using SecureTransport.
if not binary_form:
raise ValueError(
"SecureTransport only supports dumping binary certs"
)
trust = Security.SecTrustRef()
certdata = None
der_bytes = None
try:
# Grab the trust store.
result = Security.SSLCopyPeerTrust(
self.context, ctypes.byref(trust)
)
_assert_no_error(result)
if not trust:
# Probably we haven't done the handshake yet. No biggie.
return None
cert_count = Security.SecTrustGetCertificateCount(trust)
if not cert_count:
# Also a case that might happen if we haven't handshaked.
# Handshook? Handshaken?
return None
leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
assert leaf
# Ok, now we want the DER bytes.
certdata = Security.SecCertificateCopyData(leaf)
assert certdata
data_length = CoreFoundation.CFDataGetLength(certdata)
data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
der_bytes = ctypes.string_at(data_buffer, data_length)
finally:
if certdata:
CoreFoundation.CFRelease(certdata)
if trust:
CoreFoundation.CFRelease(trust)
return der_bytes
def version(self):
protocol = Security.SSLProtocol()
result = Security.SSLGetNegotiatedProtocolVersion(self.context, ctypes.byref(protocol))
_assert_no_error(result)
if protocol.value == SecurityConst.kTLSProtocol13:
return 'TLSv1.3'
elif protocol.value == SecurityConst.kTLSProtocol12:
return 'TLSv1.2'
elif protocol.value == SecurityConst.kTLSProtocol11:
return 'TLSv1.1'
elif protocol.value == SecurityConst.kTLSProtocol1:
return 'TLSv1'
elif protocol.value == SecurityConst.kSSLProtocol3:
return 'SSLv3'
elif protocol.value == SecurityConst.kSSLProtocol2:
return 'SSLv2'
else:
raise ssl.SSLError('Unknown TLS version: %r' % protocol)
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
if _fileobject: # Platform-specific: Python 2
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
def makefile(self, mode="r", buffering=None, *args, **kwargs):
# We disable buffering with SecureTransport because it conflicts with
# the buffering that ST does internally (see issue #1153 for more).
buffering = 0
return backport_makefile(self, mode, buffering, *args, **kwargs)
WrappedSocket.makefile = makefile
class SecureTransportContext(object):
"""
I am a wrapper class for the SecureTransport library, to translate the
interface of the standard library ``SSLContext`` object to calls into
SecureTransport.
"""
def __init__(self, protocol):
self._min_version, self._max_version = _protocol_to_min_max[protocol]
self._options = 0
self._verify = False
self._trust_bundle = None
self._client_cert = None
self._client_key = None
self._client_key_passphrase = None
@property
def check_hostname(self):
"""
SecureTransport cannot have its hostname checking disabled. For more,
see the comment on getpeercert() in this file.
"""
return True
@check_hostname.setter
def check_hostname(self, value):
"""
SecureTransport cannot have its hostname checking disabled. For more,
see the comment on getpeercert() in this file.
"""
pass
@property
def options(self):
# TODO: Well, crap.
#
# So this is the bit of the code that is the most likely to cause us
# trouble. Essentially we need to enumerate all of the SSL options that
# users might want to use and try to see if we can sensibly translate
# them, or whether we should just ignore them.
return self._options
@options.setter
def options(self, value):
# TODO: Update in line with above.
self._options = value
@property
def verify_mode(self):
return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
@verify_mode.setter
def verify_mode(self, value):
self._verify = True if value == ssl.CERT_REQUIRED else False
def set_default_verify_paths(self):
# So, this has to do something a bit weird. Specifically, what it does
# is nothing.
#
# This means that, if we had previously had load_verify_locations
# called, this does not undo that. We need to do that because it turns
# out that the rest of the urllib3 code will attempt to load the
# default verify paths if it hasn't been told about any paths, even if
        # the context itself was configured sometime earlier. We resolve that by just
# ignoring it.
pass
def load_default_certs(self):
return self.set_default_verify_paths()
def set_ciphers(self, ciphers):
# For now, we just require the default cipher string.
if ciphers != util.ssl_.DEFAULT_CIPHERS:
raise ValueError(
"SecureTransport doesn't support custom cipher strings"
)
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
# OK, we only really support cadata and cafile.
if capath is not None:
raise ValueError(
"SecureTransport does not support cert directories"
)
self._trust_bundle = cafile or cadata
def load_cert_chain(self, certfile, keyfile=None, password=None):
self._client_cert = certfile
self._client_key = keyfile
self._client_cert_passphrase = password
def wrap_socket(self, sock, server_side=False,
do_handshake_on_connect=True, suppress_ragged_eofs=True,
server_hostname=None):
# So, what do we do here? Firstly, we assert some properties. This is a
# stripped down shim, so there is some functionality we don't support.
# See PEP 543 for the real deal.
assert not server_side
assert do_handshake_on_connect
assert suppress_ragged_eofs
# Ok, we're good to go. Now we want to create the wrapped socket object
# and store it in the appropriate place.
wrapped_socket = WrappedSocket(sock)
# Now we can handshake
wrapped_socket.handshake(
server_hostname, self._verify, self._trust_bundle,
self._min_version, self._max_version, self._client_cert,
self._client_key, self._client_key_passphrase
)
return wrapped_socket
| 38.4274 | 98 | 0.65786 |
0244f2e1efc78a4d139e46e94c21be3f76c324b2 | 108 | py | Python | python-tutorial/003_number.py | alvachien/learning-notes | 502abf4734234ed3fde0a34b3b54aef738b2a639 | ["MIT"] | null | null | null | python-tutorial/003_number.py | alvachien/learning-notes | 502abf4734234ed3fde0a34b3b54aef738b2a639 | ["MIT"] | null | null | null | python-tutorial/003_number.py | alvachien/learning-notes | 502abf4734234ed3fde0a34b3b54aef738b2a639 | ["MIT"] | null | null | null |
print("10 / 3 = ", 10 / 3)
print("9 / 3 = ", 9 / 3)
print("10 // 3 = ", 10 // 3)
print("10 % 3 = ", 10 % 3)
| 21.6 | 28 | 0.388889 |
b5741536aed6f1c26a4b2dbba45c77e73f42a963 | 961 | py | Python | models/comment.py | dude123studios/Connect | cd4669d025720b394080d8f99eedce02c31f835b | ["CC0-1.0"] | null | null | null | models/comment.py | dude123studios/Connect | cd4669d025720b394080d8f99eedce02c31f835b | ["CC0-1.0"] | null | null | null | models/comment.py | dude123studios/Connect | cd4669d025720b394080d8f99eedce02c31f835b | ["CC0-1.0"] | 1 | 2021-03-16T19:42:22.000Z | 2021-03-16T19:42:22.000Z |
from extensions import db
from sqlalchemy import asc, desc, or_
class Comment(db.Model):
__tablename__ = 'comment'
id = db.Column(db.Integer, primary_key = True)
user_id = db.Column(db.Integer(), db.ForeignKey("user.id"))
value = db.Column(db.String(1000), nullable=False)
created_at = db.Column(db.DateTime(), nullable=False,
server_default=db.func.now())
post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))
likes = db.Column(db.Integer())
    @classmethod
    def get_all_from_post(cls, post_id, page, per_page):
        # Order by creation time, newest first, and paginate. Ordering by
        # created_at is an assumption: the original ordered by getattr(cls, ''),
        # which raises AttributeError, and never used page/per_page.
        return cls.query.filter_by(post_id=post_id).order_by(
            desc(cls.created_at)).paginate(page=page, per_page=per_page)
@classmethod
def get_users_pending_sent(cls, user_id):
return cls.query.filter_by(second_id=user_id, pending=False).all()
def delete(self):
        db.session.delete(self)
db.session.commit()
def save(self):
db.session.add(self)
db.session.commit()
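# --- Illustrative example (editor's sketch, not part of the original module) ---
# Example usage of the paginated query above; the post id and page sizes are
# made up, and .items comes from the Flask-SQLAlchemy Pagination object.
def example_first_page_of_comments():
    page = Comment.get_all_from_post(post_id=1, page=1, per_page=20)
    return page.items  # list of Comment rows, newest first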
| 38.44 | 84 | 0.659729 |
27c527f7ac09589c09a07bca251a665d95f34eee | 16,292 | py | Python | infra/python/bootstrap_virtualenv.py | garyli1019/impala | ea0e1def6160d596082b01365fcbbb6e24afb21d | ["Apache-2.0"] | 1,523 | 2015-01-01T03:42:24.000Z | 2022-02-06T22:24:04.000Z | infra/python/bootstrap_virtualenv.py | garyli1019/impala | ea0e1def6160d596082b01365fcbbb6e24afb21d | ["Apache-2.0"] | 10 | 2015-01-09T06:46:05.000Z | 2022-03-29T21:57:57.000Z | infra/python/bootstrap_virtualenv.py | garyli1019/impala | ea0e1def6160d596082b01365fcbbb6e24afb21d | ["Apache-2.0"] | 647 | 2015-01-02T04:01:40.000Z | 2022-03-30T15:57:35.000Z |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This module will create a python virtual env and install external dependencies. If the
# virtualenv already exists and it contains all the expected packages, nothing is done.
#
# A multi-step bootstrapping process is required to build and install all of the
# dependencies:
# 1. install basic non-C/C++ packages into the virtualenv
# 1b. install packages that depend on step 1 but cannot be installed together with their
# dependencies
# 2. use the virtualenv Python to bootstrap the toolchain
# 3. use toolchain gcc to build C/C++ packages
# 4. build the kudu-python package with toolchain gcc and Cython
#
# Every time this script is run, it completes as many of the bootstrapping steps as
# possible with the available dependencies.
#
# This module can be run with python >= 2.4 but python >= 2.6 must be installed on the
# system. If the default 'python' command refers to < 2.6, python 2.6 will be used
# instead.
from __future__ import print_function
import glob
import logging
import optparse
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
import textwrap
import urllib
LOG = logging.getLogger(os.path.splitext(os.path.basename(__file__))[0])
DEPS_DIR = os.path.join(os.path.dirname(__file__), "deps")
ENV_DIR = os.path.join(os.path.dirname(__file__), "env")
# Requirements file with packages we need for our build and tests.
REQS_PATH = os.path.join(DEPS_DIR, "requirements.txt")
# Second stage of requirements which cannot be installed together with their dependencies
# in requirements.txt.
REQS2_PATH = os.path.join(DEPS_DIR, "stage2-requirements.txt")
# Requirements for the next bootstrapping step that builds compiled requirements
# with toolchain gcc.
COMPILED_REQS_PATH = os.path.join(DEPS_DIR, "compiled-requirements.txt")
# Requirements for the Kudu bootstrapping step, which depends on Cython being installed
# by the compiled requirements step.
KUDU_REQS_PATH = os.path.join(DEPS_DIR, "kudu-requirements.txt")
# Requirements for the ADLS test client step, which depends on Cffi (C Foreign Function
# Interface) being installed by the compiled requirements step.
ADLS_REQS_PATH = os.path.join(DEPS_DIR, "adls-requirements.txt")
def delete_virtualenv_if_exist():
if os.path.exists(ENV_DIR):
shutil.rmtree(ENV_DIR)
def create_virtualenv():
LOG.info("Creating python virtualenv")
build_dir = tempfile.mkdtemp()
file = tarfile.open(find_file(DEPS_DIR, "virtualenv*.tar.gz"), "r:gz")
for member in file.getmembers():
file.extract(member, build_dir)
file.close()
python_cmd = detect_python_cmd()
exec_cmd([python_cmd, find_file(build_dir, "virtualenv*", "virtualenv.py"), "--quiet",
"--python", python_cmd, ENV_DIR])
shutil.rmtree(build_dir)
def exec_cmd(args, **kwargs):
'''Executes a command and waits for it to finish, raises an exception if the return
status is not zero. The command output is returned.
'args' and 'kwargs' use the same format as subprocess.Popen().
'''
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
**kwargs)
output = process.communicate()[0]
if process.returncode != 0:
raise Exception("Command returned non-zero status\nCommand: %s\nOutput: %s"
% (args, output))
return output
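# Minimal usage sketch for exec_cmd (the command here is hypothetical); the captured
# output is returned on success, and a non-zero exit status raises an Exception:
#   version = exec_cmd(["git", "--version"]).strip()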
def use_ccache():
'''Returns true if ccache is available and should be used'''
if 'DISABLE_CCACHE' in os.environ: return False
try:
exec_cmd(['ccache', '-V'])
return True
except:
return False
def select_cc():
'''Return the C compiler command that should be used as a string or None if the
compiler is not available '''
# Use toolchain gcc for ABI compatibility with other toolchain packages, e.g.
# Kudu/kudu-python
if not have_toolchain(): return None
toolchain_gcc_dir = toolchain_pkg_dir("gcc")
cc = os.path.join(toolchain_gcc_dir, "bin/gcc")
if not os.path.exists(cc): return None
if use_ccache(): cc = "ccache %s" % cc
return cc
def exec_pip_install(args, cc="no-cc-available", env=None):
'''Executes "pip install" with the provided command line arguments. If 'cc' is set,
it is used as the C compiler. Otherwise compilation of C/C++ code is disabled by
setting the CC environment variable to a bogus value.
Other environment vars can optionally be set with the 'env' argument. By default the
current process's environment is inherited.'''
if not env: env = dict(os.environ)
env["CC"] = cc
# Parallelize the slow numpy build.
# Use getconf instead of nproc because it is supported more widely, e.g. on older
# linux distributions.
env["NPY_NUM_BUILD_JOBS"] = exec_cmd(["getconf", "_NPROCESSORS_ONLN"]).strip()
# Don't call the virtualenv pip directly; it uses a hashbang to call the virtualenv
# python using an absolute path. If the path to the virtualenv is very long, the
# hashbang won't work.
impala_pip_base_cmd = [os.path.join(ENV_DIR, "bin", "python"),
os.path.join(ENV_DIR, "bin", "pip"), "install", "-v"]
# Passes --no-binary for IMPALA-3767: without this, Cython (and
# several other packages) fail to install.
#
# --no-cache-dir is used to prevent caching of compiled artifacts, which may be built
# with different compilers or settings.
third_party_pkg_install_cmd = \
impala_pip_base_cmd[:] + ["--no-binary", ":all:", "--no-cache-dir"]
# When using a custom mirror, we also must use the index of that mirror.
if "PYPI_MIRROR" in os.environ:
third_party_pkg_install_cmd.extend(["--index-url",
"%s/simple" % os.environ["PYPI_MIRROR"]])
else:
# Prevent fetching additional packages from the index. If we forget to add a package
# to one of the requirements.txt files, this should trigger an error. However, we will
# still access the index for version/dependency resolution, hence we need to change it
# when using a private mirror.
third_party_pkg_install_cmd.append("--no-index")
third_party_pkg_install_cmd.extend(["--find-links",
"file://%s" % urllib.pathname2url(os.path.abspath(DEPS_DIR))])
third_party_pkg_install_cmd.extend(args)
exec_cmd(third_party_pkg_install_cmd, env=env)
# Finally, we want to install the packages from our own internal python lib
local_package_install_cmd = impala_pip_base_cmd + \
['-e', os.path.join(os.getenv('IMPALA_HOME'), 'lib', 'python')]
exec_cmd(local_package_install_cmd)
def find_file(*paths):
'''Returns the path specified by the glob 'paths', raises an exception if no file is
found.
Ex: find_file('/etc', 'h*sts') --> /etc/hosts
'''
path = os.path.join(*paths)
files = glob.glob(path)
if len(files) > 1:
raise Exception("Found too many files at %s: %s" % (path, files))
if len(files) == 0:
raise Exception("No file found at %s" % path)
return files[0]
def detect_python_cmd():
'''Returns the system command that provides python 2.6 or greater.'''
paths = os.getenv("PATH").split(os.path.pathsep)
for cmd in ("python", "python27", "python2.7", "python-27", "python-2.7", "python26",
"python2.6", "python-26", "python-2.6"):
for path in paths:
cmd_path = os.path.join(path, cmd)
if not os.path.exists(cmd_path) or not os.access(cmd_path, os.X_OK):
continue
exit = subprocess.call([cmd_path, "-c", textwrap.dedent("""
import sys
sys.exit(int(sys.version_info[:2] < (2, 6)))""")])
if exit == 0:
return cmd_path
raise Exception("Could not find minimum required python version 2.6")
def install_deps():
LOG.info("Installing packages into the virtualenv")
exec_pip_install(["-r", REQS_PATH])
mark_reqs_installed(REQS_PATH)
LOG.info("Installing stage 2 packages into the virtualenv")
exec_pip_install(["-r", REQS2_PATH])
mark_reqs_installed(REQS2_PATH)
def have_toolchain():
'''Return true if the Impala toolchain is available'''
return "IMPALA_TOOLCHAIN" in os.environ
def toolchain_pkg_dir(pkg_name):
'''Return the path to the toolchain package'''
pkg_version = os.environ["IMPALA_" + pkg_name.upper() + "_VERSION"]
return os.path.join(os.environ["IMPALA_TOOLCHAIN"], pkg_name + "-" + pkg_version)
def install_compiled_deps_if_possible():
'''Install dependencies that require compilation with toolchain GCC, if the toolchain
is available. Returns true if the deps are installed'''
if reqs_are_installed(COMPILED_REQS_PATH):
LOG.debug("Skipping compiled deps: matching compiled-installed-requirements.txt found")
return True
cc = select_cc()
if cc is None:
LOG.debug("Skipping compiled deps: cc not available yet")
return False
env = dict(os.environ)
# Compilation of pycrypto fails on CentOS 5 with newer GCC versions because of a
# problem with inline declarations in older libc headers. Setting -fgnu89-inline is a
# workaround.
distro_version = ''.join(exec_cmd(["lsb_release", "-irs"]).lower().split())
print(distro_version)
if distro_version.startswith("centos5."):
env["CFLAGS"] = "-fgnu89-inline"
LOG.info("Installing compiled requirements into the virtualenv")
exec_pip_install(["-r", COMPILED_REQS_PATH], cc=cc, env=env)
mark_reqs_installed(COMPILED_REQS_PATH)
return True
def install_adls_deps():
# The ADLS dependencies require that the OS is at least CentOS 6.7 or above,
# which is why we break this into a separate step. If the target filesystem is
# ADLS, the expectation is that the dev environment is running at least CentOS 6.7.
if os.environ.get('TARGET_FILESYSTEM') == "adls":
if reqs_are_installed(ADLS_REQS_PATH):
LOG.debug("Skipping ADLS deps: matching adls-installed-requirements.txt found")
return True
cc = select_cc()
assert cc is not None
LOG.info("Installing ADLS packages into the virtualenv")
exec_pip_install(["-r", ADLS_REQS_PATH], cc=cc)
mark_reqs_installed(ADLS_REQS_PATH)
def install_kudu_client_if_possible():
'''Installs the Kudu python module if possible, which depends on the toolchain and
the compiled requirements in compiled-requirements.txt. If the toolchain isn't
available, nothing will be done. Also nothing will be done if the Kudu client lib
required by the module isn't available (as determined by KUDU_IS_SUPPORTED)'''
if reqs_are_installed(KUDU_REQS_PATH):
LOG.debug("Skipping Kudu: matching kudu-installed-requirements.txt found")
return
if os.environ["KUDU_IS_SUPPORTED"] != "true":
LOG.debug("Skipping Kudu: Kudu is not supported")
return
kudu_base_dir = os.environ["IMPALA_KUDU_HOME"]
if not os.path.exists(kudu_base_dir):
LOG.debug("Skipping Kudu: %s doesn't exist" % kudu_base_dir)
return
LOG.info("Installing Kudu into the virtualenv")
# The installation requires that KUDU_HOME/build/latest exists. An empty directory
# structure will be made to satisfy that. The Kudu client headers and lib will be made
# available through GCC environment variables.
fake_kudu_build_dir = os.path.join(tempfile.gettempdir(), "virtualenv-kudu")
try:
artifact_dir = os.path.join(fake_kudu_build_dir, "build", "latest")
if not os.path.exists(artifact_dir):
os.makedirs(artifact_dir)
cc = select_cc()
assert cc is not None
env = dict(os.environ)
env["KUDU_HOME"] = fake_kudu_build_dir
kudu_client_dir = find_kudu_client_install_dir()
env["CPLUS_INCLUDE_PATH"] = os.path.join(kudu_client_dir, "include")
env["LIBRARY_PATH"] = os.path.pathsep.join([os.path.join(kudu_client_dir, 'lib'),
os.path.join(kudu_client_dir, 'lib64')])
exec_pip_install(["-r", KUDU_REQS_PATH], cc=cc, env=env)
mark_reqs_installed(KUDU_REQS_PATH)
finally:
try:
shutil.rmtree(fake_kudu_build_dir)
except Exception:
LOG.debug("Error removing temp Kudu build dir", exc_info=True)
def find_kudu_client_install_dir():
custom_client_dir = os.environ["KUDU_CLIENT_DIR"]
if custom_client_dir:
install_dir = os.path.join(custom_client_dir, "usr", "local")
error_if_kudu_client_not_found(install_dir)
else:
# If the toolchain appears to have been setup already, then the Kudu client is
# required to exist. It's possible that the toolchain won't be setup yet though
# since the toolchain bootstrap script depends on the virtualenv.
kudu_base_dir = os.environ["IMPALA_KUDU_HOME"]
install_dir = os.path.join(kudu_base_dir, "debug")
if os.path.exists(kudu_base_dir):
error_if_kudu_client_not_found(install_dir)
return install_dir
def error_if_kudu_client_not_found(install_dir):
header_path = os.path.join(install_dir, "include", "kudu", "client", "client.h")
if not os.path.exists(header_path):
raise Exception("Kudu client header not found at %s" % header_path)
kudu_client_lib = "libkudu_client.so"
lib_dir = os.path.join(install_dir, "lib64")
if not os.path.exists(lib_dir):
lib_dir = os.path.join(install_dir, "lib")
for _, _, files in os.walk(lib_dir):
for file in files:
if file == kudu_client_lib:
return
raise Exception("%s not found at %s" % (kudu_client_lib, lib_dir))
def mark_reqs_installed(reqs_path):
'''Mark that the requirements from the given file are installed by copying it into the root
directory of the virtualenv.'''
installed_reqs_path = os.path.join(ENV_DIR, os.path.basename(reqs_path))
shutil.copyfile(reqs_path, installed_reqs_path)
def reqs_are_installed(reqs_path):
'''Check if the requirements from the given file are installed in the virtualenv by
looking for a matching requirements file in the root directory of the virtualenv.'''
installed_reqs_path = os.path.join(ENV_DIR, os.path.basename(reqs_path))
if not os.path.exists(installed_reqs_path):
return False
installed_reqs_file = open(installed_reqs_path)
try:
reqs_file = open(reqs_path)
try:
if reqs_file.read() == installed_reqs_file.read():
return True
else:
LOG.debug("Virtualenv upgrade needed")
return False
finally:
reqs_file.close()
finally:
installed_reqs_file.close()
def setup_virtualenv_if_not_exists():
if not (reqs_are_installed(REQS_PATH) and reqs_are_installed(REQS2_PATH)):
delete_virtualenv_if_exist()
create_virtualenv()
install_deps()
LOG.debug("Virtualenv setup complete")
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-l", "--log-level", default="INFO",
choices=("DEBUG", "INFO", "WARN", "ERROR"))
parser.add_option("-r", "--rebuild", action="store_true", help="Force a rebuild of"
" the virtualenv even if it exists and appears to be completely up-to-date.")
parser.add_option("--print-ld-library-path", action="store_true", help="Print the"
" LD_LIBRARY_PATH that should be used when running python from the virtualenv.")
options, args = parser.parse_args()
if options.print_ld_library_path:
kudu_client_dir = find_kudu_client_install_dir()
print(os.path.pathsep.join([os.path.join(kudu_client_dir, 'lib'),
os.path.join(kudu_client_dir, 'lib64')]))
sys.exit()
logging.basicConfig(level=getattr(logging, options.log_level))
if options.rebuild:
delete_virtualenv_if_exist()
# Complete as many bootstrap steps as possible (see file comment for the steps).
setup_virtualenv_if_not_exists()
if install_compiled_deps_if_possible():
install_kudu_client_if_possible()
install_adls_deps()
| 40.934673
| 93
| 0.72465
|
7a4d963a1bd5024ebd517cd690ec54567de953a1
| 776
|
py
|
Python
|
vendor-local/lib/python/easy_thumbnails/storage.py
|
Koenkk/popcorn_maker
|
0978b9f98dacd4e8eb753404b24eb584f410aa11
|
[
"BSD-3-Clause"
] | 15
|
2015-03-23T02:55:20.000Z
|
2021-01-12T12:42:30.000Z
|
vendor-local/lib/python/easy_thumbnails/storage.py
|
Koenkk/popcorn_maker
|
0978b9f98dacd4e8eb753404b24eb584f410aa11
|
[
"BSD-3-Clause"
] | null | null | null |
vendor-local/lib/python/easy_thumbnails/storage.py
|
Koenkk/popcorn_maker
|
0978b9f98dacd4e8eb753404b24eb584f410aa11
|
[
"BSD-3-Clause"
] | 16
|
2015-02-18T21:43:31.000Z
|
2021-11-09T22:50:03.000Z
|
from django.core.files.storage import FileSystemStorage
from easy_thumbnails.conf import settings
class ThumbnailFileSystemStorage(FileSystemStorage):
"""
Standard file system storage.
The default ``location`` and ``base_url`` are set to
``THUMBNAIL_MEDIA_ROOT`` and ``THUMBNAIL_MEDIA_URL``, falling back to the
standard ``MEDIA_ROOT`` and ``MEDIA_URL`` if the custom settings are blank.
"""
def __init__(self, location=None, base_url=None, *args, **kwargs):
if location is None:
location = settings.THUMBNAIL_MEDIA_ROOT or None
if base_url is None:
base_url = settings.THUMBNAIL_MEDIA_URL or None
super(ThumbnailFileSystemStorage, self).__init__(location, base_url,
*args, **kwargs)
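# A minimal sketch of the settings fallback described in the class docstring
# (values are illustrative, not from the original project):
#   THUMBNAIL_MEDIA_ROOT = '/srv/media/thumbs'   # used as `location` when non-blank
#   THUMBNAIL_MEDIA_URL = '/media/thumbs/'       # used as `base_url` when non-blank
#   # with both left blank, MEDIA_ROOT / MEDIA_URL are used instead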
| 36.952381
| 79
| 0.701031
|
f588a580a345f0d8496758e6e5d4c5d9a89adb18
| 13,509
|
py
|
Python
|
pyefun/commonlyUtil.py
|
nobodxbodon/pyefun
|
a713e499ed0f1678c836a68320616423e6f59c1e
|
[
"Apache-2.0"
] | null | null | null |
pyefun/commonlyUtil.py
|
nobodxbodon/pyefun
|
a713e499ed0f1678c836a68320616423e6f59c1e
|
[
"Apache-2.0"
] | null | null | null |
pyefun/commonlyUtil.py
|
nobodxbodon/pyefun
|
a713e499ed0f1678c836a68320616423e6f59c1e
|
[
"Apache-2.0"
] | null | null | null |
import random
import ubelt as ub
from collections import OrderedDict
import operator
from .dirkBase import *
import hashlib
def 取sha1(data, 哈希算法='sha1'):
return ub.hash_data(data, hasher=哈希算法)
def 取md5(内容, 编码="utf-8"):
MD5 = hashlib.md5()
MD5.update(内容.encode(encoding=编码))
return MD5.hexdigest()
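# Example (standard MD5 of the ASCII string "hello"):
#   取md5("hello")  # -> '5d41402abc4b2a76b9719d911017c592'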
def 取哈希(data, 哈希算法='sha1'):
return ub.hash_data(data, hasher=哈希算法)
def 取文件哈希(文件路径, 哈希算法='sha1'):
return ub.hash_file(文件路径, hasher=哈希算法, base='hex')
def 运行命令(str,
运行目录=None,
环境变量=None,
显示信息=0,
后台运行=False,
shell=False
):
info = ub.cmd(str,
cwd=运行目录,
env=环境变量,
verbose=显示信息,
detach=后台运行,
shell=shell,
)
return (info['out'])
def 取缓存目录(name):
"""
# The resource root directory is
# ~/AppData/Roaming on Windows,
# ~/.config on Linux and ~/Library/Application Support on Mac.
# The cache root directory is ~/AppData/Local on Windows,
# ~/.config on Linux and ~/Library/Caches on Mac.
"""
return ub.shrinkuser(ub.ensure_app_cache_dir(name))
def 下载文件(url, 保存文件路径=None):
fpath = ub.download(url, fpath=保存文件路径, verbose=0)
return ub.shrinkuser(fpath)
def 下载文件缓存(url, 保存文件路径=None):
fpath = ub.grabdata(url, fpath=保存文件路径, verbose=0)
return ub.shrinkuser(fpath)
def 字典_取最小值(indexable, key=None):
"""
# assert argmin({'a': 3, 'b': 2, 'c': 100}) == 'b'
# assert argmin(['a', 'c', 'b', 'z', 'f']) == 0
# assert argmin([[0, 1], [2, 3, 4], [5]], key=len) == 2
# assert argmin({'a': 3, 'b': 2, 3: 100, 4: 4}) == 'b'
# assert argmin(iter(['a', 'c', 'A', 'z', 'f'])) == 2
"""
return ub.argmin(indexable, key)
def 字典_分组(数组, 类别数组):
groups = ub.group_items(数组, 类别数组)
return groups
def 字典_统计(数组, weights=None, ordered=False, labels=None):
data = ub.dict_hist(数组, weights, ordered, labels)
return data
def 字典_取子集(数组, key, default=ub.util_const.NoParam, cls=OrderedDict):
data = ub.dict_subset(数组, key, default, cls)
return data
def 字典_取值(字典, key, default=None):
return list(ub.dict_take(字典, key, default))
def 字典_合并(*args):
# 字典_取值({'a': 1, 'b': 1}, {'b': 2, 'c': 2})
data = ub.dict_union(*args)
return data
def 字典_差集(*args):
# 字典_差集({'a': 1, 'b': 1}, {'a'}, {'c'})
data = ub.dict_diff(*args)
return data
def 字典_根据值重建(func, dict_):
data = ub.map_vals(func, dict_)
return data
def 字典_根据健重建(func, dict_):
data = ub.map_keys(func, dict_)
return data
def 字典_根据值排序(dict_, key=None, reverse=False):
# dict_ = {'spam': 2.62, 'eggs': 1.20, 'jam': 2.92}
# newdict = 字典_排序(dict_)
# print(ub.repr2(newdict, nl=0))
# {'eggs': 1.2, 'spam': 2.62, 'jam': 2.92}
# newdict = 字典_排序(dict_, reverse=True)
# print(ub.repr2(newdict, nl=0))
# {'jam': 2.92, 'spam': 2.62, 'eggs': 1.2}
# newdict = 字典_排序(dict_, key=lambda x: x % 1.6)
# print(ub.repr2(newdict, nl=0))
# {'spam': 2.62, 'eggs': 1.2, 'jam': 2.92}
data = ub.sorted_vals(dict_, key, reverse)
return data
def 字典_根据键排序(dict_, key=None, reverse=False):
# import ubelt as ub
# dict_ = {'spam': 2.62, 'eggs': 1.20, 'jam': 2.92}
# newdict = sorted_keys(dict_)
# print(ub.repr2(newdict, nl=0))
# {'eggs': 1.2, 'jam': 2.92, 'spam': 2.62}
# newdict = sorted_keys(dict_, reverse=True)
# print(ub.repr2(newdict, nl=0))
# {'spam': 2.62, 'jam': 2.92, 'eggs': 1.2}
# newdict = sorted_keys(dict_, key=lambda x: sum(map(ord, x)))
# print(ub.repr2(newdict, nl=0))
# {'jam': 2.92, 'eggs': 1.2, 'spam': 2.62}
data = ub.sorted_keys(dict_, key, reverse)
return data
def 字典_交换健值(dict_, 唯一值=True):
data = ub.invert_dict(dict_, unique_vals=唯一值)
return data
def 字典_查找重复项(items, 至少出现=2, key=None):
data = ub.util_dict.find_duplicates(items, 至少出现, key)
return data
def 灵活字典():
return ub.AutoDict()
def 灵活有序字典():
return ub.AutoOrderedDict()
def 数组_合并为字典(items1, items2, cls=dict):
return ub.dzip(items1, items2, cls)
def 导入包_从路径(路径):
return ub.import_module_from_path(路径)
def 创建连接(文件路径, 目标路径, 覆盖=False, 显示信息=0):
return ub.symlink(文件路径, 目标路径, 覆盖, 显示信息)
def 数组_查找重复项(items, 至少出现=2):
data = ub.find_duplicates(items, k=至少出现)
return data
def 数组_随机排序(items):
return random.shuffle(items)
def 命令行_获取参数(参数名, 默认值=ub.util_const.NoParam, argv=None):
return ub.argval(参数名, 默认值, argv)
def 命令行_是否存在参数(参数名, argv=None):
return ub.argflag(参数名, argv)
def 内存缓存(func):
# Cache the function's results
return ub.memoize(func)
def 内存缓存方法(func):
# Cache the method's results
return ub.memoize_method(func)
def 内存缓存属性(func):
# Evaluated only once; the cached value cannot change afterwards
return ub.memoize_property(func)
def 路径_名字处理(路径, 末尾='', 前缀='', 扩展名=None, 名称=None, dpath=None,
relative=None, 不包含点=False):
return ub.augpath(路径, 末尾, 前缀, 扩展名, 名称, dpath, relative, 不包含点)
def 路径_取用户目录(用户名=None):
# Return the path to a user's home directory.
return ub.userhome(用户名)
def 路径_替换为用户路径(path, home='~'):
# Replace the user's home directory prefix in the path with '~'.
return ub.shrinkuser(path, home)
def 路径_扩展路径(path):
return ub.expandpath(path)
def 路径_优化路径(path):
return ub.util_path.normpath(path)
def 目录_创建(路径, 权限=0o1777, 显示信息=None, 重建=False):
return ub.ensuredir(路径, 权限, 显示信息, 重建)
class 临时目录(ub.TempDir):
"""
Context manager for creating and cleaning up a temporary directory.
#Example:
# with 临时目录() as self:
# dpath = self.dpath
# print(dpath)
# assert 文件是否存在(dpath)
# assert not 文件是否存在(dpath)
#Example:
# self = 临时目录()
# dpath = self.ensure()
# assert exists(dpath)
# self.cleanup()
# assert not exists(dpath)
"""
def 初始化(self):
return self.ensure()
def 清理(self):
self.cleanup()
def 取路径(self):
return self.dpath
def 系统_取用户数据目录():
return ub.platform_data_dir()
def 系统_取配置目录():
return ub.platform_config_dir()
def 系统_缓存目录():
return ub.platform_cache_dir()
def 系统_设置应用数据目录(appname, *args):
return ub.ensure_app_data_dir(appname, *args)
def 系统_取应用配置目录(appname, *args):
return ub.get_app_config_dir(appname, *args)
def 系统_设置应用配置目录(appname, *args):
return ub.ensure_app_config_dir(appname, *args)
def 系统_取应用缓存目录(appname, *args):
return ub.get_app_cache_dir(appname, *args)
def 系统_设置应用缓存目录(appname, *args):
return ub.ensure_app_cache_dir(appname, *args)
def 查找可执行文件(名称, 匹配所有=False, 路径=None):
"""
# 查找可执行文件('ls')
# 查找可执行文件('ping')
# assert 查找可执行文件('which') == 查找可执行文件(查找可执行文件('which'))
# 查找可执行文件('which', 匹配所有=True)
# 查找可执行文件('ping', 匹配所有=True)
# 查找可执行文件('cmake', 匹配所有=True)
# 查找可执行文件('nvcc', 匹配所有=True)
# 查找可执行文件('noexist', 匹配所有=True)
"""
return ub.find_exe(名称, 匹配所有, 路径)
def 查找文件或目录(名称, 路径=None, 精确=False):
"""
# list(查找文件或目录('ping', exact=True))
# list(查找文件或目录('bin'))
# list(查找文件或目录('bin'))
# list(查找文件或目录('*cc*'))
# list(查找文件或目录('cmake*'))
"""
return ub.find_path(名称, 路径, 精确)
def 文本_缩进(文本, 前缀=' '):
return ub.util_str.indent(文本, 前缀)
def 文本_代码块(文本):
return ub.util_str.codeblock(文本)
def 文本_段落(文本):
return ub.util_str.paragraph(文本)
def 文本_水平合并(args, sep=''):
"""
# import ubelt as ub
# B = ub.repr2([[1, 2], [3, 457]], nl=1, cbr=True, trailsep=False)
# C = ub.repr2([[5, 6], [7, 8]], nl=1, cbr=True, trailsep=False)
# args = ['A = ', B, ' * ', C]
# print(文本_水平合并(args))
# A = [[1, 2], * [[5, 6],
# [3, 457]] [7, 8]]
"""
return ub.util_str.hzcat(args, sep)
def 文本_转unicode(str):
return ub.ensure_unicode(str)
class 控制台(ub.CaptureStdout):
"""
Console capture operations
"""
def __init__(self, 获取内容=True, 是否启用=True):
super().__init__(supress=获取内容, enabled=是否启用)
def 停止(self):
self.stop()
def 开始(self):
self.start()
def 获取内容(self):
return self.text.strip()
class 分块(ub.chunks):
pass
def __init__(self, items, 分块数量=None, 创建块数=None, 创建数量=None,
边界模式='none'):
"""
# 边界模式 (str) - determines how the final chunk is handled when the input length is not
# evenly divisible by the chunk size; valid values: {'none', 'cycle', 'replicate'}
"""
super().__init__(
items=items,
chunksize=分块数量,
nchunks=创建块数,
total=创建数量,
bordermode=边界模式,
)
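# A short sketch of the chunking behaviour (assuming ubelt.chunks semantics with the
# default 边界模式='none'):
#   list(分块([1, 2, 3, 4, 5, 6, 7], 分块数量=3))  # -> [[1, 2, 3], [4, 5, 6], [7]]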
def 数组_索引取值(items, indices, default=ub.util_const.NoParam):
return ub.util_list.take(items, indices, default)
def 数组_逻辑取值(items, flags):
return ub.util_list.compress(items, flags)
def 数组_转平面(items):
return ub.util_list.flatten(items)
def 数组_去重复(items, key=None):
"""
# import ubelt as ub
# import six
# items = ['A', 'a', 'b', 'B', 'C', 'c', 'D', 'e', 'D', 'E']
# unique_items = list(ub.unique(items, key=six.text_type.lower))
# assert unique_items == ['A', 'b', 'C', 'D', 'e']
# unique_items = list(ub.unique(items))
# assert unique_items == ['A', 'a', 'b', 'B', 'C', 'c', 'D', 'e', 'E']
"""
return ub.util_list.unique(items, key)
def 数组_取唯一值的索引(items, key=None):
"""
# import ubelt as ub
# import six
# items = ['A', 'a', 'b', 'B', 'C', 'c', 'D', 'e', 'D', 'E']
# unique_items = list(ub.unique(items, key=six.text_type.lower))
# assert unique_items == ['A', 'b', 'C', 'D', 'e']
# unique_items = list(ub.unique(items))
# assert unique_items == ['A', 'a', 'b', 'B', 'C', 'c', 'D', 'e', 'E']
"""
return ub.util_list.argunique(items, key)
def 数组_取唯一值的逻辑值(items, key=None):
"""
# import ubelt as ub
# import six
# items = ['A', 'a', 'b', 'B', 'C', 'c', 'D', 'e', 'D', 'E']
# unique_items = list(ub.unique(items, key=six.text_type.lower))
# assert unique_items == ['A', 'b', 'C', 'D', 'e']
# unique_items = list(ub.unique(items))
# assert unique_items == ['A', 'a', 'b', 'B', 'C', 'c', 'D', 'e', 'E']
"""
return ub.util_list.unique_flags(items, key)
def 数组_构建逻辑值列表(indices, maxval=None):
"""
# import ubelt as ub
#indices = [0, 1, 4]
#mask = ub.boolmask(indices, maxval=6)
#assert mask == [True, True, False, False, True, False]
#mask = ub.boolmask(indices)
#assert mask == [True, True, False, False, True]
"""
return ub.util_list.boolmask(indices, maxval)
def 数组_是否全部相同(iterable, eq=operator.eq):
"""
#allsame([1, 1, 1, 1])
# True
#allsame([])
# True
#allsame([0, 1])
# False
#iterable = iter([0, 1, 1, 1])
#next(iterable)
#allsame(iterable)
# True
#allsame(range(10))
# False
#allsame(range(10), lambda a, b: True)
# True
"""
return ub.allsame(iterable, eq)
def 数组_排序索引(indexable, key=None, reverse=False):
return ub.argsort(indexable, key, reverse)
def 数组_取最小值(indexable, key=None):
"""
# assert argmin({'a': 3, 'b': 2, 'c': 100}) == 'b'
# assert argmin(['a', 'c', 'b', 'z', 'f']) == 0
# assert argmin([[0, 1], [2, 3, 4], [5]], key=len) == 2
# assert argmin({'a': 3, 'b': 2, 3: 100, 4: 4}) == 'b'
# assert argmin(iter(['a', 'c', 'A', 'z', 'f'])) == 2
"""
return ub.argmin(indexable, key)
def 数组_弹出(iterable):
return ub.peek(iterable)
def 取执行文件名():
"""
Call format: <text> 取执行文件名 () - system core support library -> environment access
English name: GetRunFileName
Returns the file name of the currently running 易 (EPL) program file. This is a basic command.
Operating system requirement: Windows
:return: commonlyUtil.py
"""
return 文件_取文件名(__file__)
def 读环境变量(环境变量名称: str) -> str:
"""
Call format: <text> 读环境变量 (text 环境变量名称) - system core support library -> environment access
English name: GetEnv
Returns the text associated with an operating system environment variable: the value on success, or empty text on failure. This is a basic command.
Parameter <1> is named "环境变量名称" and has type text.
Operating system requirement: Windows, Linux
"""
return os.environ.get(环境变量名称)
def 写环境变量(环境变量名称: str, 欲写入内容: str) -> bool:
"""
Call format: <bool> 写环境变量 (text 环境变量名称, text 欲写入内容) - system core support library -> environment access
English name: PutEnv
Modifies or creates the specified operating system environment variable. Returns true on success, false on failure. This is a basic command.
Parameter <1> is named "环境变量名称" and has type text.
Parameter <2> is named "欲写入内容" and has type text.
Operating system requirement: Windows, Linux
"""
os.environ[环境变量名称] = 欲写入内容
return True
import socket
def 取主机名():
"""
Call format: <text> 取主机名 () - system core support library -> network communication
English name: GetHostName
Returns the host name of the local machine, used to identify this machine in network communication. This is a basic command.
Operating system requirement: Windows
:return:
"""
return socket.gethostname()
def 转换为IP地址(欲转换主机名):
"""
Call format: <text> 转换为IP地址 (text 欲转换主机名) - system core support library -> network communication
English name: HostNameToIP
Converts the specified host name to its IP address. Returns empty text on failure. This is a basic command.
Parameter <1> is named "欲转换主机名" and has type text.
Operating system requirement: Windows
"""
return socket.gethostbyname(欲转换主机名)
def 标准输入():
"""
Call format: <text> 标准输入 ([bool 是否回显]) - system core support library -> console operations
English name: fgets
Requests a line of text of at most 2048 characters from the standard input device and returns what the user entered. Note that this command can only be used in console programs. This is a basic command.
Parameter <1> is named "是否回显" and has type bool; it may be omitted. It determines whether typed characters are echoed: false hides them, true shows them. If omitted, the default is true (echo on). Set it to false to read passwords and other sensitive input.
Operating system requirement: Windows, Linux
"""
return input()
def 标准输出(*args):
"""
Call format: <no return value> 标准输出 ([int 输出方向], any 欲输出内容, ...) - system core support library -> console operations
English name: fputs
Writes the given content to the standard output or standard error device. Note that this command can only be used in console programs. This is a basic command. The last parameter in the parameter list may be repeated.
Parameter <1> is named "输出方向" and has type int; it may be omitted. It selects the output device and may be one of the constants: 1 (#标准输出设备, standard output) or 2 (#标准错误设备, standard error). If omitted, it defaults to standard output.
Parameter <2> is named "欲输出内容" and has type any. It may only be text, a number, a boolean, or a date/time. If the content is multi-line text, lines may be separated by a carriage return (字符 (13)), a line feed (字符 (10)), or the CR+LF combination (字符 (13) + 字符 (10)).
Operating system requirement: Windows, Linux
"""
return print(*args)
def 结束(*args, **kwargs):
"""
Call format: <no return value> 结束 () - system core support library -> flow control
English name: end
Terminates the currently running 易 (EPL) program. This is a basic command.
Operating system requirement: Windows, Linux, Unix
"""
return exit(*args, **kwargs)
| 21.649038
| 148
| 0.585536
|
ed1d80b9513c4abea76c324ec1c2536591b94f01
| 1,459
|
py
|
Python
|
tests/apollo/util/bft_test_exceptions.py
|
definitelyNotFBI/utt
|
1695e3a1f81848e19b042cdc4db9cf1d263c26a9
|
[
"Apache-2.0"
] | 340
|
2018-08-27T16:30:45.000Z
|
2022-03-28T14:31:44.000Z
|
tests/apollo/util/bft_test_exceptions.py
|
definitelyNotFBI/utt
|
1695e3a1f81848e19b042cdc4db9cf1d263c26a9
|
[
"Apache-2.0"
] | 706
|
2018-09-02T17:50:32.000Z
|
2022-03-31T13:03:15.000Z
|
tests/apollo/util/bft_test_exceptions.py
|
glevkovich/concord-bft
|
a1b7b57472f5375230428d16c613a760b33233fa
|
[
"Apache-2.0"
] | 153
|
2018-08-29T05:37:25.000Z
|
2022-03-23T14:08:45.000Z
|
# Concord
#
# Copyright (c) 2019 VMware, Inc. All Rights Reserved.
#
# This product is licensed to you under the Apache 2.0 license (the "License").
# You may not use this product except in compliance with the Apache 2.0 License.
#
# This product may include a number of subcomponents with separate copyright
# notices and license terms. Your use of these subcomponents is subject to the
# terms and conditions of the subcomponent's license, as noted in the LICENSE
# file.
class Error(Exception):
"""Base class for exceptions in this module."""
pass
##
## Exceptions for bft_tester
##
class AlreadyRunningError(Error):
def __init__(self, replica):
self.replica = replica
def __repr__(self):
return (f'{self.__class__.__name__}:\n'
f' replica={self.replica}\n')
class AlreadyStoppedError(Error):
def __init__(self, replica):
self.replica = replica
def __repr__(self):
return (f'{self.__class__.__name__}:\n'
f' replica={self.replica}\n')
class BadReplyError(Error):
def __init__(self):
pass
def __repr__(self):
return f'{self.__class__.__name__}\n'
class KeyExchangeError(Error):
def __init__(self):
pass
def __repr__(self):
return f'{self.__class__.__name__}\n'
class CreError(Error):
def __init__(self, msg):
self.msg = msg
def __repr__(self):
return f'{self.__class__.__name__, self.msg}\n'
| 25.155172
| 80
| 0.671693
|
4835d3815895ffa24153cc4bf97a29e24f0c86b5
| 542
|
py
|
Python
|
common/src/stack/command/stack/commands/remove/host/plugin_attr.py
|
khanfluence/stacki-cumulus-switch
|
df54afb20f6ea6a3a136b3c09b30df54ea79ffcc
|
[
"BSD-3-Clause"
] | null | null | null |
common/src/stack/command/stack/commands/remove/host/plugin_attr.py
|
khanfluence/stacki-cumulus-switch
|
df54afb20f6ea6a3a136b3c09b30df54ea79ffcc
|
[
"BSD-3-Clause"
] | null | null | null |
common/src/stack/command/stack/commands/remove/host/plugin_attr.py
|
khanfluence/stacki-cumulus-switch
|
df54afb20f6ea6a3a136b3c09b30df54ea79ffcc
|
[
"BSD-3-Clause"
] | null | null | null |
# @copyright@
# @copyright@
#
# @rocks@
# Copyright (c) 2000 - 2010 The Regents of the University of California
# All rights reserved. Rocks(r) v5.4 www.rocksclusters.org
# https://github.com/Teradata/stacki/blob/master/LICENSE-ROCKS.txt
# @rocks@
import stack.commands
class Plugin(stack.commands.Plugin):
def provides(self):
return 'attr'
def run(self, hosts):
for host in hosts:
self.owner.db.execute("""delete from attributes
where scope="host" and
scopeid=(select id from nodes where name = '%s')""" %
host)
| 20.846154
| 71
| 0.695572
|
60145e973f1d32843a6f9808b5c589c922112f76
| 12,592
|
py
|
Python
|
Lib/compiler/static/module_table.py
|
penguin-wwy/cinder
|
2699849639420e1ed77269a671c0d480efe0981d
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Lib/compiler/static/module_table.py
|
penguin-wwy/cinder
|
2699849639420e1ed77269a671c0d480efe0981d
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Lib/compiler/static/module_table.py
|
penguin-wwy/cinder
|
2699849639420e1ed77269a671c0d480efe0981d
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates. (http://www.facebook.com)
from __future__ import annotations
import ast
from ast import (
AST,
Attribute,
AsyncFunctionDef,
BinOp,
Call,
ClassDef,
Constant,
Expression,
FunctionDef,
Subscript,
Index,
Name,
NameConstant,
)
from contextlib import nullcontext
from enum import Enum
from functools import partial
from typing import (
cast,
Callable as typingCallable,
ContextManager,
Dict,
List,
Optional,
Set,
TYPE_CHECKING,
Tuple,
Union,
)
from ..symbols import Scope, ModuleScope
from .errors import TypedSyntaxError
from .types import (
Callable,
CType,
Callable,
Class,
ClassVar,
DecoratedMethod,
DynamicClass,
Function,
FunctionGroup,
DYNAMIC,
DYNAMIC_TYPE,
FLOAT_TYPE,
FinalClass,
INT_TYPE,
MethodType,
NONE_TYPE,
Object,
OPTIONAL_TYPE,
TypeDescr,
UNION_TYPE,
UnionType,
UnknownDecoratedMethod,
Value,
)
from .visitor import GenericVisitor
if TYPE_CHECKING:
from .compiler import Compiler
class ModuleFlag(Enum):
CHECKED_DICTS = 1
SHADOW_FRAME = 2
CHECKED_LISTS = 3
class ReferenceVisitor(GenericVisitor[Optional[Value]]):
def __init__(self, module: ModuleTable) -> None:
super().__init__(module)
self.types: Dict[AST, Value] = {}
self.subscr_nesting = 0
def visitName(self, node: Name) -> Optional[Value]:
return self.module.children.get(
node.id
) or self.module.compiler.builtins.children.get(node.id)
def visitAttribute(self, node: Attribute) -> Optional[Value]:
val = self.visit(node.value)
if val is not None:
return val.resolve_attr(node, self)
class AnnotationVisitor(ReferenceVisitor):
def resolve_annotation(
self,
node: ast.AST,
*,
is_declaration: bool = False,
) -> Optional[Class]:
with self.error_context(node):
klass = self.visit(node)
if not isinstance(klass, Class):
return None
if self.subscr_nesting or not is_declaration:
if isinstance(klass, FinalClass):
raise TypedSyntaxError(
"Final annotation is only valid in initial declaration "
"of attribute or module-level constant",
)
if isinstance(klass, ClassVar):
raise TypedSyntaxError(
"ClassVar is allowed only in class attribute annotations. "
"Class Finals are inferred ClassVar; do not nest with Final."
)
# Even if we know that e.g. `builtins.str` is the exact `str` type and
# not a subclass, and it's useful to track that knowledge, when we
# annotate `x: str` that annotation should not exclude subclasses.
klass = klass.inexact_type()
# PEP-484 specifies that ints should be treated as a subclass of floats,
# even though they differ in the runtime. We need to maintain the distinction
# between the two internally, so we should view user-specified `float` annotations
# as `float | int`. This widening of the type prevents us from applying
# optimizations to user-specified floats, but does not affect ints. Since we
# don't optimize Python floats anyway, we accept this to maintain PEP-484 compatibility.
if klass is FLOAT_TYPE:
klass = UNION_TYPE.make_generic_type(
(FLOAT_TYPE, INT_TYPE), self.compiler.generic_types
)
# TODO until we support runtime checking of unions, we must for
# safety resolve union annotations to dynamic (except for
# optionals, which we can check at runtime)
if (
isinstance(klass, UnionType)
and klass is not UNION_TYPE
and klass is not OPTIONAL_TYPE
and klass.opt_type is None
):
return None
return klass
def visitSubscript(self, node: Subscript) -> Optional[Value]:
target = self.resolve_annotation(node.value, is_declaration=True)
if target is None:
return None
self.subscr_nesting += 1
slice = self.visit(node.slice) or DYNAMIC
self.subscr_nesting -= 1
return target.resolve_subscr(node, slice, self) or target
def visitBinOp(self, node: BinOp) -> Optional[Value]:
if isinstance(node.op, ast.BitOr):
ltype = self.resolve_annotation(node.left)
rtype = self.resolve_annotation(node.right)
if ltype is None or rtype is None:
return None
return UNION_TYPE.make_generic_type(
(ltype, rtype), self.module.compiler.generic_types
)
def visitConstant(self, node: Constant) -> Optional[Value]:
sval = node.value
if sval is None:
return NONE_TYPE
elif isinstance(sval, str):
n = cast(Expression, ast.parse(node.value, "", "eval")).body
return self.visit(n)
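# Illustration of the PEP-484 float handling described above (a sketch, not part of this module):
#   def scale(x: float) -> float:
#       return x * 2.0
#   scale(3)  # accepted: a `float` annotation is widened so that ints are also allowed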
class ModuleTable:
def __init__(
self,
name: str,
filename: str,
compiler: Compiler,
members: Optional[Dict[str, Value]] = None,
) -> None:
self.name = name
self.filename = filename
self.children: Dict[str, Value] = members or {}
self.compiler = compiler
self.types: Dict[AST, Value] = {}
self.node_data: Dict[Tuple[AST, object], object] = {}
self.flags: Set[ModuleFlag] = set()
self.decls: List[Tuple[AST, Optional[str], Optional[Value]]] = []
# TODO: final constants should be typed to literals, and
# this should be removed in the future
self.named_finals: Dict[str, ast.Constant] = {}
# Have we completed our first pass through the module, populating
# imports and types defined in the module? Until we have, resolving
# type annotations is not safe.
self.first_pass_done = False
self.ann_visitor = AnnotationVisitor(self)
self.ref_visitor = ReferenceVisitor(self)
def syntax_error(self, msg: str, node: AST) -> None:
return self.compiler.error_sink.syntax_error(msg, self.filename, node)
def error_context(self, node: Optional[AST]) -> ContextManager[None]:
if node is None:
return nullcontext()
return self.compiler.error_sink.error_context(self.filename, node)
def declare_class(self, node: ClassDef, klass: Class) -> None:
self.decls.append((node, node.name, klass))
self.children[node.name] = klass
def declare_function(self, func: Function) -> None:
existing = self.children.get(func.func_name)
new_member = func
if existing is not None:
if isinstance(existing, Function):
new_member = FunctionGroup([existing, new_member])
elif isinstance(existing, FunctionGroup):
existing.functions.append(new_member)
new_member = existing
else:
raise TypedSyntaxError(
f"function conflicts with other member {func.func_name} in {self.name}"
)
self.decls.append((func.node, func.func_name, new_member))
self.children[func.func_name] = new_member
def _get_inferred_type(self, value: ast.expr) -> Optional[Value]:
if not isinstance(value, ast.Name):
return None
return self.children.get(value.id)
def finish_bind(self) -> None:
self.first_pass_done = True
for node, name, value in self.decls:
with self.error_context(node):
if value is not None:
assert name is not None
new_value = value.finish_bind(self)
if new_value is None:
del self.children[name]
elif new_value is not value:
self.children[name] = new_value
if isinstance(node, ast.AnnAssign):
typ = self.resolve_annotation(node.annotation, is_declaration=True)
if typ is not None:
# Special case Final[dynamic] to use inferred type.
target = node.target
instance = typ.instance
value = node.value
if (
value is not None
and isinstance(typ, FinalClass)
and isinstance(typ.unwrap(), DynamicClass)
):
instance = self._get_inferred_type(value) or instance
if isinstance(target, ast.Name):
self.children[target.id] = instance
if isinstance(typ, FinalClass):
target = node.target
value = node.value
if not value:
raise TypedSyntaxError(
"Must assign a value when declaring a Final"
)
elif (
not isinstance(typ, CType)
and isinstance(target, ast.Name)
and isinstance(value, ast.Constant)
):
self.named_finals[target.id] = value
# We don't need these anymore...
self.decls.clear()
def finish_decorator(
self, node: FunctionDef | AsyncFunctionDef, func: Function
) -> Optional[Value]:
res: Optional[Value] = func
for decorator in reversed(node.decorator_list):
decorator_type = self.resolve_decorator(decorator) or DYNAMIC_TYPE
res = decorator_type.resolve_decorate_function(res, decorator)
if res is None:
self.types[node] = UnknownDecoratedMethod(func)
return None
self.types[node] = res
return res
def resolve_type(self, node: ast.AST) -> Optional[Class]:
typ = self.ann_visitor.visit(node)
if isinstance(typ, Class):
return typ
def resolve_decorator(self, node: ast.AST) -> Optional[Value]:
if isinstance(node, Call):
func = self.ref_visitor.visit(node.func)
if isinstance(func, Class):
return func.instance
elif isinstance(func, Callable):
return func.return_type.resolved().instance
elif isinstance(func, MethodType):
return func.function.return_type.resolved().instance
return self.ref_visitor.visit(node)
def resolve_annotation(
self,
node: ast.AST,
*,
is_declaration: bool = False,
) -> Optional[Class]:
assert self.first_pass_done, (
"Type annotations cannot be resolved until after initial pass, "
"so that all imports and types are available."
)
return self.ann_visitor.resolve_annotation(node, is_declaration=is_declaration)
def resolve_name_with_descr(
self, name: str
) -> Tuple[Optional[Value], Optional[TypeDescr]]:
if val := self.children.get(name):
return val, (self.name, name)
elif val := self.compiler.builtins.children.get(name):
return val, None
return None, None
def resolve_name(self, name: str) -> Optional[Value]:
return self.resolve_name_with_descr(name)[0]
def get_final_literal(self, node: AST, scope: Scope) -> Optional[ast.Constant]:
if not isinstance(node, Name):
return None
final_val = self.named_finals.get(node.id, None)
if (
final_val is not None
and isinstance(node.ctx, ast.Load)
and (
# Ensure the name is not shadowed in the local scope
isinstance(scope, ModuleScope)
or node.id not in scope.defs
)
):
return final_val
def declare_variable(self, node: ast.AnnAssign, module: ModuleTable) -> None:
self.decls.append((node, None, None))
def declare_variables(self, node: ast.Assign, module: ModuleTable) -> None:
pass
| 35.271709
| 100
| 0.58021
|
ddabd0117887205c5158f35e5ffd1ec3fdf4d059
| 1,636
|
py
|
Python
|
django/contrib/gis/geos/prepared.py
|
dwightgunning/django
|
9e399e15fbf03507fa54e4bb20ed6f1b0d817b83
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2017-08-30T06:46:16.000Z
|
2017-08-30T06:46:16.000Z
|
django/contrib/gis/geos/prepared.py
|
dwightgunning/django
|
9e399e15fbf03507fa54e4bb20ed6f1b0d817b83
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/contrib/gis/geos/prepared.py
|
dwightgunning/django
|
9e399e15fbf03507fa54e4bb20ed6f1b0d817b83
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2018-07-23T12:13:04.000Z
|
2018-07-23T12:13:04.000Z
|
from .base import GEOSBase
from .prototypes import prepared as capi
class PreparedGeometry(GEOSBase):
"""
A geometry that is prepared for performing certain operations.
At the moment this includes the contains, covers, and intersects
operations.
"""
ptr_type = capi.PREPGEOM_PTR
def __init__(self, geom):
# Keep a reference to the original geometry object to prevent it
# from being garbage collected, which could then crash the prepared one
# See #21662
self._base_geom = geom
from .geometry import GEOSGeometry
if not isinstance(geom, GEOSGeometry):
raise TypeError
self.ptr = capi.geos_prepare(geom.ptr)
def __del__(self):
if self._ptr and capi:
capi.prepared_destroy(self._ptr)
def contains(self, other):
return capi.prepared_contains(self.ptr, other.ptr)
def contains_properly(self, other):
return capi.prepared_contains_properly(self.ptr, other.ptr)
def covers(self, other):
return capi.prepared_covers(self.ptr, other.ptr)
def intersects(self, other):
return capi.prepared_intersects(self.ptr, other.ptr)
def crosses(self, other):
return capi.prepared_crosses(self.ptr, other.ptr)
def disjoint(self, other):
return capi.prepared_disjoint(self.ptr, other.ptr)
def overlaps(self, other):
return capi.prepared_overlaps(self.ptr, other.ptr)
def touches(self, other):
return capi.prepared_touches(self.ptr, other.ptr)
def within(self, other):
return capi.prepared_within(self.ptr, other.ptr)
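# A hedged usage sketch (assumes a working GEOS build; `GEOSGeometry.prepared` is the
# usual way to obtain a PreparedGeometry rather than constructing it directly):
#   from django.contrib.gis.geos import GEOSGeometry
#   poly = GEOSGeometry('POLYGON((0 0, 0 10, 10 10, 10 0, 0 0))')
#   prep = poly.prepared
#   prep.contains(GEOSGeometry('POINT(5 5)'))  # True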
| 30.867925
| 78
| 0.68154
|
0e86bedc07b9a94fa7691dca3b6eb5e023e5ff1d
| 1,092
|
py
|
Python
|
clients/kratos/python/test/test_ui_text.py
|
simoneromano96/sdk
|
a6113d0daefbbb803790297e4b242d4c7cbbcb22
|
[
"Apache-2.0"
] | null | null | null |
clients/kratos/python/test/test_ui_text.py
|
simoneromano96/sdk
|
a6113d0daefbbb803790297e4b242d4c7cbbcb22
|
[
"Apache-2.0"
] | null | null | null |
clients/kratos/python/test/test_ui_text.py
|
simoneromano96/sdk
|
a6113d0daefbbb803790297e4b242d4c7cbbcb22
|
[
"Apache-2.0"
] | null | null | null |
"""
Ory Kratos API
Documentation for all public and administrative Ory Kratos APIs. Public and administrative APIs are exposed on different ports. Public APIs can face the public internet without any protection while administrative APIs should never be exposed without prior authorization. To protect the administrative API port you should use something like Nginx, Ory Oathkeeper, or any other technology capable of authorizing incoming requests. # noqa: E501
The version of the OpenAPI document: v0.0.0-alpha.38
Contact: hi@ory.sh
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import ory_kratos_client
from ory_kratos_client.model.ui_text import UiText
class TestUiText(unittest.TestCase):
"""UiText unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testUiText(self):
"""Test UiText"""
# FIXME: construct object with mandatory attributes with example values
# model = UiText() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 29.513514
| 446
| 0.721612
|
d396c6adb80c95b3746d3af6c2c2dfdcebadf916
| 3,171
|
py
|
Python
|
airflow/contrib/sensors/datadog_sensor.py
|
InigoSJ/airflow
|
8b97a387dc30d8c88390d500ec99333798c20f1c
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 3
|
2019-08-17T15:16:11.000Z
|
2019-11-18T06:42:15.000Z
|
airflow/contrib/sensors/datadog_sensor.py
|
InigoSJ/airflow
|
8b97a387dc30d8c88390d500ec99333798c20f1c
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 7
|
2019-03-27T07:58:14.000Z
|
2020-02-12T17:42:33.000Z
|
airflow/contrib/sensors/datadog_sensor.py
|
upjohnc/airflow-upjohn-k8s
|
caadbc1618d73e054de99138b0892cea3a9327c4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 5
|
2017-06-19T19:55:47.000Z
|
2020-10-10T00:49:20.000Z
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.contrib.hooks.datadog_hook import DatadogHook
from airflow.utils.decorators import apply_defaults
from airflow.exceptions import AirflowException
from datadog import api
class DatadogSensor(BaseSensorOperator):
"""
A sensor to listen, with a filter, to datadog event streams and determine
if some event was emitted.
Depends on the datadog API, which has to be deployed on the same server where
Airflow runs.
:param datadog_conn_id: The connection to datadog, containing metadata for api keys.
:param datadog_conn_id: str
"""
ui_color = '#66c3dd'
@apply_defaults
def __init__(
self,
datadog_conn_id='datadog_default',
from_seconds_ago=3600,
up_to_seconds_from_now=0,
priority=None,
sources=None,
tags=None,
response_check=None,
*args,
**kwargs):
super().__init__(*args, **kwargs)
self.datadog_conn_id = datadog_conn_id
self.from_seconds_ago = from_seconds_ago
self.up_to_seconds_from_now = up_to_seconds_from_now
self.priority = priority
self.sources = sources
self.tags = tags
self.response_check = response_check
def poke(self, context):
# This instantiates the hook, but doesn't need it further,
# because the API authenticates globally (unfortunately),
# but for airflow this shouldn't matter too much, because each
# task instance runs in its own process anyway.
DatadogHook(datadog_conn_id=self.datadog_conn_id)
response = api.Event.query(
start=self.from_seconds_ago,
end=self.up_to_seconds_from_now,
priority=self.priority,
sources=self.sources,
tags=self.tags)
if isinstance(response, dict) and response.get('status', 'ok') != 'ok':
self.log.error("Unexpected Datadog result: %s", response)
raise AirflowException("Datadog returned unexpected result")
if self.response_check:
# run content check on response
return self.response_check(response)
# If no check was inserted, assume any event that matched yields true.
return len(response) > 0
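# A hedged DAG usage sketch (task_id/dag are standard BaseOperator kwargs; the tag value
# is hypothetical):
#   wait_for_deploy = DatadogSensor(
#       task_id='wait_for_deploy_event',
#       datadog_conn_id='datadog_default',
#       from_seconds_ago=600,
#       tags=['deployment'],
#       dag=dag,
#   )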
| 37.75
| 88
| 0.68748
|
bd2d7f9e4b58c361ebf3cb2953eecafc8d964e15
| 2,119
|
py
|
Python
|
Lib/test/test_seq_jy.py
|
clach04/bb_import_jython
|
4529a0f578186a1c33c476300294ab42658eaf7c
|
[
"CNRI-Jython"
] | null | null | null |
Lib/test/test_seq_jy.py
|
clach04/bb_import_jython
|
4529a0f578186a1c33c476300294ab42658eaf7c
|
[
"CNRI-Jython"
] | null | null | null |
Lib/test/test_seq_jy.py
|
clach04/bb_import_jython
|
4529a0f578186a1c33c476300294ab42658eaf7c
|
[
"CNRI-Jython"
] | null | null | null |
"""Additional seq_tests
Made for Jython.
"""
import unittest
from collections import deque
from test import test_support
class SeqTestCase(unittest.TestCase):
types2test = list, tuple, deque
def test_seq_item_equality(self):
eq_called = []
class Foo(object):
def __eq__(self, other):
eq_called.append(other)
return False
for type2test in self.types2test:
foo = Foo()
seq1 = type2test([foo])
self.assertEqual(seq1, seq1)
self.assertEqual(cmp(seq1, seq1), 0)
seq2 = type2test([foo])
self.assertEqual(seq1, seq2)
self.assertEqual(cmp(seq1, seq2), 0)
self.assertTrue(foo in seq1)
self.assertFalse(eq_called)
def test_seq_equality(self):
class Foo(object):
def __eq__(self, other):
return True
foo = [Foo()]
for type2test in self.types2test:
self.assertTrue(type2test() in foo)
@unittest.skip("FIXME: broken")
def test_seq_subclass_equality(self):
# Various combinations of PyObject._eq, overridden Object.equals,
# and cmp implementations
for type2test in self.types2test:
class Foo(type2test):
def __eq__(self, other):
return False
l = type2test(['bar', 'baz'])
foo = Foo(l)
self.assertNotEqual(l, foo)
self.assertEqual(cmp(l, foo), 1)
self.assertEqual(cmp(foo, foo), 0)
seqs1 = type2test([l, foo])
seqs2 = type2test([l, foo])
self.assertEqual(seqs1, seqs1)
self.assertEqual(seqs1, seqs2)
self.assertEqual(cmp(seqs1, seqs2), 0)
self.assertTrue(foo in seqs1)
if hasattr(seqs1, 'count'):
self.assertTrue(seqs1.count(foo), 1)
if hasattr(seqs1, 'index'):
self.assertEqual(seqs1.index(foo), 1)
def test_main():
test_support.run_unittest(SeqTestCase)
if __name__ == "__main__":
test_main()
| 30.271429
| 72
| 0.566777
|
6560166333a123714272dcf20124f0cb7d17f243
| 766
|
py
|
Python
|
utils/Exceptions.py
|
salbrec/seqQscorer
|
311715bf6dedd19ffd50538beb8b30472cad9e5c
|
[
"MIT"
] | 17
|
2019-09-16T10:31:23.000Z
|
2022-01-09T15:35:28.000Z
|
utils/Exceptions.py
|
salbrec/seqQscorer
|
311715bf6dedd19ffd50538beb8b30472cad9e5c
|
[
"MIT"
] | 3
|
2021-03-06T11:54:25.000Z
|
2021-07-15T11:45:40.000Z
|
utils/Exceptions.py
|
salbrec/seqQscorer
|
311715bf6dedd19ffd50538beb8b30472cad9e5c
|
[
"MIT"
] | 5
|
2020-06-16T09:50:49.000Z
|
2021-11-04T08:29:44.000Z
|
"""Software specific Exceptions
Used for exceptions that can occur when using seqQscorer.
date: 2019-05-12
author: Steffen Albrecht
"""
class WrongFeatureInputException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class WrongSettingException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class WrongOutputFileException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class IncorrectModelException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
| 22.529412
| 57
| 0.685379
|
860499f72d4879ba52afbf4dc22ce6b4c239681b
| 31,033
|
py
|
Python
|
cadquery/occ_impl/geom.py
|
just-georgeb/cadquery
|
25cb063d6176f16d20f2e071fccf0c64e4dc2b2e
|
[
"Apache-2.0"
] | 1,423
|
2018-10-28T18:01:04.000Z
|
2022-03-30T20:22:28.000Z
|
cadquery/occ_impl/geom.py
|
just-georgeb/cadquery
|
25cb063d6176f16d20f2e071fccf0c64e4dc2b2e
|
[
"Apache-2.0"
] | 1,017
|
2018-11-18T20:50:34.000Z
|
2022-03-31T22:56:39.000Z
|
cadquery/occ_impl/geom.py
|
just-georgeb/cadquery
|
25cb063d6176f16d20f2e071fccf0c64e4dc2b2e
|
[
"Apache-2.0"
] | 175
|
2018-11-18T06:07:54.000Z
|
2022-03-31T16:21:18.000Z
|
import math
from typing import overload, Sequence, Union, Tuple, Type, Optional
from OCP.gp import (
gp_Vec,
gp_Ax1,
gp_Ax3,
gp_Pnt,
gp_Dir,
gp_Pln,
gp_Trsf,
gp_GTrsf,
gp_XYZ,
gp_EulerSequence,
gp,
)
from OCP.Bnd import Bnd_Box
from OCP.BRepBndLib import BRepBndLib
from OCP.BRepMesh import BRepMesh_IncrementalMesh
from OCP.TopoDS import TopoDS_Shape
from OCP.TopLoc import TopLoc_Location
TOL = 1e-2
class Vector(object):
"""Create a 3-dimensional vector
:param args: a 3D vector, with x-y-z parts.
you can either provide:
* nothing (in which case the null vector is returned)
* a gp_Vec
* a vector (in which case it is copied)
* a 3-tuple
* a 2-tuple (z assumed to be 0)
* three float values: x, y, and z
* two float values: x,y
"""
_wrapped: gp_Vec
@overload
def __init__(self, x: float, y: float, z: float) -> None:
...
@overload
def __init__(self, x: float, y: float) -> None:
...
@overload
def __init__(self, v: "Vector") -> None:
...
@overload
def __init__(self, v: Sequence[float]) -> None:
...
@overload
def __init__(self, v: Union[gp_Vec, gp_Pnt, gp_Dir, gp_XYZ]) -> None:
...
@overload
def __init__(self) -> None:
...
def __init__(self, *args):
if len(args) == 3:
fV = gp_Vec(*args)
elif len(args) == 2:
fV = gp_Vec(*args, 0)
elif len(args) == 1:
if isinstance(args[0], Vector):
fV = gp_Vec(args[0].wrapped.XYZ())
elif isinstance(args[0], (tuple, list)):
arg = args[0]
if len(arg) == 3:
fV = gp_Vec(*arg)
elif len(arg) == 2:
fV = gp_Vec(*arg, 0)
elif isinstance(args[0], (gp_Vec, gp_Pnt, gp_Dir)):
fV = gp_Vec(args[0].XYZ())
elif isinstance(args[0], gp_XYZ):
fV = gp_Vec(args[0])
else:
raise TypeError("Expected three floats, OCC gp_, or 3-tuple")
elif len(args) == 0:
fV = gp_Vec(0, 0, 0)
else:
raise TypeError("Expected three floats, OCC gp_, or 3-tuple")
self._wrapped = fV
@property
def x(self) -> float:
return self.wrapped.X()
@x.setter
def x(self, value: float) -> None:
self.wrapped.SetX(value)
@property
def y(self) -> float:
return self.wrapped.Y()
@y.setter
def y(self, value: float) -> None:
self.wrapped.SetY(value)
@property
def z(self) -> float:
return self.wrapped.Z()
@z.setter
def z(self, value: float) -> None:
self.wrapped.SetZ(value)
@property
def Length(self) -> float:
return self.wrapped.Magnitude()
@property
def wrapped(self) -> gp_Vec:
return self._wrapped
def toTuple(self) -> Tuple[float, float, float]:
return (self.x, self.y, self.z)
def cross(self, v: "Vector") -> "Vector":
return Vector(self.wrapped.Crossed(v.wrapped))
def dot(self, v: "Vector") -> float:
return self.wrapped.Dot(v.wrapped)
def sub(self, v: "Vector") -> "Vector":
return Vector(self.wrapped.Subtracted(v.wrapped))
def __sub__(self, v: "Vector") -> "Vector":
return self.sub(v)
def add(self, v: "Vector") -> "Vector":
return Vector(self.wrapped.Added(v.wrapped))
def __add__(self, v: "Vector") -> "Vector":
return self.add(v)
def multiply(self, scale: float) -> "Vector":
"""Return a copy multiplied by the provided scalar"""
return Vector(self.wrapped.Multiplied(scale))
def __mul__(self, scale: float) -> "Vector":
return self.multiply(scale)
def __truediv__(self, denom: float) -> "Vector":
return self.multiply(1.0 / denom)
def __rmul__(self, scale: float) -> "Vector":
return self.multiply(scale)
def normalized(self) -> "Vector":
"""Return a normalized version of this vector"""
return Vector(self.wrapped.Normalized())
def Center(self) -> "Vector":
"""Return the vector itself
The center of myself is myself.
Provided so that vectors, vertices, and other shapes all support a
common interface, when Center() is requested for all objects on the
stack.
"""
return self
def getAngle(self, v: "Vector") -> float:
return self.wrapped.Angle(v.wrapped)
def getSignedAngle(self, v: "Vector") -> float:
return self.wrapped.AngleWithRef(v.wrapped, gp_Vec(0, 0, -1))
def distanceToLine(self):
raise NotImplementedError("Have not needed this yet, but OCCT supports it!")
def projectToLine(self, line: "Vector") -> "Vector":
"""
Returns a new vector equal to the projection of this Vector onto the line
represented by Vector <line>
:param args: Vector
Returns the projected vector.
"""
lineLength = line.Length
return line * (self.dot(line) / (lineLength * lineLength))
def distanceToPlane(self):
raise NotImplementedError("Have not needed this yet, but OCCT supports it!")
def projectToPlane(self, plane: "Plane") -> "Vector":
"""
Vector is projected onto the plane provided as input.
:param args: Plane object
Returns the projected vector.
"""
base = plane.origin
normal = plane.zDir
return self - normal * (((self - base).dot(normal)) / normal.Length ** 2)
def __neg__(self) -> "Vector":
return self * -1
def __abs__(self) -> float:
return self.Length
def __repr__(self) -> str:
return "Vector: " + str((self.x, self.y, self.z))
def __str__(self) -> str:
return "Vector: " + str((self.x, self.y, self.z))
def __eq__(self, other: "Vector") -> bool: # type: ignore[override]
return self.wrapped.IsEqual(other.wrapped, 0.00001, 0.00001)
def toPnt(self) -> gp_Pnt:
return gp_Pnt(self.wrapped.XYZ())
def toDir(self) -> gp_Dir:
return gp_Dir(self.wrapped.XYZ())
def transform(self, T: "Matrix") -> "Vector":
# to gp_Pnt to obey cq transformation convention (in OCP.vectors do not translate)
pnt = self.toPnt()
pnt_t = pnt.Transformed(T.wrapped.Trsf())
return Vector(gp_Vec(pnt_t.XYZ()))
class Matrix:
"""A 3d , 4x4 transformation matrix.
Used to move geometry in space.
The provided "matrix" parameter may be None, a gp_GTrsf, or a nested list of
values.
If given a nested list, it is expected to be of the form:
[[m11, m12, m13, m14],
[m21, m22, m23, m24],
[m31, m32, m33, m34]]
A fourth row may be given, but it is expected to be: [0.0, 0.0, 0.0, 1.0]
since this is a transform matrix.
"""
wrapped: gp_GTrsf
@overload
def __init__(self) -> None:
...
@overload
def __init__(self, matrix: Union[gp_GTrsf, gp_Trsf]) -> None:
...
@overload
def __init__(self, matrix: Sequence[Sequence[float]]) -> None:
...
def __init__(self, matrix=None):
if matrix is None:
self.wrapped = gp_GTrsf()
elif isinstance(matrix, gp_GTrsf):
self.wrapped = matrix
elif isinstance(matrix, gp_Trsf):
self.wrapped = gp_GTrsf(matrix)
elif isinstance(matrix, (list, tuple)):
# Validate matrix size & 4x4 last row value
valid_sizes = all(
(isinstance(row, (list, tuple)) and (len(row) == 4)) for row in matrix
) and len(matrix) in (3, 4)
if not valid_sizes:
raise TypeError(
"Matrix constructor requires 2d list of 4x3 or 4x4, but got: {!r}".format(
matrix
)
)
elif (len(matrix) == 4) and (tuple(matrix[3]) != (0, 0, 0, 1)):
raise ValueError(
"Expected the last row to be [0,0,0,1], but got: {!r}".format(
matrix[3]
)
)
# Assign values to matrix
self.wrapped = gp_GTrsf()
[
self.wrapped.SetValue(i + 1, j + 1, e)
for i, row in enumerate(matrix[:3])
for j, e in enumerate(row)
]
else:
raise TypeError("Invalid param to matrix constructor: {}".format(matrix))
def rotateX(self, angle: float):
self._rotate(gp.OX_s(), angle)
def rotateY(self, angle: float):
self._rotate(gp.OY_s(), angle)
def rotateZ(self, angle: float):
self._rotate(gp.OZ_s(), angle)
def _rotate(self, direction: gp_Ax1, angle: float):
new = gp_Trsf()
new.SetRotation(direction, angle)
self.wrapped = self.wrapped * gp_GTrsf(new)
def inverse(self) -> "Matrix":
return Matrix(self.wrapped.Inverted())
@overload
def multiply(self, other: Vector) -> Vector:
...
@overload
def multiply(self, other: "Matrix") -> "Matrix":
...
def multiply(self, other):
if isinstance(other, Vector):
return other.transform(self)
return Matrix(self.wrapped.Multiplied(other.wrapped))
def transposed_list(self) -> Sequence[float]:
"""Needed by the cqparts gltf exporter"""
trsf = self.wrapped
data = [[trsf.Value(i, j) for j in range(1, 5)] for i in range(1, 4)] + [
[0.0, 0.0, 0.0, 1.0]
]
return [data[j][i] for i in range(4) for j in range(4)]
def __getitem__(self, rc: Tuple[int, int]) -> float:
"""Provide Matrix[r, c] syntax for accessing individual values. The row
and column parameters start at zero, which is consistent with most
python libraries, but is counter to gp_GTrsf(), which is 1-indexed.
"""
if not isinstance(rc, tuple) or (len(rc) != 2):
raise IndexError("Matrix subscript must provide (row, column)")
(r, c) = rc
if (0 <= r <= 3) and (0 <= c <= 3):
if r < 3:
return self.wrapped.Value(r + 1, c + 1)
else:
# gp_GTrsf doesn't provide access to the 4th row because it has
# an implied value as below:
return [0.0, 0.0, 0.0, 1.0][c]
else:
raise IndexError("Out of bounds access into 4x4 matrix: {!r}".format(rc))
def __repr__(self) -> str:
"""
Generate a valid python expression representing this Matrix
"""
matrix_transposed = self.transposed_list()
matrix_str = ",\n ".join(str(matrix_transposed[i::4]) for i in range(4))
return f"Matrix([{matrix_str}])"
class Plane(object):
"""A 2D coordinate system in space
A 2D coordinate system in space, with the x-y axes on the plane, and a
particular point as the origin.
A plane allows the use of 2D coordinates, which are later converted to
global, 3d coordinates when the operations are complete.
Frequently, it is not necessary to create work planes, as they can be
created automatically from faces.
"""
xDir: Vector
yDir: Vector
zDir: Vector
_origin: Vector
lcs: gp_Ax3
rG: Matrix
fG: Matrix
# equality tolerances
_eq_tolerance_origin = 1e-6
_eq_tolerance_dot = 1e-6
@classmethod
def named(cls: Type["Plane"], stdName: str, origin=(0, 0, 0)) -> "Plane":
"""Create a predefined Plane based on the conventional names.
:param stdName: one of (XY|YZ|ZX|XZ|YX|ZY|front|back|left|right|top|bottom)
:type stdName: string
:param origin: the desired origin, specified in global coordinates
        :type origin: 3-tuple giving the origin of the new plane, in global coordinates.
Available named planes are as follows. Direction references refer to
the global directions.
=========== ======= ======= ======
Name xDir yDir zDir
=========== ======= ======= ======
XY +x +y +z
YZ +y +z +x
ZX +z +x +y
XZ +x +z -y
YX +y +x -z
ZY +z +y -x
front +x +y +z
back -x +y -z
left +z +y -x
right -z +y +x
top +x -z +y
bottom +x +z -y
=========== ======= ======= ======
"""
namedPlanes = {
# origin, xDir, normal
"XY": Plane(origin, (1, 0, 0), (0, 0, 1)),
"YZ": Plane(origin, (0, 1, 0), (1, 0, 0)),
"ZX": Plane(origin, (0, 0, 1), (0, 1, 0)),
"XZ": Plane(origin, (1, 0, 0), (0, -1, 0)),
"YX": Plane(origin, (0, 1, 0), (0, 0, -1)),
"ZY": Plane(origin, (0, 0, 1), (-1, 0, 0)),
"front": Plane(origin, (1, 0, 0), (0, 0, 1)),
"back": Plane(origin, (-1, 0, 0), (0, 0, -1)),
"left": Plane(origin, (0, 0, 1), (-1, 0, 0)),
"right": Plane(origin, (0, 0, -1), (1, 0, 0)),
"top": Plane(origin, (1, 0, 0), (0, 1, 0)),
"bottom": Plane(origin, (1, 0, 0), (0, -1, 0)),
}
try:
return namedPlanes[stdName]
except KeyError:
raise ValueError("Supported names are {}".format(list(namedPlanes.keys())))
@classmethod
def XY(cls, origin=(0, 0, 0), xDir=Vector(1, 0, 0)):
plane = Plane.named("XY", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def YZ(cls, origin=(0, 0, 0), xDir=Vector(0, 1, 0)):
plane = Plane.named("YZ", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def ZX(cls, origin=(0, 0, 0), xDir=Vector(0, 0, 1)):
plane = Plane.named("ZX", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def XZ(cls, origin=(0, 0, 0), xDir=Vector(1, 0, 0)):
plane = Plane.named("XZ", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def YX(cls, origin=(0, 0, 0), xDir=Vector(0, 1, 0)):
plane = Plane.named("YX", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def ZY(cls, origin=(0, 0, 0), xDir=Vector(0, 0, 1)):
plane = Plane.named("ZY", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def front(cls, origin=(0, 0, 0), xDir=Vector(1, 0, 0)):
plane = Plane.named("front", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def back(cls, origin=(0, 0, 0), xDir=Vector(-1, 0, 0)):
plane = Plane.named("back", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def left(cls, origin=(0, 0, 0), xDir=Vector(0, 0, 1)):
plane = Plane.named("left", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def right(cls, origin=(0, 0, 0), xDir=Vector(0, 0, -1)):
plane = Plane.named("right", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def top(cls, origin=(0, 0, 0), xDir=Vector(1, 0, 0)):
plane = Plane.named("top", origin)
plane._setPlaneDir(xDir)
return plane
@classmethod
def bottom(cls, origin=(0, 0, 0), xDir=Vector(1, 0, 0)):
plane = Plane.named("bottom", origin)
plane._setPlaneDir(xDir)
return plane
def __init__(
self,
origin: Union[Tuple[float, float, float], Vector],
xDir: Optional[Union[Tuple[float, float, float], Vector]] = None,
normal: Union[Tuple[float, float, float], Vector] = (0, 0, 1),
):
"""
Create a Plane with an arbitrary orientation
:param origin: the origin in global coordinates
:param xDir: an optional vector representing the xDirection.
:param normal: the normal direction for the plane
        :raises ValueError: if the provided normal or xDir has zero length
"""
zDir = Vector(normal)
if zDir.Length == 0.0:
raise ValueError("normal should be non null")
self.zDir = zDir.normalized()
if xDir is None:
ax3 = gp_Ax3(Vector(origin).toPnt(), Vector(normal).toDir())
xDir = Vector(ax3.XDirection())
else:
xDir = Vector(xDir)
if xDir.Length == 0.0:
raise ValueError("xDir should be non null")
self._setPlaneDir(xDir)
self.origin = Vector(origin)
def _eq_iter(self, other):
"""Iterator to successively test equality"""
cls = type(self)
yield isinstance(other, Plane) # comparison is with another Plane
# origins are the same
yield abs(self.origin - other.origin) < cls._eq_tolerance_origin
# z-axis vectors are parallel (assumption: both are unit vectors)
yield abs(self.zDir.dot(other.zDir) - 1) < cls._eq_tolerance_dot
# x-axis vectors are parallel (assumption: both are unit vectors)
yield abs(self.xDir.dot(other.xDir) - 1) < cls._eq_tolerance_dot
def __eq__(self, other):
return all(self._eq_iter(other))
def __ne__(self, other):
return not self.__eq__(other)
@property
def origin(self) -> Vector:
return self._origin
@origin.setter
def origin(self, value):
self._origin = Vector(value)
self._calcTransforms()
def setOrigin2d(self, x, y):
"""
Set a new origin in the plane itself
Set a new origin in the plane itself. The plane's orientation and
        xDirection are unaffected.
:param float x: offset in the x direction
:param float y: offset in the y direction
:return: void
The new coordinates are specified in terms of the current 2D system.
As an example:
p = Plane.XY()
p.setOrigin2d(2, 2)
p.setOrigin2d(2, 2)
results in a plane with its origin at (x, y) = (4, 4) in global
coordinates. Both operations were relative to local coordinates of the
plane.
"""
self.origin = self.toWorldCoords((x, y))
def toLocalCoords(self, obj):
"""Project the provided coordinates onto this plane
:param obj: an object or vector to convert
        :type obj: a Vector or Shape
:return: an object of the same type, but converted to local coordinates
Most of the time, the z-coordinate returned will be zero, because most
operations based on a plane are all 2D. Occasionally, though, 3D
points outside of the current plane are transformed. One such example is
:py:meth:`Workplane.box`, where 3D corners of a box are transformed to
orient the box in space correctly.
"""
from .shapes import Shape
if isinstance(obj, Vector):
return obj.transform(self.fG)
elif isinstance(obj, Shape):
return obj.transformShape(self.fG)
else:
raise ValueError(
"Don't know how to convert type {} to local coordinates".format(
type(obj)
)
)
def toWorldCoords(self, tuplePoint) -> Vector:
"""Convert a point in local coordinates to global coordinates
:param tuplePoint: point in local coordinates to convert.
        :type tuplePoint: a 2- or 3-tuple of float. The third value is taken to be zero if not supplied.
:return: a Vector in global coordinates
"""
if isinstance(tuplePoint, Vector):
v = tuplePoint
elif len(tuplePoint) == 2:
v = Vector(tuplePoint[0], tuplePoint[1], 0)
else:
v = Vector(tuplePoint)
return v.transform(self.rG)
def rotated(self, rotate=(0, 0, 0)):
"""Returns a copy of this plane, rotated about the specified axes
        Since the z axis is always normal to the plane, rotating around Z will
always produce a plane that is parallel to this one.
The origin of the workplane is unaffected by the rotation.
Rotations are done in order x, y, z. If you need a different order,
manually chain together multiple rotate() commands.
:param rotate: Vector [xDegrees, yDegrees, zDegrees]
:return: a copy of this plane rotated as requested.
"""
        # NB: rotate is a triple of angles (in degrees) packed into a Vector, not a geometric vector
rotate = Vector(rotate)
# Convert to radians.
rotate = rotate.multiply(math.pi / 180.0)
# Compute rotation matrix.
T1 = gp_Trsf()
T1.SetRotation(
gp_Ax1(gp_Pnt(*(0, 0, 0)), gp_Dir(*self.xDir.toTuple())), rotate.x
)
T2 = gp_Trsf()
T2.SetRotation(
gp_Ax1(gp_Pnt(*(0, 0, 0)), gp_Dir(*self.yDir.toTuple())), rotate.y
)
T3 = gp_Trsf()
T3.SetRotation(
gp_Ax1(gp_Pnt(*(0, 0, 0)), gp_Dir(*self.zDir.toTuple())), rotate.z
)
T = Matrix(gp_GTrsf(T1 * T2 * T3))
# Compute the new plane.
newXdir = self.xDir.transform(T)
newZdir = self.zDir.transform(T)
return Plane(self.origin, newXdir, newZdir)
def mirrorInPlane(self, listOfShapes, axis="X"):
local_coord_system = gp_Ax3(
self.origin.toPnt(), self.zDir.toDir(), self.xDir.toDir()
)
T = gp_Trsf()
if axis == "X":
T.SetMirror(gp_Ax1(self.origin.toPnt(), local_coord_system.XDirection()))
elif axis == "Y":
T.SetMirror(gp_Ax1(self.origin.toPnt(), local_coord_system.YDirection()))
else:
raise NotImplementedError
resultWires = []
for w in listOfShapes:
mirrored = w.transformShape(Matrix(T))
# attempt stitching of the wires
resultWires.append(mirrored)
return resultWires
def _setPlaneDir(self, xDir):
"""Set the vectors parallel to the plane, i.e. xDir and yDir"""
xDir = Vector(xDir)
self.xDir = xDir.normalized()
self.yDir = self.zDir.cross(self.xDir).normalized()
def _calcTransforms(self):
"""Computes transformation matrices to convert between coordinates
Computes transformation matrices to convert between local and global
coordinates.
"""
# r is the forward transformation matrix from world to local coordinates
        # Admittedly the details are subtle: the result depends on the order in
        # which the translation and the rotation are applied, and on the inverse
        # transformation computed below.
forward = Matrix()
inverse = Matrix()
forwardT = gp_Trsf()
inverseT = gp_Trsf()
global_coord_system = gp_Ax3()
local_coord_system = gp_Ax3(
gp_Pnt(*self.origin.toTuple()),
gp_Dir(*self.zDir.toTuple()),
gp_Dir(*self.xDir.toTuple()),
)
forwardT.SetTransformation(global_coord_system, local_coord_system)
forward.wrapped = gp_GTrsf(forwardT)
inverseT.SetTransformation(local_coord_system, global_coord_system)
inverse.wrapped = gp_GTrsf(inverseT)
self.lcs = local_coord_system
self.rG = inverse
self.fG = forward
@property
def location(self) -> "Location":
return Location(self)
def toPln(self) -> gp_Pln:
return gp_Pln(gp_Ax3(self.origin.toPnt(), self.zDir.toDir(), self.xDir.toDir()))
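# A minimal usage sketch (hypothetical values): a named Plane converts between
# its local 2D coordinates and global 3D coordinates, and the two conversions
# round-trip.
def _example_plane_coords():
    p = Plane.named("XZ", origin=(0, 0, 5))
    world = p.toWorldCoords((1, 2))  # local (1, 2) -> a global 3D Vector
    local = p.toLocalCoords(world)   # back to (1.0, 2.0, 0.0) in plane coordinates
    return world, local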
class BoundBox(object):
"""A BoundingBox for an object or set of objects. Wraps the OCP one"""
wrapped: Bnd_Box
xmin: float
xmax: float
xlen: float
ymin: float
ymax: float
ylen: float
zmin: float
zmax: float
zlen: float
def __init__(self, bb: Bnd_Box) -> None:
self.wrapped = bb
XMin, YMin, ZMin, XMax, YMax, ZMax = bb.Get()
self.xmin = XMin
self.xmax = XMax
self.xlen = XMax - XMin
self.ymin = YMin
self.ymax = YMax
self.ylen = YMax - YMin
self.zmin = ZMin
self.zmax = ZMax
self.zlen = ZMax - ZMin
self.center = Vector((XMax + XMin) / 2, (YMax + YMin) / 2, (ZMax + ZMin) / 2)
self.DiagonalLength = self.wrapped.SquareExtent() ** 0.5
def add(
self,
obj: Union[Tuple[float, float, float], Vector, "BoundBox"],
tol: Optional[float] = None,
) -> "BoundBox":
"""Returns a modified (expanded) bounding box
obj can be one of several things:
1. a 3-tuple corresponding to x,y, and z amounts to add
2. a vector, containing the x,y,z values to add
3. another bounding box, where a new box will be created that
encloses both.
This bounding box is not changed.
"""
tol = TOL if tol is None else tol # tol = TOL (by default)
tmp = Bnd_Box()
tmp.SetGap(tol)
tmp.Add(self.wrapped)
if isinstance(obj, tuple):
tmp.Update(*obj)
elif isinstance(obj, Vector):
tmp.Update(*obj.toTuple())
elif isinstance(obj, BoundBox):
tmp.Add(obj.wrapped)
return BoundBox(tmp)
@staticmethod
def findOutsideBox2D(bb1: "BoundBox", bb2: "BoundBox") -> Optional["BoundBox"]:
"""Compares bounding boxes
        Compares bounding boxes in 2D. Returns None if neither fully encloses
        the other; otherwise returns the enclosing (outer) box.
        BoundBox.isInside works in 3D, so it does not apply to this 2D check,
        and the built-in implementation also suffers from rounding error.
"""
if (
bb1.xmin < bb2.xmin
and bb1.xmax > bb2.xmax
and bb1.ymin < bb2.ymin
and bb1.ymax > bb2.ymax
):
return bb1
if (
bb2.xmin < bb1.xmin
and bb2.xmax > bb1.xmax
and bb2.ymin < bb1.ymin
and bb2.ymax > bb1.ymax
):
return bb2
return None
@classmethod
def _fromTopoDS(
cls: Type["BoundBox"],
shape: TopoDS_Shape,
tol: Optional[float] = None,
optimal: bool = True,
):
"""
Constructs a bounding box from a TopoDS_Shape
"""
tol = TOL if tol is None else tol # tol = TOL (by default)
bbox = Bnd_Box()
if optimal:
BRepBndLib.AddOptimal_s(
shape, bbox
) # this is 'exact' but expensive - not yet wrapped by PythonOCC
else:
mesh = BRepMesh_IncrementalMesh(shape, tol, True)
mesh.Perform()
            # this adds a margin but is faster
BRepBndLib.Add_s(shape, bbox, True)
return cls(bbox)
def isInside(self, b2: "BoundBox") -> bool:
"""Is the provided bounding box inside this one?"""
if (
b2.xmin > self.xmin
and b2.ymin > self.ymin
and b2.zmin > self.zmin
and b2.xmax < self.xmax
and b2.ymax < self.ymax
and b2.zmax < self.zmax
):
return True
else:
return False
class Location(object):
"""Location in 3D space. Depending on usage can be absolute or relative.
This class wraps the TopLoc_Location class from OCCT. It can be used to move Shape
objects in both relative and absolute manner. It is the preferred type to locate objects
in CQ.
"""
wrapped: TopLoc_Location
@overload
def __init__(self) -> None:
"""Empty location with not rotation or translation with respect to the original location."""
...
@overload
def __init__(self, t: Vector) -> None:
"""Location with translation t with respect to the original location."""
...
@overload
def __init__(self, t: Plane) -> None:
"""Location corresponding to the location of the Plane t."""
...
@overload
def __init__(self, t: Plane, v: Vector) -> None:
"""Location corresponding to the angular location of the Plane t with translation v."""
...
@overload
def __init__(self, t: TopLoc_Location) -> None:
"""Location wrapping the low-level TopLoc_Location object t"""
...
@overload
def __init__(self, t: gp_Trsf) -> None:
"""Location wrapping the low-level gp_Trsf object t"""
...
@overload
def __init__(self, t: Vector, ax: Vector, angle: float) -> None:
"""Location with translation t and rotation around ax by angle
with respect to the original location."""
...
def __init__(self, *args):
T = gp_Trsf()
if len(args) == 0:
pass
elif len(args) == 1:
t = args[0]
if isinstance(t, Vector):
T.SetTranslationPart(t.wrapped)
elif isinstance(t, Plane):
cs = gp_Ax3(t.origin.toPnt(), t.zDir.toDir(), t.xDir.toDir())
T.SetTransformation(cs)
T.Invert()
elif isinstance(t, TopLoc_Location):
self.wrapped = t
return
elif isinstance(t, gp_Trsf):
T = t
elif isinstance(t, (tuple, list)):
raise TypeError(
"A tuple or list is not a valid parameter, use a Vector instead."
)
else:
raise TypeError("Unexpected parameters")
elif len(args) == 2:
t, v = args
cs = gp_Ax3(v.toPnt(), t.zDir.toDir(), t.xDir.toDir())
T.SetTransformation(cs)
T.Invert()
else:
t, ax, angle = args
T.SetRotation(gp_Ax1(Vector().toPnt(), ax.toDir()), angle * math.pi / 180.0)
T.SetTranslationPart(t.wrapped)
self.wrapped = TopLoc_Location(T)
@property
def inverse(self) -> "Location":
return Location(self.wrapped.Inverted())
def __mul__(self, other: "Location") -> "Location":
return Location(self.wrapped * other.wrapped)
def toTuple(self) -> Tuple[Tuple[float, float, float], Tuple[float, float, float]]:
"""Convert the location to a translation, rotation tuple."""
T = self.wrapped.Transformation()
trans = T.TranslationPart()
rot = T.GetRotation()
rv_trans = (trans.X(), trans.Y(), trans.Z())
rv_rot = rot.GetEulerAngles(gp_EulerSequence.gp_Extrinsic_XYZ)
return rv_trans, rv_rot
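# A minimal usage sketch (hypothetical values): a Location built from a
# translation combined with a rotation can be decomposed again with toTuple().
def _example_location_usage():
    loc = Location(Vector(1, 2, 3), Vector(0, 0, 1), 90)  # translation of (1, 2, 3) plus a 90 deg rotation about Z
    translation, euler_xyz = loc.toTuple()  # ((1.0, 2.0, 3.0), extrinsic XYZ Euler angles in radians)
    return translation, euler_xyz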
| 30.634748
| 107
| 0.562691
|
7246a173d7efdc4f0e83ccc82dcfe0f2cfae8ebe
| 5,479
|
py
|
Python
|
FoE Bot.py
|
Iapetus-11/Forge-of-Empires-bot
|
7a78474584567468626558c3e046d44e5b677eae
|
[
"MIT"
] | 3
|
2019-05-17T21:37:19.000Z
|
2021-01-27T12:25:52.000Z
|
FoE Bot.py
|
Iapetus-11/Forge-of-Empires-bot
|
7a78474584567468626558c3e046d44e5b677eae
|
[
"MIT"
] | null | null | null |
FoE Bot.py
|
Iapetus-11/Forge-of-Empires-bot
|
7a78474584567468626558c3e046d44e5b677eae
|
[
"MIT"
] | 2
|
2020-06-01T05:37:56.000Z
|
2021-08-31T18:53:59.000Z
|
import pyautogui
from time import sleep
from random import randint
import threading
#opencv-python is required! (pip install opencv-python).
#functions to be run, you can change these!
collectGold = True #collect gold from buildings.
collectSupplies = True #collect supplies from buildings.
restartIdleBuildings = True #restart any idle building.
collectGoods = True #collect goods from buildings other than supplies and gold.
pressButtons = True #automatically aid other people and accept friend requests.
#One might need to change these based on screen resolution
ydiff1 = 25
ydiff2 = 50
pyautogui.FAILSAFE = False
lock = threading.Lock()
#Yes I know I'm a crappy programmer.
open("pause.txt", "w+").write("cont")
def processOutput(output):
#get coordinates to click from output
xcoord = int(output[0])
ycoord = int(output[1])
ycoord += ydiff1
#goto coordinates and click there
lock.acquire()
pyautogui.moveTo(xcoord, ycoord, duration=randint(5,15)/10)
sleep(randint(12,25)/100)
pyautogui.click()
pyautogui.moveRel(0, ydiff2, duration=randint(5,15)/20)
sleep(randint(6,12)/10/7)
pyautogui.click()
print("Bot has collected gold from a building.")
sleep(randint(80,100)/100)
pyautogui.typewrite(['esc'])
lock.release()
def processIdleOutput(output):
#get coordinates to click from output
xcoord = int(output[0])
ycoord = int(output[1])
ycoord += ydiff1
#goto coordinates and click there
lock.acquire()
pyautogui.moveTo(xcoord, ycoord, duration=randint(5,15)/10)
sleep(randint(12,25)/100)
pyautogui.click()
sleep(randint(70,90)/100)
pyautogui.typewrite(['1', '2', '3', '4', '5'])
pyautogui.moveRel(0, ydiff2, duration=randint(5,15)/20)
sleep(randint(6,12)/10/7)
pyautogui.click()
pyautogui.typewrite(['1', '2', '3', '4', '5'])
print("Bot has restarted a production building.")
sleep(randint(80,100)/100)
    # typewrite() types a plain string character by character; wrapping 'esc'
    # in a list makes pyautogui treat it as the ESC key name instead.
    pyautogui.typewrite(['esc'])
lock.release()
def processButtonOutput(output,suppressESC):
lock.acquire()
#get coordinates to click from output
xcoord, ycoord, xcoord2, ycoord2 = output
#goto coordinates and click there
pyautogui.moveTo(randint(xcoord+1, xcoord+xcoord2-1), randint(ycoord+1, ycoord+ycoord2-1), duration=randint(5,15)/10)
sleep(randint(12,25)/100)
pyautogui.click()
print("Bot has clicked a button.")
if suppressESC == False:
sleep(randint(80,100)/100)
pyautogui.typewrite(['esc'])
lock.release()
def worker1(lock): #gold icons
while True:
output = pyautogui.locateOnScreen('gold1.png', confidence=0.905)
lock.acquire()
print("gold1:", output)
lock.release()
        if output is None:
output = pyautogui.locateOnScreen('gold2.png', confidence=0.895)
lock.acquire()
print("gold2:", output)
lock.release()
        if output is not None:
processOutput(output)
def worker2(lock): #supplies icons
while True:
output = pyautogui.locateOnScreen('supplies1.png', confidence=0.805)
lock.acquire()
print("supplies1:", output)
lock.release()
        if output is None:
output = pyautogui.locateOnScreen('supplies2.png', confidence=0.820)
lock.acquire()
print("supplies2:", output)
lock.release()
        if output is not None:
processOutput(output)
def worker3(lock): #idle building icons
while True:
output = pyautogui.locateOnScreen('idle1.png', confidence=0.545)
lock.acquire()
print("idle1:", output)
lock.release()
        if output is not None:
processIdleOutput(output)
def worker4(lock): #goods boxes icons
while True:
output = pyautogui.locateOnScreen('goods1.png', confidence=0.885)
lock.acquire()
print("goods1:", output)
lock.release()
        if output is not None:
processIdleOutput(output)
def worker5(lock): #ingame buttons
suppressESC = False
while True:
output = pyautogui.locateOnScreen('button1.png', confidence=0.800, grayscale=True)
lock.acquire()
print("button1:", output)
lock.release()
        if output is None:
output = pyautogui.locateOnScreen('button2.png', confidence=0.800, grayscale=True)
lock.acquire()
print("button2:", output)
lock.release()
        if output is not None:
processButtonOutput(output, suppressESC)
else:
sleep(5)
#multithreading
if collectGold == True:
t1 = threading.Thread(target=worker1, args=(lock,))
t1.start()
if collectSupplies == True:
t2 = threading.Thread(target=worker2, args=(lock,))
t2.start()
if restartIdleBuildings == True:
t3 = threading.Thread(target=worker3, args=(lock,))
t3.start()
if collectGoods == True:
t4 = threading.Thread(target=worker4, args=(lock,))
t4.start()
if pressButtons == True:
t5 = threading.Thread(target=worker5, args=(lock,))
t5.start()
| 31.854651
| 122
| 0.612703
|
0238a2d094125cc04a7e28be778887deecccc8a2
| 3,535
|
py
|
Python
|
src/peter_sslers/web/lib/handler.py
|
jvanasco/pyramid_letsencrypt_admin
|
6db37d30ef8028ff978bf6083cdf978fc88a4782
|
[
"MIT"
] | 1
|
2016-03-31T22:19:19.000Z
|
2016-03-31T22:19:19.000Z
|
src/peter_sslers/web/lib/handler.py
|
jvanasco/pyramid_letsencrypt_admin
|
6db37d30ef8028ff978bf6083cdf978fc88a4782
|
[
"MIT"
] | null | null | null |
src/peter_sslers/web/lib/handler.py
|
jvanasco/pyramid_letsencrypt_admin
|
6db37d30ef8028ff978bf6083cdf978fc88a4782
|
[
"MIT"
] | null | null | null |
# pypi
import pypages
from pyramid.httpexceptions import HTTPFound
# localapp
from ...lib import db
from ...lib.errors import InvalidRequest
# ==============================================================================
# misc config options
items_per_page = 50
def json_pagination(items_count, pager):
"""
return {"pagination": json_pagination(items_count, pager),}
"""
return {
"total_items": items_count,
"page": pager.page_num,
"page_next": pager.next if pager.has_next else None,
}
# ==============================================================================
class Handler(object):
"""core response class"""
#: The active :class:`Pyramid.request.Request`
request = None
#: The default :class:`model.objects.AcmeAccount`
dbAcmeAccount_GlobalDefault = None
def __init__(self, request):
"""
:param request: A :class:`Pyramid.request.Request` instance.
"""
self.request = request
def _paginate(
self, collection_count, items_per_page=items_per_page, url_template=None
):
"""
:param collection_count: the number of items in the collection
:param items_per_page: the number of items per page
        :param url_template: a url template with which pypages renders the paginator links
"""
page_requested = (
1
if "page" not in self.request.matchdict
else int(self.request.matchdict["page"])
)
pager = pypages.Paginator(
collection_count,
per_page=items_per_page,
current=page_requested,
start=None,
range_num=10,
)
pager.template = url_template
if page_requested == 0:
raise HTTPFound(pager.template.format(1))
if page_requested > pager.page_num:
if pager.page_num > 0:
raise HTTPFound(pager.template.format(pager.page_num))
# return pager, offset
return pager, ((page_requested - 1) * items_per_page)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _ensure_nginx(self):
"""
        if nginx is not enabled, raise an InvalidRequest
"""
if not self.request.registry.settings["app_settings"]["enable_nginx"]:
raise InvalidRequest("nginx is not enabled")
def _ensure_redis(self):
"""
        if redis is not enabled, raise an InvalidRequest
"""
if not self.request.registry.settings["app_settings"]["enable_redis"]:
raise InvalidRequest("redis is not enabled")
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _load_AcmeAccount_GlobalDefault(self):
"""
Loads the default :class:`model.objects.AcmeAccount` into the view's :attr:`.dbAcmeAccount_GlobalDefault`.
"""
self.dbAcmeAccount_GlobalDefault = db.get.get__AcmeAccount__GlobalDefault(
self.request.api_context, active_only=True
)
return self.dbAcmeAccount_GlobalDefault
def _load_AcmeAccountProviders(self):
"""
Loads the options for :class:`model.objects.AcmeAccountProvider` into the view's :attr:`.dbAcmeAccountProviders`.
"""
self.dbAcmeAccountProviders = db.get.get__AcmeAccountProviders__paginated(
self.request.api_context, is_enabled=True
)
return self.dbAcmeAccountProviders
| 32.136364
| 121
| 0.579915
|
1d0c8fb3044ab780e8c9321f5d87090cd3098cea
| 419
|
py
|
Python
|
Ad-Hoc/1574.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Ad-Hoc/1574.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Ad-Hoc/1574.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
# Honestly, I don't know what's wrong...
for k in range(int(input())):
p = 0
n = int(input())
inst = [0 for k in range(n)]
for w in range(n):
I = input()
if I == "LEFT":
p-=1
inst[w] = -1
elif I == "RIGHT":
p+=1
inst[w] = 1
        else:
            # "SAME AS k": repeat the effect of instruction k (1-indexed);
            # parse the full index so multi-digit references also work.
            ref = int(I.split()[-1]) - 1
            p += inst[ref]
            inst[w] = inst[ref]
print(p)
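# A small worked example (input format taken from the reads above: T, then n and
# n instructions per case): for the single case ["LEFT", "RIGHT", "SAME AS 2"]
# the contributions are -1, +1, +1, so the program prints 1.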
| 22.052632
| 42
| 0.396181
|
3f9585534d08934c03cf3254b135072ebf8f6488
| 3,402
|
py
|
Python
|
models/build.py
|
yhlleo/VTs-Drloc
|
b69adb699d85c0fd789c64fc4950272c02aa2ae0
|
[
"MIT"
] | 71
|
2021-11-19T11:51:00.000Z
|
2022-03-29T06:17:02.000Z
|
models/build.py
|
yhlleo/VTs-Drloc
|
b69adb699d85c0fd789c64fc4950272c02aa2ae0
|
[
"MIT"
] | 7
|
2021-11-30T02:32:46.000Z
|
2022-02-24T15:31:19.000Z
|
models/build.py
|
yhlleo/VTs-Drloc
|
b69adb699d85c0fd789c64fc4950272c02aa2ae0
|
[
"MIT"
] | 7
|
2021-11-28T05:09:04.000Z
|
2022-02-07T07:35:03.000Z
|
# --------------------------------------------------------
# Swin Transformer
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ze Liu
# --------------------------------------------------------
from .swin import SwinTransformer
from .cvt import CvT
from .t2t import T2t_vit_14
from .resnet import ResNet50
from .vit import ViT
# Assumed import: TwinsSVT is used by the "twins" branch below but was not
# imported in the original file; ".twins" is the presumed module path.
from .twins import TwinsSVT
def build_model(config):
model_type = config.MODEL.TYPE
if model_type == 'swin':
model = SwinTransformer(
img_size=config.DATA.IMG_SIZE,
patch_size=config.MODEL.SWIN.PATCH_SIZE,
in_chans=config.MODEL.SWIN.IN_CHANS,
num_classes=config.MODEL.NUM_CLASSES,
embed_dim=config.MODEL.SWIN.EMBED_DIM,
depths=config.MODEL.SWIN.DEPTHS,
num_heads=config.MODEL.SWIN.NUM_HEADS,
window_size=config.MODEL.SWIN.WINDOW_SIZE,
mlp_ratio=config.MODEL.SWIN.MLP_RATIO,
qkv_bias=config.MODEL.SWIN.QKV_BIAS,
qk_scale=config.MODEL.SWIN.QK_SCALE,
drop_rate=config.MODEL.DROP_RATE,
drop_path_rate=config.MODEL.DROP_PATH_RATE,
ape=config.MODEL.SWIN.APE,
rpe=config.MODEL.SWIN.RPE,
patch_norm=config.MODEL.SWIN.PATCH_NORM,
use_checkpoint=config.TRAIN.USE_CHECKPOINT,
use_drloc=config.TRAIN.USE_DRLOC,
sample_size=config.TRAIN.SAMPLE_SIZE,
use_multiscale=config.TRAIN.USE_MULTISCALE,
drloc_mode=config.TRAIN.DRLOC_MODE,
use_abs=config.TRAIN.USE_ABS)
elif model_type == "cvt":
model = CvT(
num_classes=config.MODEL.NUM_CLASSES,
use_drloc=config.TRAIN.USE_DRLOC,
sample_size=config.TRAIN.SAMPLE_SIZE,
drloc_mode=config.TRAIN.DRLOC_MODE,
use_abs=config.TRAIN.USE_ABS)
elif model_type == "twins":
model = TwinsSVT(
num_classes=config.MODEL.NUM_CLASSES,
use_drloc=config.TRAIN.USE_DRLOC,
sample_size=config.TRAIN.SAMPLE_SIZE,
drloc_mode=config.TRAIN.DRLOC_MODE,
use_abs=config.TRAIN.USE_ABS)
elif model_type == "t2t":
model = T2t_vit_14(
img_size=config.DATA.IMG_SIZE,
num_classes=config.MODEL.NUM_CLASSES,
use_drloc=config.TRAIN.USE_DRLOC,
sample_size=config.TRAIN.SAMPLE_SIZE,
drloc_mode=config.TRAIN.DRLOC_MODE,
use_abs=config.TRAIN.USE_ABS
)
elif model_type == 'resnet50':
model = ResNet50(
num_classes=config.MODEL.NUM_CLASSES,
use_drloc=config.TRAIN.USE_DRLOC,
sample_size=config.TRAIN.SAMPLE_SIZE,
drloc_mode=config.TRAIN.DRLOC_MODE,
use_abs=config.TRAIN.USE_ABS
)
elif model_type == "vit":
model = ViT(
image_size=224,
patch_size=16,
num_classes=config.MODEL.NUM_CLASSES,
dim=768,
depth=12,
heads=12,
mlp_dim=3072,
dropout=0.1,
emb_dropout=0,
use_drloc=config.TRAIN.USE_DRLOC,
sample_size=config.TRAIN.SAMPLE_SIZE,
drloc_mode=config.TRAIN.DRLOC_MODE,
use_abs=config.TRAIN.USE_ABS
)
else:
raise NotImplementedError(f"Unkown model: {model_type}")
return model
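# A minimal usage sketch (assumed): the real project builds `config` with yacs;
# here types.SimpleNamespace stands in, supplying only the fields read by the
# 'resnet50' branch, with placeholder values.
def _example_build_resnet50():
    from types import SimpleNamespace
    config = SimpleNamespace(
        MODEL=SimpleNamespace(TYPE="resnet50", NUM_CLASSES=100),
        TRAIN=SimpleNamespace(
            USE_DRLOC=False, SAMPLE_SIZE=32, DRLOC_MODE="l1", USE_ABS=False
        ),
    )
    return build_model(config)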
| 36.978261
| 64
| 0.601411
|
bc73570a26943f2d5d4b2aff1e99f9cc3776ce28
| 2,494
|
py
|
Python
|
Round745/B_Diameter_of_Graph.py
|
tqa236/codeforces
|
81ad7bdb7786455f83d48d59a8884f62ded66caf
|
[
"MIT"
] | null | null | null |
Round745/B_Diameter_of_Graph.py
|
tqa236/codeforces
|
81ad7bdb7786455f83d48d59a8884f62ded66caf
|
[
"MIT"
] | null | null | null |
Round745/B_Diameter_of_Graph.py
|
tqa236/codeforces
|
81ad7bdb7786455f83d48d59a8884f62ded66caf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
from io import BytesIO, IOBase
import math
from collections import Counter
def func(n, m, k):
    # Decide whether a connected simple graph on n vertices with exactly m
    # edges can have diameter strictly less than k - 1.
    if m > n * (n - 1) // 2:
        # More edges than a simple graph on n vertices can hold.
        return "NO"
    if k <= 1:
        # The diameter would have to be negative.
        return "NO"
    if k == 2:
        # Diameter must be 0: only a single vertex with no edges qualifies.
        if n == 1 and m == 0:
            return "YES"
        return "NO"
    if k == 3:
        # Diameter must be at most 1: only the complete graph qualifies.
        if m == n * (n - 1) // 2:
            return "YES"
        return "NO"
    if m < n - 1:
        # Too few edges for the graph to be connected at all.
        return "NO"
    # k >= 4: a star (diameter 2) with any extra edges added works.
    return "YES"
def main():
num_test = int(parse_input())
result = []
for _ in range(num_test):
n, m, k = [int(i) for i in parse_input().split()]
result.append(func(n, m, k))
print("\n".join(map(str, result)))
# region fastio
# BUFSIZE = 8192
# class FastIO(IOBase):
# newlines = 0
# def __init__(self, file):
# self._fd = file.fileno()
# self.buffer = BytesIO()
# self.writable = "x" in file.mode or "r" not in file.mode
# self.write = self.buffer.write if self.writable else None
# def read(self):
# while True:
# b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
# if not b:
# break
# ptr = self.buffer.tell()
# self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
# self.newlines = 0
# return self.buffer.read()
# def readline(self):
# while self.newlines == 0:
# b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
# self.newlines = b.count(b"\n") + (not b)
# ptr = self.buffer.tell()
# self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
# self.newlines -= 1
# return self.buffer.readline()
# def flush(self):
# if self.writable:
# os.write(self._fd, self.buffer.getvalue())
# self.buffer.truncate(0), self.buffer.seek(0)
# class IOWrapper(IOBase):
# def __init__(self, file):
# self.buffer = FastIO(file)
# self.flush = self.buffer.flush
# self.writable = self.buffer.writable
# self.write = lambda s: self.buffer.write(s.encode("ascii"))
# self.read = lambda: self.buffer.read().decode("ascii")
# self.readline = lambda: self.buffer.readline().decode("ascii")
# sys.stdin, sys.stdout = IOWrapper(sys.stdin), IOWrapper(sys.stdout)
parse_input = lambda: sys.stdin.readline().rstrip("\r\n")
# endregion
if __name__ == "__main__":
main()
| 27.108696
| 81
| 0.54571
|
11b0557d9eb8bbbd3d933caeeea88b071fcab8a8
| 11,933
|
py
|
Python
|
src/sagemaker/deserializers.py
|
anirudh2290/sagemaker-python-sdk
|
5b15f3006efe90fbba43da7841ff5f0ad790a78e
|
[
"Apache-2.0"
] | null | null | null |
src/sagemaker/deserializers.py
|
anirudh2290/sagemaker-python-sdk
|
5b15f3006efe90fbba43da7841ff5f0ad790a78e
|
[
"Apache-2.0"
] | 1
|
2019-04-23T19:32:17.000Z
|
2019-04-23T19:32:17.000Z
|
src/sagemaker/deserializers.py
|
anirudh2290/sagemaker-python-sdk
|
5b15f3006efe90fbba43da7841ff5f0ad790a78e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Implements methods for deserializing data returned from an inference endpoint."""
from __future__ import absolute_import
import csv
import abc
import codecs
import io
import json
import numpy as np
from six import with_metaclass
from sagemaker.utils import DeferredError
try:
import pandas
except ImportError as e:
pandas = DeferredError(e)
class BaseDeserializer(abc.ABC):
"""Abstract base class for creation of new deserializers.
Provides a skeleton for customization requiring the overriding of the method
deserialize and the class attribute ACCEPT.
"""
@abc.abstractmethod
def deserialize(self, stream, content_type):
"""Deserialize data received from an inference endpoint.
Args:
stream (botocore.response.StreamingBody): Data to be deserialized.
content_type (str): The MIME type of the data.
Returns:
object: The data deserialized into an object.
"""
@property
@abc.abstractmethod
def ACCEPT(self):
"""The content types that are expected from the inference endpoint."""
class SimpleBaseDeserializer(with_metaclass(abc.ABCMeta, BaseDeserializer)):
"""Abstract base class for creation of new deserializers.
This class extends the API of :class:~`sagemaker.deserializers.BaseDeserializer` with more
user-friendly options for setting the ACCEPT content type header, in situations where it can be
provided at init and freely updated.
"""
def __init__(self, accept="*/*"):
"""Initialize a ``SimpleBaseDeserializer`` instance.
Args:
accept (union[str, tuple[str]]): The MIME type (or tuple of allowable MIME types) that
is expected from the inference endpoint (default: "*/*").
"""
super(SimpleBaseDeserializer, self).__init__()
self.accept = accept
@property
def ACCEPT(self):
"""The tuple of possible content types that are expected from the inference endpoint."""
if isinstance(self.accept, str):
return (self.accept,)
return self.accept
class StringDeserializer(SimpleBaseDeserializer):
"""Deserialize data from an inference endpoint into a decoded string."""
def __init__(self, encoding="UTF-8", accept="application/json"):
"""Initialize a ``StringDeserializer`` instance.
Args:
encoding (str): The string encoding to use (default: UTF-8).
accept (union[str, tuple[str]]): The MIME type (or tuple of allowable MIME types) that
is expected from the inference endpoint (default: "application/json").
"""
super(StringDeserializer, self).__init__(accept=accept)
self.encoding = encoding
def deserialize(self, stream, content_type):
"""Deserialize data from an inference endpoint into a decoded string.
Args:
stream (botocore.response.StreamingBody): Data to be deserialized.
content_type (str): The MIME type of the data.
Returns:
str: The data deserialized into a decoded string.
"""
try:
return stream.read().decode(self.encoding)
finally:
stream.close()
class BytesDeserializer(SimpleBaseDeserializer):
"""Deserialize a stream of bytes into a bytes object."""
def deserialize(self, stream, content_type):
"""Read a stream of bytes returned from an inference endpoint.
Args:
stream (botocore.response.StreamingBody): A stream of bytes.
content_type (str): The MIME type of the data.
Returns:
bytes: The bytes object read from the stream.
"""
try:
return stream.read()
finally:
stream.close()
class CSVDeserializer(SimpleBaseDeserializer):
"""Deserialize a stream of bytes into a list of lists.
Consider using :class:~`sagemaker.deserializers.NumpyDeserializer` or
:class:~`sagemaker.deserializers.PandasDeserializer` instead, if you'd like to convert text/csv
responses directly into other data types.
"""
def __init__(self, encoding="utf-8", accept="text/csv"):
"""Initialize a ``CSVDeserializer`` instance.
Args:
encoding (str): The string encoding to use (default: "utf-8").
accept (union[str, tuple[str]]): The MIME type (or tuple of allowable MIME types) that
is expected from the inference endpoint (default: "text/csv").
"""
super(CSVDeserializer, self).__init__(accept=accept)
self.encoding = encoding
def deserialize(self, stream, content_type):
"""Deserialize data from an inference endpoint into a list of lists.
Args:
stream (botocore.response.StreamingBody): Data to be deserialized.
content_type (str): The MIME type of the data.
Returns:
list: The data deserialized into a list of lists representing the
contents of a CSV file.
"""
try:
decoded_string = stream.read().decode(self.encoding)
return list(csv.reader(decoded_string.splitlines()))
finally:
stream.close()
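# A minimal usage sketch (hypothetical data): io.BytesIO stands in for the
# botocore StreamingBody that a real endpoint returns; both expose read() and
# close(), which is all deserialize() needs.
def _example_csv_deserialize():
    stream = io.BytesIO(b"1,2,3\n4,5,6\n")
    return CSVDeserializer().deserialize(stream, "text/csv")
    # -> [['1', '2', '3'], ['4', '5', '6']]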
class StreamDeserializer(SimpleBaseDeserializer):
"""Directly return the data and content-type received from an inference endpoint.
It is the user's responsibility to close the data stream once they're done
reading it.
"""
def deserialize(self, stream, content_type):
"""Returns a stream of the response body and the MIME type of the data.
Args:
stream (botocore.response.StreamingBody): A stream of bytes.
content_type (str): The MIME type of the data.
Returns:
tuple: A two-tuple containing the stream and content-type.
"""
return stream, content_type
class NumpyDeserializer(SimpleBaseDeserializer):
"""Deserialize a stream of data in .npy or UTF-8 CSV/JSON format to a numpy array."""
def __init__(self, dtype=None, accept="application/x-npy", allow_pickle=True):
"""Initialize a ``NumpyDeserializer`` instance.
Args:
dtype (str): The dtype of the data (default: None).
accept (union[str, tuple[str]]): The MIME type (or tuple of allowable MIME types) that
is expected from the inference endpoint (default: "application/x-npy").
allow_pickle (bool): Allow loading pickled object arrays (default: True).
"""
super(NumpyDeserializer, self).__init__(accept=accept)
self.dtype = dtype
self.allow_pickle = allow_pickle
def deserialize(self, stream, content_type):
"""Deserialize data from an inference endpoint into a NumPy array.
Args:
stream (botocore.response.StreamingBody): Data to be deserialized.
content_type (str): The MIME type of the data.
Returns:
numpy.ndarray: The data deserialized into a NumPy array.
"""
try:
if content_type == "text/csv":
return np.genfromtxt(
codecs.getreader("utf-8")(stream), delimiter=",", dtype=self.dtype
)
if content_type == "application/json":
return np.array(json.load(codecs.getreader("utf-8")(stream)), dtype=self.dtype)
if content_type == "application/x-npy":
return np.load(io.BytesIO(stream.read()), allow_pickle=self.allow_pickle)
finally:
stream.close()
raise ValueError("%s cannot read content type %s." % (__class__.__name__, content_type))
class JSONDeserializer(SimpleBaseDeserializer):
"""Deserialize JSON data from an inference endpoint into a Python object."""
def __init__(self, accept="application/json"):
"""Initialize a ``JSONDeserializer`` instance.
Args:
accept (union[str, tuple[str]]): The MIME type (or tuple of allowable MIME types) that
is expected from the inference endpoint (default: "application/json").
"""
super(JSONDeserializer, self).__init__(accept=accept)
def deserialize(self, stream, content_type):
"""Deserialize JSON data from an inference endpoint into a Python object.
Args:
stream (botocore.response.StreamingBody): Data to be deserialized.
content_type (str): The MIME type of the data.
Returns:
object: The JSON-formatted data deserialized into a Python object.
"""
try:
return json.load(codecs.getreader("utf-8")(stream))
finally:
stream.close()
class PandasDeserializer(SimpleBaseDeserializer):
"""Deserialize CSV or JSON data from an inference endpoint into a pandas dataframe."""
def __init__(self, accept=("text/csv", "application/json")):
"""Initialize a ``PandasDeserializer`` instance.
Args:
accept (union[str, tuple[str]]): The MIME type (or tuple of allowable MIME types) that
is expected from the inference endpoint (default: ("text/csv","application/json")).
"""
super(PandasDeserializer, self).__init__(accept=accept)
def deserialize(self, stream, content_type):
"""Deserialize CSV or JSON data from an inference endpoint into a pandas
dataframe.
If the data is JSON, the data should be formatted in the 'columns' orient.
See https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_json.html
Args:
stream (botocore.response.StreamingBody): Data to be deserialized.
content_type (str): The MIME type of the data.
Returns:
pandas.DataFrame: The data deserialized into a pandas DataFrame.
"""
if content_type == "text/csv":
return pandas.read_csv(stream)
if content_type == "application/json":
return pandas.read_json(stream)
raise ValueError("%s cannot read content type %s." % (__class__.__name__, content_type))
class JSONLinesDeserializer(SimpleBaseDeserializer):
"""Deserialize JSON lines data from an inference endpoint."""
def __init__(self, accept="application/jsonlines"):
"""Initialize a ``JSONLinesDeserializer`` instance.
Args:
accept (union[str, tuple[str]]): The MIME type (or tuple of allowable MIME types) that
is expected from the inference endpoint (default: ("text/csv","application/json")).
"""
super(JSONLinesDeserializer, self).__init__(accept=accept)
def deserialize(self, stream, content_type):
"""Deserialize JSON lines data from an inference endpoint.
See https://docs.python.org/3/library/json.html#py-to-json-table to
understand how JSON values are converted to Python objects.
Args:
stream (botocore.response.StreamingBody): Data to be deserialized.
content_type (str): The MIME type of the data.
Returns:
list: A list of JSON serializable objects.
"""
try:
body = stream.read().decode("utf-8")
lines = body.rstrip().split("\n")
return [json.loads(line) for line in lines]
finally:
stream.close()
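# A minimal usage sketch (hypothetical data), in the same spirit as the CSV
# example above: each line of the body is parsed as a separate JSON document.
def _example_jsonlines_deserialize():
    stream = io.BytesIO(b'{"a": 1}\n{"a": 2}\n')
    return JSONLinesDeserializer().deserialize(stream, "application/jsonlines")
    # -> [{'a': 1}, {'a': 2}]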
| 36.604294
| 99
| 0.648957
|
c831a8d0b74e99855c85b735037b0fe584856f1b
| 37,917
|
py
|
Python
|
azure/mgmt/network/v2017_06_01/operations/network_interfaces_operations.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 1
|
2022-01-25T22:52:58.000Z
|
2022-01-25T22:52:58.000Z
|
azure/mgmt/network/v2017_06_01/operations/network_interfaces_operations.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
azure/mgmt/network/v2017_06_01/operations/network_interfaces_operations.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class NetworkInterfacesOperations(object):
"""NetworkInterfacesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def delete(
self, resource_group_name, network_interface_name, custom_headers=None, raw=False, **operation_config):
"""Deletes the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns None or
ClientRawResponse if raw=true
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = self._client.delete(url, query_parameters)
return self._client.send(request, header_parameters, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
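    # A minimal usage sketch (assumed wiring: the generated NetworkManagementClient
    # exposes this class as `client.network_interfaces`):
    #
    #     poller = client.network_interfaces.delete("my-rg", "my-nic")
    #     poller.wait()  # block until the long-running delete finishes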
def get(
self, resource_group_name, network_interface_name, expand=None, custom_headers=None, raw=False, **operation_config):
"""Gets information about the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: NetworkInterface or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2017_06_01.models.NetworkInterface or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, network_interface_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Creates or updates a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to the create or update network
interface operation.
:type parameters:
~azure.mgmt.network.v2017_06_01.models.NetworkInterface
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns
NetworkInterface or ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2017_06_01.models.NetworkInterface]
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'NetworkInterface')
# Construct and send request
def long_running_send():
request = self._client.put(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkInterface', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def list_all(
self, custom_headers=None, raw=False, **operation_config):
"""Gets all network interfaces in a subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of NetworkInterface
:rtype:
~azure.mgmt.network.v2017_06_01.models.NetworkInterfacePaged[~azure.mgmt.network.v2017_06_01.models.NetworkInterface]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def list(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Gets all network interfaces in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of NetworkInterface
:rtype:
~azure.mgmt.network.v2017_06_01.models.NetworkInterfacePaged[~azure.mgmt.network.v2017_06_01.models.NetworkInterface]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def get_effective_route_table(
self, resource_group_name, network_interface_name, custom_headers=None, raw=False, **operation_config):
"""Gets all route tables applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns
EffectiveRouteListResult or ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2017_06_01.models.EffectiveRouteListResult]
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = self._client.post(url, query_parameters)
return self._client.send(request, header_parameters, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveRouteListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
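# Illustrative usage sketch only (hypothetical resource names): the
# AzureOperationPoller returned above exposes result(), which blocks until the
# long-running operation completes and returns the EffectiveRouteListResult.
#
#     poller = network_client.network_interfaces.get_effective_route_table(
#         'my-resource-group', 'my-nic')
#     effective_routes = poller.result()
#     for route in effective_routes.value:
#         print(route.next_hop_type, route.address_prefix)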
def list_effective_network_security_groups(
self, resource_group_name, network_interface_name, custom_headers=None, raw=False, **operation_config):
"""Gets all network security groups applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns
EffectiveNetworkSecurityGroupListResult or ClientRawResponse if
raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2017_06_01.models.EffectiveNetworkSecurityGroupListResult]
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-06-01"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = self._client.post(url, query_parameters)
return self._client.send(request, header_parameters, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def list_virtual_machine_scale_set_vm_network_interfaces(
self, resource_group_name, virtual_machine_scale_set_name, virtualmachine_index, custom_headers=None, raw=False, **operation_config):
"""Gets information about all network interfaces in a virtual machine in a
virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine
scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of NetworkInterface
:rtype:
~azure.mgmt.network.v2017_06_01.models.NetworkInterfacePaged[~azure.mgmt.network.v2017_06_01.models.NetworkInterface]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-03-30"
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def list_virtual_machine_scale_set_network_interfaces(
self, resource_group_name, virtual_machine_scale_set_name, custom_headers=None, raw=False, **operation_config):
"""Gets all network interfaces in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine
scale set.
:type virtual_machine_scale_set_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of NetworkInterface
:rtype:
~azure.mgmt.network.v2017_06_01.models.NetworkInterfacePaged[~azure.mgmt.network.v2017_06_01.models.NetworkInterface]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-03-30"
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def get_virtual_machine_scale_set_network_interface(
self, resource_group_name, virtual_machine_scale_set_name, virtualmachine_index, network_interface_name, expand=None, custom_headers=None, raw=False, **operation_config):
"""Get the specified network interface in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine
scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: NetworkInterface or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2017_06_01.models.NetworkInterface or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
api_version = "2017-03-30"
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
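# Illustrative usage sketch only (hypothetical resource names): fetching a
# single NIC of a virtual machine scale set instance; passing raw=True would
# return a ClientRawResponse instead of the deserialized NetworkInterface.
#
#     nic = network_client.network_interfaces.get_virtual_machine_scale_set_network_interface(
#         'my-resource-group', 'my-vmss', '0', 'my-vmss-nic')
#     print(nic.primary, nic.mac_address)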
avg_line_length: 46.753391 | max_line_length: 242 | alphanum_fraction: 0.667326

hexsha: a240b024a352a4aa72537dbee8b1bc60599a36c6 | size: 466 | ext: py | lang: Python
max_stars_repo_path: env/lib/python3.8/site-packages/plotly/validators/histogram/error_y/_visible.py
max_stars_repo_name: acrucetta/Chicago_COVI_WebApp | max_stars_repo_head_hexsha: a37c9f492a20dcd625f8647067394617988de913
max_stars_repo_licenses: ["MIT", "Unlicense"] | max_stars_count: 76 | max_stars_repo_stars_event_min_datetime: 2020-07-06T14:44:05.000Z | max_stars_repo_stars_event_max_datetime: 2022-02-14T15:30:21.000Z
max_issues_repo_path: env/lib/python3.8/site-packages/plotly/validators/histogram/error_y/_visible.py
max_issues_repo_name: acrucetta/Chicago_COVI_WebApp | max_issues_repo_head_hexsha: a37c9f492a20dcd625f8647067394617988de913
max_issues_repo_licenses: ["MIT", "Unlicense"] | max_issues_count: 11 | max_issues_repo_issues_event_min_datetime: 2020-08-09T02:30:14.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-12T00:50:14.000Z
max_forks_repo_path: env/lib/python3.8/site-packages/plotly/validators/histogram/error_y/_visible.py
max_forks_repo_name: acrucetta/Chicago_COVI_WebApp | max_forks_repo_head_hexsha: a37c9f492a20dcd625f8647067394617988de913
max_forks_repo_licenses: ["MIT", "Unlicense"] | max_forks_count: 11 | max_forks_repo_forks_event_min_datetime: 2020-07-12T16:18:07.000Z | max_forks_repo_forks_event_max_datetime: 2022-02-05T16:48:35.000Z
import _plotly_utils.basevalidators
class VisibleValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self, plotly_name="visible", parent_name="histogram.error_y", **kwargs
):
super(VisibleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "info"),
**kwargs
)
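# Minimal usage sketch (not part of the generated module; hedged on the
# BooleanValidator API in _plotly_utils.basevalidators, whose validate_coerce()
# returns valid boolean values unchanged and raises ValueError otherwise).
if __name__ == "__main__":
    _validator = VisibleValidator()
    assert _validator.validate_coerce(True) is True
    assert _validator.validate_coerce(False) is False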
avg_line_length: 31.066667 | max_line_length: 78 | alphanum_fraction: 0.639485

hexsha: 69cc323696969cf9fcdd8535a0c0a63de57a7981 | size: 125 | ext: py | lang: Python
max_stars_repo_path: app/engineer_django/main/gen.py
max_stars_repo_name: ministre/engineer | max_stars_repo_head_hexsha: c23361527307f1a031a8d1ed658344b0ae0e8b89
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: app/engineer_django/main/gen.py
max_issues_repo_name: ministre/engineer | max_issues_repo_head_hexsha: c23361527307f1a031a8d1ed658344b0ae0e8b89
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: app/engineer_django/main/gen.py
max_forks_repo_name: ministre/engineer | max_forks_repo_head_hexsha: c23361527307f1a031a8d1ed658344b0ae0e8b89
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
# there should be a proprietary function to generate the password; 'foo' below is only a placeholder
def dynamic_pass(manufacturer: str, sn: str):
return 'foo'
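# Hypothetical sketch only: one deterministic way such a password could be
# derived from the manufacturer and serial number (an HMAC over both values).
# This is NOT the proprietary algorithm the comment above refers to, and the
# key below is an invented placeholder.
import hashlib
import hmac

_EXAMPLE_KEY = b"replace-with-vendor-secret"  # placeholder secret, illustration only
def example_dynamic_pass(manufacturer: str, sn: str) -> str:
    digest = hmac.new(_EXAMPLE_KEY, f"{manufacturer}:{sn}".encode("utf-8"), hashlib.sha256)
    return digest.hexdigest()[:16]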
avg_line_length: 31.25 | max_line_length: 61 | alphanum_fraction: 0.76

hexsha: a764d7f5fc62253229cf17b7bdaa4e391f2e9af3 | size: 92,625 | ext: py | lang: Python
max_stars_repo_path: sdk/python/pulumi_azure_nextgen/servicefabric/v20200301/outputs.py
max_stars_repo_name: test-wiz-sec/pulumi-azure-nextgen | max_stars_repo_head_hexsha: 20a695af0d020b34b0f1c336e1b69702755174cc
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: sdk/python/pulumi_azure_nextgen/servicefabric/v20200301/outputs.py
max_issues_repo_name: test-wiz-sec/pulumi-azure-nextgen | max_issues_repo_head_hexsha: 20a695af0d020b34b0f1c336e1b69702755174cc
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: sdk/python/pulumi_azure_nextgen/servicefabric/v20200301/outputs.py
max_forks_repo_name: test-wiz-sec/pulumi-azure-nextgen | max_forks_repo_head_hexsha: 20a695af0d020b34b0f1c336e1b69702755174cc
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'ApplicationDeltaHealthPolicyResponse',
'ApplicationHealthPolicyResponse',
'ApplicationMetricDescriptionResponse',
'ApplicationTypeVersionsCleanupPolicyResponse',
'ApplicationUpgradePolicyResponse',
'ApplicationUserAssignedIdentityResponse',
'ArmApplicationHealthPolicyResponse',
'ArmRollingUpgradeMonitoringPolicyResponse',
'ArmServiceTypeHealthPolicyResponse',
'AzureActiveDirectoryResponse',
'CertificateDescriptionResponse',
'ClientCertificateCommonNameResponse',
'ClientCertificateThumbprintResponse',
'ClusterHealthPolicyResponse',
'ClusterUpgradeDeltaHealthPolicyResponse',
'ClusterUpgradePolicyResponse',
'ClusterVersionDetailsResponse',
'DiagnosticsStorageAccountConfigResponse',
'EndpointRangeDescriptionResponse',
'ManagedIdentityResponse',
'NamedPartitionSchemeDescriptionResponse',
'NodeTypeDescriptionResponse',
'ServerCertificateCommonNameResponse',
'ServerCertificateCommonNamesResponse',
'ServiceCorrelationDescriptionResponse',
'ServiceLoadMetricDescriptionResponse',
'ServicePlacementPolicyDescriptionResponse',
'ServiceTypeDeltaHealthPolicyResponse',
'ServiceTypeHealthPolicyResponse',
'SettingsParameterDescriptionResponse',
'SettingsSectionDescriptionResponse',
'SingletonPartitionSchemeDescriptionResponse',
'UniformInt64RangePartitionSchemeDescriptionResponse',
'UserAssignedIdentityResponse',
]
@pulumi.output_type
class ApplicationDeltaHealthPolicyResponse(dict):
"""
Defines a delta health policy used to evaluate the health of an application or one of its child entities when upgrading the cluster.
"""
def __init__(__self__, *,
default_service_type_delta_health_policy: Optional['outputs.ServiceTypeDeltaHealthPolicyResponse'] = None,
service_type_delta_health_policies: Optional[Mapping[str, 'outputs.ServiceTypeDeltaHealthPolicyResponse']] = None):
"""
Defines a delta health policy used to evaluate the health of an application or one of its child entities when upgrading the cluster.
:param 'ServiceTypeDeltaHealthPolicyResponseArgs' default_service_type_delta_health_policy: The delta health policy used by default to evaluate the health of a service type when upgrading the cluster.
:param Mapping[str, 'ServiceTypeDeltaHealthPolicyResponseArgs'] service_type_delta_health_policies: The map with service type delta health policy per service type name. The map is empty by default.
"""
if default_service_type_delta_health_policy is not None:
pulumi.set(__self__, "default_service_type_delta_health_policy", default_service_type_delta_health_policy)
if service_type_delta_health_policies is not None:
pulumi.set(__self__, "service_type_delta_health_policies", service_type_delta_health_policies)
@property
@pulumi.getter(name="defaultServiceTypeDeltaHealthPolicy")
def default_service_type_delta_health_policy(self) -> Optional['outputs.ServiceTypeDeltaHealthPolicyResponse']:
"""
The delta health policy used by default to evaluate the health of a service type when upgrading the cluster.
"""
return pulumi.get(self, "default_service_type_delta_health_policy")
@property
@pulumi.getter(name="serviceTypeDeltaHealthPolicies")
def service_type_delta_health_policies(self) -> Optional[Mapping[str, 'outputs.ServiceTypeDeltaHealthPolicyResponse']]:
"""
The map with service type delta health policy per service type name. The map is empty by default.
"""
return pulumi.get(self, "service_type_delta_health_policies")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationHealthPolicyResponse(dict):
"""
Defines a health policy used to evaluate the health of an application or one of its child entities.
"""
def __init__(__self__, *,
default_service_type_health_policy: Optional['outputs.ServiceTypeHealthPolicyResponse'] = None,
service_type_health_policies: Optional[Mapping[str, 'outputs.ServiceTypeHealthPolicyResponse']] = None):
"""
Defines a health policy used to evaluate the health of an application or one of its child entities.
:param 'ServiceTypeHealthPolicyResponseArgs' default_service_type_health_policy: The health policy used by default to evaluate the health of a service type.
:param Mapping[str, 'ServiceTypeHealthPolicyResponseArgs'] service_type_health_policies: The map with service type health policy per service type name. The map is empty by default.
"""
if default_service_type_health_policy is not None:
pulumi.set(__self__, "default_service_type_health_policy", default_service_type_health_policy)
if service_type_health_policies is not None:
pulumi.set(__self__, "service_type_health_policies", service_type_health_policies)
@property
@pulumi.getter(name="defaultServiceTypeHealthPolicy")
def default_service_type_health_policy(self) -> Optional['outputs.ServiceTypeHealthPolicyResponse']:
"""
The health policy used by default to evaluate the health of a service type.
"""
return pulumi.get(self, "default_service_type_health_policy")
@property
@pulumi.getter(name="serviceTypeHealthPolicies")
def service_type_health_policies(self) -> Optional[Mapping[str, 'outputs.ServiceTypeHealthPolicyResponse']]:
"""
The map with service type health policy per service type name. The map is empty by default.
"""
return pulumi.get(self, "service_type_health_policies")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationMetricDescriptionResponse(dict):
"""
Describes capacity information for a custom resource balancing metric. This can be used to limit the total consumption of this metric by the services of this application.
"""
def __init__(__self__, *,
maximum_capacity: Optional[int] = None,
name: Optional[str] = None,
reservation_capacity: Optional[int] = None,
total_application_capacity: Optional[int] = None):
"""
Describes capacity information for a custom resource balancing metric. This can be used to limit the total consumption of this metric by the services of this application.
:param int maximum_capacity: The maximum node capacity for Service Fabric application.
This is the maximum Load for an instance of this application on a single node. Even if the capacity of node is greater than this value, Service Fabric will limit the total load of services within the application on each node to this value.
If set to zero, capacity for this metric is unlimited on each node.
When creating a new application with application capacity defined, the product of MaximumNodes and this value must always be smaller than or equal to TotalApplicationCapacity.
When updating existing application with application capacity, the product of MaximumNodes and this value must always be smaller than or equal to TotalApplicationCapacity.
:param str name: The name of the metric.
:param int reservation_capacity: The node reservation capacity for Service Fabric application.
This is the amount of load which is reserved on nodes which have instances of this application.
If MinimumNodes is specified, then the product of these values will be the capacity reserved in the cluster for the application.
If set to zero, no capacity is reserved for this metric.
When setting application capacity or when updating application capacity; this value must be smaller than or equal to MaximumCapacity for each metric.
:param int total_application_capacity: The total metric capacity for Service Fabric application.
This is the total metric capacity for this application in the cluster. Service Fabric will try to limit the sum of loads of services within the application to this value.
When creating a new application with application capacity defined, the product of MaximumNodes and MaximumCapacity must always be smaller than or equal to this value.
"""
if maximum_capacity is not None:
pulumi.set(__self__, "maximum_capacity", maximum_capacity)
if name is not None:
pulumi.set(__self__, "name", name)
if reservation_capacity is not None:
pulumi.set(__self__, "reservation_capacity", reservation_capacity)
if total_application_capacity is not None:
pulumi.set(__self__, "total_application_capacity", total_application_capacity)
@property
@pulumi.getter(name="maximumCapacity")
def maximum_capacity(self) -> Optional[int]:
"""
The maximum node capacity for Service Fabric application.
This is the maximum Load for an instance of this application on a single node. Even if the capacity of node is greater than this value, Service Fabric will limit the total load of services within the application on each node to this value.
If set to zero, capacity for this metric is unlimited on each node.
When creating a new application with application capacity defined, the product of MaximumNodes and this value must always be smaller than or equal to TotalApplicationCapacity.
When updating existing application with application capacity, the product of MaximumNodes and this value must always be smaller than or equal to TotalApplicationCapacity.
"""
return pulumi.get(self, "maximum_capacity")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the metric.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="reservationCapacity")
def reservation_capacity(self) -> Optional[int]:
"""
The node reservation capacity for Service Fabric application.
This is the amount of load which is reserved on nodes which have instances of this application.
If MinimumNodes is specified, then the product of these values will be the capacity reserved in the cluster for the application.
If set to zero, no capacity is reserved for this metric.
When setting application capacity or when updating application capacity; this value must be smaller than or equal to MaximumCapacity for each metric.
"""
return pulumi.get(self, "reservation_capacity")
@property
@pulumi.getter(name="totalApplicationCapacity")
def total_application_capacity(self) -> Optional[int]:
"""
The total metric capacity for Service Fabric application.
This is the total metric capacity for this application in the cluster. Service Fabric will try to limit the sum of loads of services within the application to this value.
When creating a new application with application capacity defined, the product of MaximumNodes and MaximumCapacity must always be smaller than or equal to this value.
"""
return pulumi.get(self, "total_application_capacity")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
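# Illustrative sketch (hypothetical helper, not part of the generated SDK): the
# capacity rule spelled out in the docstrings above, i.e. the product of
# MaximumNodes and MaximumCapacity must not exceed TotalApplicationCapacity.
def _example_capacity_is_consistent(maximum_nodes: int,
                                    maximum_capacity: int,
                                    total_application_capacity: int) -> bool:
    return maximum_nodes * maximum_capacity <= total_application_capacity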
@pulumi.output_type
class ApplicationTypeVersionsCleanupPolicyResponse(dict):
def __init__(__self__, *,
max_unused_versions_to_keep: int):
"""
:param int max_unused_versions_to_keep: Number of unused versions per application type to keep.
"""
pulumi.set(__self__, "max_unused_versions_to_keep", max_unused_versions_to_keep)
@property
@pulumi.getter(name="maxUnusedVersionsToKeep")
def max_unused_versions_to_keep(self) -> int:
"""
Number of unused versions per application type to keep.
"""
return pulumi.get(self, "max_unused_versions_to_keep")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationUpgradePolicyResponse(dict):
"""
Describes the policy for a monitored application upgrade.
"""
def __init__(__self__, *,
application_health_policy: Optional['outputs.ArmApplicationHealthPolicyResponse'] = None,
force_restart: Optional[bool] = None,
rolling_upgrade_monitoring_policy: Optional['outputs.ArmRollingUpgradeMonitoringPolicyResponse'] = None,
upgrade_mode: Optional[str] = None,
upgrade_replica_set_check_timeout: Optional[str] = None):
"""
Describes the policy for a monitored application upgrade.
:param 'ArmApplicationHealthPolicyResponseArgs' application_health_policy: Defines a health policy used to evaluate the health of an application or one of its child entities.
:param bool force_restart: If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data).
:param 'ArmRollingUpgradeMonitoringPolicyResponseArgs' rolling_upgrade_monitoring_policy: The policy used for monitoring the application upgrade
:param str upgrade_mode: The mode used to monitor health during a rolling upgrade. The values are UnmonitoredAuto, UnmonitoredManual, and Monitored.
:param str upgrade_replica_set_check_timeout: The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. Valid values are between 0 and 4294967295 inclusive (unsigned 32-bit integer).
"""
if application_health_policy is not None:
pulumi.set(__self__, "application_health_policy", application_health_policy)
if force_restart is not None:
pulumi.set(__self__, "force_restart", force_restart)
if rolling_upgrade_monitoring_policy is not None:
pulumi.set(__self__, "rolling_upgrade_monitoring_policy", rolling_upgrade_monitoring_policy)
if upgrade_mode is not None:
pulumi.set(__self__, "upgrade_mode", upgrade_mode)
if upgrade_replica_set_check_timeout is not None:
pulumi.set(__self__, "upgrade_replica_set_check_timeout", upgrade_replica_set_check_timeout)
@property
@pulumi.getter(name="applicationHealthPolicy")
def application_health_policy(self) -> Optional['outputs.ArmApplicationHealthPolicyResponse']:
"""
Defines a health policy used to evaluate the health of an application or one of its child entities.
"""
return pulumi.get(self, "application_health_policy")
@property
@pulumi.getter(name="forceRestart")
def force_restart(self) -> Optional[bool]:
"""
If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data).
"""
return pulumi.get(self, "force_restart")
@property
@pulumi.getter(name="rollingUpgradeMonitoringPolicy")
def rolling_upgrade_monitoring_policy(self) -> Optional['outputs.ArmRollingUpgradeMonitoringPolicyResponse']:
"""
The policy used for monitoring the application upgrade
"""
return pulumi.get(self, "rolling_upgrade_monitoring_policy")
@property
@pulumi.getter(name="upgradeMode")
def upgrade_mode(self) -> Optional[str]:
"""
The mode used to monitor health during a rolling upgrade. The values are UnmonitoredAuto, UnmonitoredManual, and Monitored.
"""
return pulumi.get(self, "upgrade_mode")
@property
@pulumi.getter(name="upgradeReplicaSetCheckTimeout")
def upgrade_replica_set_check_timeout(self) -> Optional[str]:
"""
The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. Valid values are between 0 and 4294967295 inclusive (unsigned 32-bit integer).
"""
return pulumi.get(self, "upgrade_replica_set_check_timeout")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationUserAssignedIdentityResponse(dict):
def __init__(__self__, *,
name: str,
principal_id: str):
"""
:param str name: The friendly name of user assigned identity.
:param str principal_id: The principal id of user assigned identity.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "principal_id", principal_id)
@property
@pulumi.getter
def name(self) -> str:
"""
The friendly name of user assigned identity.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal id of user assigned identity.
"""
return pulumi.get(self, "principal_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ArmApplicationHealthPolicyResponse(dict):
"""
Defines a health policy used to evaluate the health of an application or one of its child entities.
"""
def __init__(__self__, *,
consider_warning_as_error: Optional[bool] = None,
default_service_type_health_policy: Optional['outputs.ArmServiceTypeHealthPolicyResponse'] = None,
max_percent_unhealthy_deployed_applications: Optional[int] = None,
service_type_health_policy_map: Optional[Mapping[str, 'outputs.ArmServiceTypeHealthPolicyResponse']] = None):
"""
Defines a health policy used to evaluate the health of an application or one of its child entities.
:param bool consider_warning_as_error: Indicates whether warnings are treated with the same severity as errors.
:param 'ArmServiceTypeHealthPolicyResponseArgs' default_service_type_health_policy: The health policy used by default to evaluate the health of a service type.
:param int max_percent_unhealthy_deployed_applications: The maximum allowed percentage of unhealthy deployed applications. Allowed values are Byte values from zero to 100.
The percentage represents the maximum tolerated percentage of deployed applications that can be unhealthy before the application is considered in error.
This is calculated by dividing the number of unhealthy deployed applications over the number of nodes where the application is currently deployed on in the cluster.
The computation rounds up to tolerate one failure on small numbers of nodes. Default percentage is zero.
:param Mapping[str, 'ArmServiceTypeHealthPolicyResponseArgs'] service_type_health_policy_map: The map with service type health policy per service type name. The map is empty by default.
"""
if consider_warning_as_error is not None:
pulumi.set(__self__, "consider_warning_as_error", consider_warning_as_error)
if default_service_type_health_policy is not None:
pulumi.set(__self__, "default_service_type_health_policy", default_service_type_health_policy)
if max_percent_unhealthy_deployed_applications is not None:
pulumi.set(__self__, "max_percent_unhealthy_deployed_applications", max_percent_unhealthy_deployed_applications)
if service_type_health_policy_map is not None:
pulumi.set(__self__, "service_type_health_policy_map", service_type_health_policy_map)
@property
@pulumi.getter(name="considerWarningAsError")
def consider_warning_as_error(self) -> Optional[bool]:
"""
Indicates whether warnings are treated with the same severity as errors.
"""
return pulumi.get(self, "consider_warning_as_error")
@property
@pulumi.getter(name="defaultServiceTypeHealthPolicy")
def default_service_type_health_policy(self) -> Optional['outputs.ArmServiceTypeHealthPolicyResponse']:
"""
The health policy used by default to evaluate the health of a service type.
"""
return pulumi.get(self, "default_service_type_health_policy")
@property
@pulumi.getter(name="maxPercentUnhealthyDeployedApplications")
def max_percent_unhealthy_deployed_applications(self) -> Optional[int]:
"""
The maximum allowed percentage of unhealthy deployed applications. Allowed values are Byte values from zero to 100.
The percentage represents the maximum tolerated percentage of deployed applications that can be unhealthy before the application is considered in error.
This is calculated by dividing the number of unhealthy deployed applications over the number of nodes where the application is currently deployed on in the cluster.
The computation rounds up to tolerate one failure on small numbers of nodes. Default percentage is zero.
"""
return pulumi.get(self, "max_percent_unhealthy_deployed_applications")
@property
@pulumi.getter(name="serviceTypeHealthPolicyMap")
def service_type_health_policy_map(self) -> Optional[Mapping[str, 'outputs.ArmServiceTypeHealthPolicyResponse']]:
"""
The map with service type health policy per service type name. The map is empty by default.
"""
return pulumi.get(self, "service_type_health_policy_map")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ArmRollingUpgradeMonitoringPolicyResponse(dict):
"""
The policy used for monitoring the application upgrade
"""
def __init__(__self__, *,
failure_action: Optional[str] = None,
health_check_retry_timeout: Optional[str] = None,
health_check_stable_duration: Optional[str] = None,
health_check_wait_duration: Optional[str] = None,
upgrade_domain_timeout: Optional[str] = None,
upgrade_timeout: Optional[str] = None):
"""
The policy used for monitoring the application upgrade
:param str failure_action: The compensating action to perform when a Monitored upgrade encounters monitoring policy or health policy violations (for example, Rollback or Manual).
:param str health_check_retry_timeout: The amount of time to retry health evaluation when the application or cluster is unhealthy before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
:param str health_check_stable_duration: The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
:param str health_check_wait_duration: The amount of time to wait after completing an upgrade domain before applying health policies. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
:param str upgrade_domain_timeout: The amount of time each upgrade domain has to complete before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
:param str upgrade_timeout: The amount of time the overall upgrade has to complete before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
if failure_action is not None:
pulumi.set(__self__, "failure_action", failure_action)
if health_check_retry_timeout is not None:
pulumi.set(__self__, "health_check_retry_timeout", health_check_retry_timeout)
if health_check_stable_duration is not None:
pulumi.set(__self__, "health_check_stable_duration", health_check_stable_duration)
if health_check_wait_duration is not None:
pulumi.set(__self__, "health_check_wait_duration", health_check_wait_duration)
if upgrade_domain_timeout is not None:
pulumi.set(__self__, "upgrade_domain_timeout", upgrade_domain_timeout)
if upgrade_timeout is not None:
pulumi.set(__self__, "upgrade_timeout", upgrade_timeout)
@property
@pulumi.getter(name="failureAction")
def failure_action(self) -> Optional[str]:
"""
The compensating action to perform when a Monitored upgrade encounters monitoring policy or health policy violations (for example, Rollback or Manual).
"""
return pulumi.get(self, "failure_action")
@property
@pulumi.getter(name="healthCheckRetryTimeout")
def health_check_retry_timeout(self) -> Optional[str]:
"""
The amount of time to retry health evaluation when the application or cluster is unhealthy before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "health_check_retry_timeout")
@property
@pulumi.getter(name="healthCheckStableDuration")
def health_check_stable_duration(self) -> Optional[str]:
"""
The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "health_check_stable_duration")
@property
@pulumi.getter(name="healthCheckWaitDuration")
def health_check_wait_duration(self) -> Optional[str]:
"""
The amount of time to wait after completing an upgrade domain before applying health policies. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "health_check_wait_duration")
@property
@pulumi.getter(name="upgradeDomainTimeout")
def upgrade_domain_timeout(self) -> Optional[str]:
"""
The amount of time each upgrade domain has to complete before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "upgrade_domain_timeout")
@property
@pulumi.getter(name="upgradeTimeout")
def upgrade_timeout(self) -> Optional[str]:
"""
The amount of time the overall upgrade has to complete before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "upgrade_timeout")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ArmServiceTypeHealthPolicyResponse(dict):
"""
Represents the health policy used to evaluate the health of services belonging to a service type.
"""
def __init__(__self__, *,
max_percent_unhealthy_partitions_per_service: Optional[int] = None,
max_percent_unhealthy_replicas_per_partition: Optional[int] = None,
max_percent_unhealthy_services: Optional[int] = None):
"""
Represents the health policy used to evaluate the health of services belonging to a service type.
:param int max_percent_unhealthy_partitions_per_service: The maximum percentage of partitions per service allowed to be unhealthy before your application is considered in error.
:param int max_percent_unhealthy_replicas_per_partition: The maximum percentage of replicas per partition allowed to be unhealthy before your application is considered in error.
:param int max_percent_unhealthy_services: The maximum percentage of services allowed to be unhealthy before your application is considered in error.
"""
if max_percent_unhealthy_partitions_per_service is not None:
pulumi.set(__self__, "max_percent_unhealthy_partitions_per_service", max_percent_unhealthy_partitions_per_service)
if max_percent_unhealthy_replicas_per_partition is not None:
pulumi.set(__self__, "max_percent_unhealthy_replicas_per_partition", max_percent_unhealthy_replicas_per_partition)
if max_percent_unhealthy_services is not None:
pulumi.set(__self__, "max_percent_unhealthy_services", max_percent_unhealthy_services)
@property
@pulumi.getter(name="maxPercentUnhealthyPartitionsPerService")
def max_percent_unhealthy_partitions_per_service(self) -> Optional[int]:
"""
The maximum percentage of partitions per service allowed to be unhealthy before your application is considered in error.
"""
return pulumi.get(self, "max_percent_unhealthy_partitions_per_service")
@property
@pulumi.getter(name="maxPercentUnhealthyReplicasPerPartition")
def max_percent_unhealthy_replicas_per_partition(self) -> Optional[int]:
"""
The maximum percentage of replicas per partition allowed to be unhealthy before your application is considered in error.
"""
return pulumi.get(self, "max_percent_unhealthy_replicas_per_partition")
@property
@pulumi.getter(name="maxPercentUnhealthyServices")
def max_percent_unhealthy_services(self) -> Optional[int]:
"""
The maximum percentage of services allowed to be unhealthy before your application is considered in error.
"""
return pulumi.get(self, "max_percent_unhealthy_services")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AzureActiveDirectoryResponse(dict):
"""
The settings to enable AAD authentication on the cluster.
"""
def __init__(__self__, *,
client_application: Optional[str] = None,
cluster_application: Optional[str] = None,
tenant_id: Optional[str] = None):
"""
The settings to enable AAD authentication on the cluster.
:param str client_application: Azure active directory client application id.
:param str cluster_application: Azure active directory cluster application id.
:param str tenant_id: Azure active directory tenant id.
"""
if client_application is not None:
pulumi.set(__self__, "client_application", client_application)
if cluster_application is not None:
pulumi.set(__self__, "cluster_application", cluster_application)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter(name="clientApplication")
def client_application(self) -> Optional[str]:
"""
Azure active directory client application id.
"""
return pulumi.get(self, "client_application")
@property
@pulumi.getter(name="clusterApplication")
def cluster_application(self) -> Optional[str]:
"""
Azure active directory cluster application id.
"""
return pulumi.get(self, "cluster_application")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[str]:
"""
Azure active directory tenant id.
"""
return pulumi.get(self, "tenant_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class CertificateDescriptionResponse(dict):
"""
Describes the certificate details.
"""
def __init__(__self__, *,
thumbprint: str,
thumbprint_secondary: Optional[str] = None,
x509_store_name: Optional[str] = None):
"""
Describes the certificate details.
:param str thumbprint: Thumbprint of the primary certificate.
:param str thumbprint_secondary: Thumbprint of the secondary certificate.
:param str x509_store_name: The local certificate store location.
"""
pulumi.set(__self__, "thumbprint", thumbprint)
if thumbprint_secondary is not None:
pulumi.set(__self__, "thumbprint_secondary", thumbprint_secondary)
if x509_store_name is not None:
pulumi.set(__self__, "x509_store_name", x509_store_name)
@property
@pulumi.getter
def thumbprint(self) -> str:
"""
Thumbprint of the primary certificate.
"""
return pulumi.get(self, "thumbprint")
@property
@pulumi.getter(name="thumbprintSecondary")
def thumbprint_secondary(self) -> Optional[str]:
"""
Thumbprint of the secondary certificate.
"""
return pulumi.get(self, "thumbprint_secondary")
@property
@pulumi.getter(name="x509StoreName")
def x509_store_name(self) -> Optional[str]:
"""
The local certificate store location.
"""
return pulumi.get(self, "x509_store_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ClientCertificateCommonNameResponse(dict):
"""
Describes the client certificate details using common name.
"""
def __init__(__self__, *,
certificate_common_name: str,
certificate_issuer_thumbprint: str,
is_admin: bool):
"""
Describes the client certificate details using common name.
:param str certificate_common_name: The common name of the client certificate.
:param str certificate_issuer_thumbprint: The issuer thumbprint of the client certificate.
:param bool is_admin: Indicates if the client certificate has admin access to the cluster. Non-admin clients can perform only read-only operations on the cluster.
"""
pulumi.set(__self__, "certificate_common_name", certificate_common_name)
pulumi.set(__self__, "certificate_issuer_thumbprint", certificate_issuer_thumbprint)
pulumi.set(__self__, "is_admin", is_admin)
@property
@pulumi.getter(name="certificateCommonName")
def certificate_common_name(self) -> str:
"""
The common name of the client certificate.
"""
return pulumi.get(self, "certificate_common_name")
@property
@pulumi.getter(name="certificateIssuerThumbprint")
def certificate_issuer_thumbprint(self) -> str:
"""
The issuer thumbprint of the client certificate.
"""
return pulumi.get(self, "certificate_issuer_thumbprint")
@property
@pulumi.getter(name="isAdmin")
def is_admin(self) -> bool:
"""
Indicates if the client certificate has admin access to the cluster. Non-admin clients can perform only read-only operations on the cluster.
"""
return pulumi.get(self, "is_admin")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ClientCertificateThumbprintResponse(dict):
"""
Describes the client certificate details using thumbprint.
"""
def __init__(__self__, *,
certificate_thumbprint: str,
is_admin: bool):
"""
Describes the client certificate details using thumbprint.
:param str certificate_thumbprint: The thumbprint of the client certificate.
:param bool is_admin: Indicates if the client certificate has admin access to the cluster. Non-admin clients can perform only read-only operations on the cluster.
"""
pulumi.set(__self__, "certificate_thumbprint", certificate_thumbprint)
pulumi.set(__self__, "is_admin", is_admin)
@property
@pulumi.getter(name="certificateThumbprint")
def certificate_thumbprint(self) -> str:
"""
The thumbprint of the client certificate.
"""
return pulumi.get(self, "certificate_thumbprint")
@property
@pulumi.getter(name="isAdmin")
def is_admin(self) -> bool:
"""
Indicates if the client certificate has admin access to the cluster. Non-admin clients can perform only read-only operations on the cluster.
"""
return pulumi.get(self, "is_admin")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ClusterHealthPolicyResponse(dict):
"""
Defines a health policy used to evaluate the health of the cluster or of a cluster node.
"""
def __init__(__self__, *,
application_health_policies: Optional[Mapping[str, 'outputs.ApplicationHealthPolicyResponse']] = None,
max_percent_unhealthy_applications: Optional[int] = None,
max_percent_unhealthy_nodes: Optional[int] = None):
"""
Defines a health policy used to evaluate the health of the cluster or of a cluster node.
:param Mapping[str, 'ApplicationHealthPolicyResponseArgs'] application_health_policies: Defines the application health policy map used to evaluate the health of an application or one of its child entities.
:param int max_percent_unhealthy_applications: The maximum allowed percentage of unhealthy applications before reporting an error. For example, to allow 10% of applications to be unhealthy, this value would be 10.
The percentage represents the maximum tolerated percentage of applications that can be unhealthy before the cluster is considered in error.
If the percentage is respected but there is at least one unhealthy application, the health is evaluated as Warning.
This is calculated by dividing the number of unhealthy applications over the total number of application instances in the cluster, excluding applications of application types that are included in the ApplicationTypeHealthPolicyMap.
The computation rounds up to tolerate one failure on small numbers of applications. Default percentage is zero.
:param int max_percent_unhealthy_nodes: The maximum allowed percentage of unhealthy nodes before reporting an error. For example, to allow 10% of nodes to be unhealthy, this value would be 10.
The percentage represents the maximum tolerated percentage of nodes that can be unhealthy before the cluster is considered in error.
If the percentage is respected but there is at least one unhealthy node, the health is evaluated as Warning.
The percentage is calculated by dividing the number of unhealthy nodes over the total number of nodes in the cluster.
The computation rounds up to tolerate one failure on small numbers of nodes. Default percentage is zero.
In large clusters, some nodes will always be down or out for repairs, so this percentage should be configured to tolerate that.
"""
if application_health_policies is not None:
pulumi.set(__self__, "application_health_policies", application_health_policies)
if max_percent_unhealthy_applications is not None:
pulumi.set(__self__, "max_percent_unhealthy_applications", max_percent_unhealthy_applications)
if max_percent_unhealthy_nodes is not None:
pulumi.set(__self__, "max_percent_unhealthy_nodes", max_percent_unhealthy_nodes)
@property
@pulumi.getter(name="applicationHealthPolicies")
def application_health_policies(self) -> Optional[Mapping[str, 'outputs.ApplicationHealthPolicyResponse']]:
"""
Defines the application health policy map used to evaluate the health of an application or one of its child entities.
"""
return pulumi.get(self, "application_health_policies")
@property
@pulumi.getter(name="maxPercentUnhealthyApplications")
def max_percent_unhealthy_applications(self) -> Optional[int]:
"""
The maximum allowed percentage of unhealthy applications before reporting an error. For example, to allow 10% of applications to be unhealthy, this value would be 10.
The percentage represents the maximum tolerated percentage of applications that can be unhealthy before the cluster is considered in error.
If the percentage is respected but there is at least one unhealthy application, the health is evaluated as Warning.
This is calculated by dividing the number of unhealthy applications over the total number of application instances in the cluster, excluding applications of application types that are included in the ApplicationTypeHealthPolicyMap.
The computation rounds up to tolerate one failure on small numbers of applications. Default percentage is zero.
"""
return pulumi.get(self, "max_percent_unhealthy_applications")
@property
@pulumi.getter(name="maxPercentUnhealthyNodes")
def max_percent_unhealthy_nodes(self) -> Optional[int]:
"""
The maximum allowed percentage of unhealthy nodes before reporting an error. For example, to allow 10% of nodes to be unhealthy, this value would be 10.
The percentage represents the maximum tolerated percentage of nodes that can be unhealthy before the cluster is considered in error.
If the percentage is respected but there is at least one unhealthy node, the health is evaluated as Warning.
The percentage is calculated by dividing the number of unhealthy nodes over the total number of nodes in the cluster.
The computation rounds up to tolerate one failure on small numbers of nodes. Default percentage is zero.
In large clusters, some nodes will always be down or out for repairs, so this percentage should be configured to tolerate that.
"""
return pulumi.get(self, "max_percent_unhealthy_nodes")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
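# --- Illustrative sketch (editor's addition, not part of the generated SDK) ---
# The MaxPercentUnhealthy* settings above are percentages evaluated against the
# total node/application count, with the tolerated count rounded up so that very
# small clusters still tolerate at least one failure. A rough reading of that
# rule, assuming ceiling-based rounding (the exact Service Fabric evaluation
# rules may differ):
def _example_exceeds_unhealthy_limit(unhealthy: int, total: int, max_percent: int) -> bool:
    import math
    # e.g. max_percent=10, total=5 -> allowed = ceil(0.5) = 1 unhealthy node tolerated
    allowed = math.ceil(max_percent / 100 * total)
    return unhealthy > allowed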
@pulumi.output_type
class ClusterUpgradeDeltaHealthPolicyResponse(dict):
"""
Describes the delta health policies for the cluster upgrade.
"""
def __init__(__self__, *,
max_percent_delta_unhealthy_applications: int,
max_percent_delta_unhealthy_nodes: int,
max_percent_upgrade_domain_delta_unhealthy_nodes: int,
application_delta_health_policies: Optional[Mapping[str, 'outputs.ApplicationDeltaHealthPolicyResponse']] = None):
"""
Describes the delta health policies for the cluster upgrade.
:param int max_percent_delta_unhealthy_applications: The maximum allowed percentage of applications health degradation allowed during cluster upgrades.
The delta is measured between the state of the applications at the beginning of upgrade and the state of the applications at the time of the health evaluation.
The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits. System services are not included in this.
:param int max_percent_delta_unhealthy_nodes: The maximum allowed percentage of nodes health degradation allowed during cluster upgrades.
The delta is measured between the state of the nodes at the beginning of upgrade and the state of the nodes at the time of the health evaluation.
The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits.
:param int max_percent_upgrade_domain_delta_unhealthy_nodes: The maximum allowed percentage of upgrade domain nodes health degradation allowed during cluster upgrades.
The delta is measured between the state of the upgrade domain nodes at the beginning of upgrade and the state of the upgrade domain nodes at the time of the health evaluation.
The check is performed after every upgrade domain upgrade completion for all completed upgrade domains to make sure the state of the upgrade domains is within tolerated limits.
:param Mapping[str, 'ApplicationDeltaHealthPolicyResponseArgs'] application_delta_health_policies: Defines the application delta health policy map used to evaluate the health of an application or one of its child entities when upgrading the cluster.
"""
pulumi.set(__self__, "max_percent_delta_unhealthy_applications", max_percent_delta_unhealthy_applications)
pulumi.set(__self__, "max_percent_delta_unhealthy_nodes", max_percent_delta_unhealthy_nodes)
pulumi.set(__self__, "max_percent_upgrade_domain_delta_unhealthy_nodes", max_percent_upgrade_domain_delta_unhealthy_nodes)
if application_delta_health_policies is not None:
pulumi.set(__self__, "application_delta_health_policies", application_delta_health_policies)
@property
@pulumi.getter(name="maxPercentDeltaUnhealthyApplications")
def max_percent_delta_unhealthy_applications(self) -> int:
"""
The maximum allowed percentage of applications health degradation allowed during cluster upgrades.
The delta is measured between the state of the applications at the beginning of upgrade and the state of the applications at the time of the health evaluation.
The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits. System services are not included in this.
"""
return pulumi.get(self, "max_percent_delta_unhealthy_applications")
@property
@pulumi.getter(name="maxPercentDeltaUnhealthyNodes")
def max_percent_delta_unhealthy_nodes(self) -> int:
"""
The maximum allowed percentage of nodes health degradation allowed during cluster upgrades.
The delta is measured between the state of the nodes at the beginning of upgrade and the state of the nodes at the time of the health evaluation.
The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits.
"""
return pulumi.get(self, "max_percent_delta_unhealthy_nodes")
@property
@pulumi.getter(name="maxPercentUpgradeDomainDeltaUnhealthyNodes")
def max_percent_upgrade_domain_delta_unhealthy_nodes(self) -> int:
"""
The maximum allowed percentage of upgrade domain nodes health degradation allowed during cluster upgrades.
The delta is measured between the state of the upgrade domain nodes at the beginning of upgrade and the state of the upgrade domain nodes at the time of the health evaluation.
The check is performed after every upgrade domain upgrade completion for all completed upgrade domains to make sure the state of the upgrade domains is within tolerated limits.
"""
return pulumi.get(self, "max_percent_upgrade_domain_delta_unhealthy_nodes")
@property
@pulumi.getter(name="applicationDeltaHealthPolicies")
def application_delta_health_policies(self) -> Optional[Mapping[str, 'outputs.ApplicationDeltaHealthPolicyResponse']]:
"""
Defines the application delta health policy map used to evaluate the health of an application or one of its child entities when upgrading the cluster.
"""
return pulumi.get(self, "application_delta_health_policies")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ClusterUpgradePolicyResponse(dict):
"""
Describes the policy used when upgrading the cluster.
"""
def __init__(__self__, *,
health_check_retry_timeout: str,
health_check_stable_duration: str,
health_check_wait_duration: str,
health_policy: 'outputs.ClusterHealthPolicyResponse',
upgrade_domain_timeout: str,
upgrade_replica_set_check_timeout: str,
upgrade_timeout: str,
delta_health_policy: Optional['outputs.ClusterUpgradeDeltaHealthPolicyResponse'] = None,
force_restart: Optional[bool] = None):
"""
Describes the policy used when upgrading the cluster.
:param str health_check_retry_timeout: The amount of time to retry health evaluation when the application or cluster is unhealthy before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param str health_check_stable_duration: The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. The duration can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param str health_check_wait_duration: The length of time to wait after completing an upgrade domain before performing health checks. The duration can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param 'ClusterHealthPolicyResponseArgs' health_policy: The cluster health policy used when upgrading the cluster.
:param str upgrade_domain_timeout: The amount of time each upgrade domain has to complete before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param str upgrade_replica_set_check_timeout: The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param str upgrade_timeout: The amount of time the overall upgrade has to complete before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param 'ClusterUpgradeDeltaHealthPolicyResponseArgs' delta_health_policy: The cluster delta health policy used when upgrading the cluster.
:param bool force_restart: If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data).
"""
pulumi.set(__self__, "health_check_retry_timeout", health_check_retry_timeout)
pulumi.set(__self__, "health_check_stable_duration", health_check_stable_duration)
pulumi.set(__self__, "health_check_wait_duration", health_check_wait_duration)
pulumi.set(__self__, "health_policy", health_policy)
pulumi.set(__self__, "upgrade_domain_timeout", upgrade_domain_timeout)
pulumi.set(__self__, "upgrade_replica_set_check_timeout", upgrade_replica_set_check_timeout)
pulumi.set(__self__, "upgrade_timeout", upgrade_timeout)
if delta_health_policy is not None:
pulumi.set(__self__, "delta_health_policy", delta_health_policy)
if force_restart is not None:
pulumi.set(__self__, "force_restart", force_restart)
@property
@pulumi.getter(name="healthCheckRetryTimeout")
def health_check_retry_timeout(self) -> str:
"""
The amount of time to retry health evaluation when the application or cluster is unhealthy before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "health_check_retry_timeout")
@property
@pulumi.getter(name="healthCheckStableDuration")
def health_check_stable_duration(self) -> str:
"""
The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. The duration can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "health_check_stable_duration")
@property
@pulumi.getter(name="healthCheckWaitDuration")
def health_check_wait_duration(self) -> str:
"""
The length of time to wait after completing an upgrade domain before performing health checks. The duration can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "health_check_wait_duration")
@property
@pulumi.getter(name="healthPolicy")
def health_policy(self) -> 'outputs.ClusterHealthPolicyResponse':
"""
The cluster health policy used when upgrading the cluster.
"""
return pulumi.get(self, "health_policy")
@property
@pulumi.getter(name="upgradeDomainTimeout")
def upgrade_domain_timeout(self) -> str:
"""
The amount of time each upgrade domain has to complete before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "upgrade_domain_timeout")
@property
@pulumi.getter(name="upgradeReplicaSetCheckTimeout")
def upgrade_replica_set_check_timeout(self) -> str:
"""
The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "upgrade_replica_set_check_timeout")
@property
@pulumi.getter(name="upgradeTimeout")
def upgrade_timeout(self) -> str:
"""
The amount of time the overall upgrade has to complete before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "upgrade_timeout")
@property
@pulumi.getter(name="deltaHealthPolicy")
def delta_health_policy(self) -> Optional['outputs.ClusterUpgradeDeltaHealthPolicyResponse']:
"""
The cluster delta health policy used when upgrading the cluster.
"""
return pulumi.get(self, "delta_health_policy")
@property
@pulumi.getter(name="forceRestart")
def force_restart(self) -> Optional[bool]:
"""
If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data).
"""
return pulumi.get(self, "force_restart")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
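# --- Illustrative sketch (editor's addition, not part of the generated SDK) ---
# The upgrade policy timeouts above are plain strings in "hh:mm:ss" or
# "d.hh:mm:ss.ms" form. A minimal helper for producing the short form from a
# datetime.timedelta, assuming the duration stays under one day (otherwise the
# "d.hh:mm:ss.ms" prefix would be needed):
def _example_format_timeout(delta) -> str:
    from datetime import timedelta
    assert isinstance(delta, timedelta) and delta < timedelta(days=1)
    total = int(delta.total_seconds())
    hours, remainder = divmod(total, 3600)
    minutes, seconds = divmod(remainder, 60)
    return "{:02d}:{:02d}:{:02d}".format(hours, minutes, seconds)
# _example_format_timeout(timedelta(minutes=45)) -> "00:45:00"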
@pulumi.output_type
class ClusterVersionDetailsResponse(dict):
"""
The detail of the Service Fabric runtime version result
"""
def __init__(__self__, *,
code_version: Optional[str] = None,
environment: Optional[str] = None,
support_expiry_utc: Optional[str] = None):
"""
The detail of the Service Fabric runtime version result
:param str code_version: The Service Fabric runtime version of the cluster.
:param str environment: Indicates if this version is for Windows or Linux operating system.
:param str support_expiry_utc: The date of expiry of support of the version.
"""
if code_version is not None:
pulumi.set(__self__, "code_version", code_version)
if environment is not None:
pulumi.set(__self__, "environment", environment)
if support_expiry_utc is not None:
pulumi.set(__self__, "support_expiry_utc", support_expiry_utc)
@property
@pulumi.getter(name="codeVersion")
def code_version(self) -> Optional[str]:
"""
The Service Fabric runtime version of the cluster.
"""
return pulumi.get(self, "code_version")
@property
@pulumi.getter
def environment(self) -> Optional[str]:
"""
Indicates if this version is for Windows or Linux operating system.
"""
return pulumi.get(self, "environment")
@property
@pulumi.getter(name="supportExpiryUtc")
def support_expiry_utc(self) -> Optional[str]:
"""
The date of expiry of support of the version.
"""
return pulumi.get(self, "support_expiry_utc")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DiagnosticsStorageAccountConfigResponse(dict):
"""
The storage account information for storing Service Fabric diagnostic logs.
"""
def __init__(__self__, *,
blob_endpoint: str,
protected_account_key_name: str,
queue_endpoint: str,
storage_account_name: str,
table_endpoint: str,
protected_account_key_name2: Optional[str] = None):
"""
The storage account information for storing Service Fabric diagnostic logs.
:param str blob_endpoint: The blob endpoint of the azure storage account.
:param str protected_account_key_name: The protected diagnostics storage key name.
:param str queue_endpoint: The queue endpoint of the azure storage account.
:param str storage_account_name: The Azure storage account name.
:param str table_endpoint: The table endpoint of the azure storage account.
:param str protected_account_key_name2: The secondary protected diagnostics storage key name. If one of the storage account keys is rotated, the cluster will fall back to using the other.
"""
pulumi.set(__self__, "blob_endpoint", blob_endpoint)
pulumi.set(__self__, "protected_account_key_name", protected_account_key_name)
pulumi.set(__self__, "queue_endpoint", queue_endpoint)
pulumi.set(__self__, "storage_account_name", storage_account_name)
pulumi.set(__self__, "table_endpoint", table_endpoint)
if protected_account_key_name2 is not None:
pulumi.set(__self__, "protected_account_key_name2", protected_account_key_name2)
@property
@pulumi.getter(name="blobEndpoint")
def blob_endpoint(self) -> str:
"""
The blob endpoint of the azure storage account.
"""
return pulumi.get(self, "blob_endpoint")
@property
@pulumi.getter(name="protectedAccountKeyName")
def protected_account_key_name(self) -> str:
"""
The protected diagnostics storage key name.
"""
return pulumi.get(self, "protected_account_key_name")
@property
@pulumi.getter(name="queueEndpoint")
def queue_endpoint(self) -> str:
"""
The queue endpoint of the azure storage account.
"""
return pulumi.get(self, "queue_endpoint")
@property
@pulumi.getter(name="storageAccountName")
def storage_account_name(self) -> str:
"""
The Azure storage account name.
"""
return pulumi.get(self, "storage_account_name")
@property
@pulumi.getter(name="tableEndpoint")
def table_endpoint(self) -> str:
"""
The table endpoint of the azure storage account.
"""
return pulumi.get(self, "table_endpoint")
@property
@pulumi.getter(name="protectedAccountKeyName2")
def protected_account_key_name2(self) -> Optional[str]:
"""
The secondary protected diagnostics storage key name. If one of the storage account keys is rotated, the cluster will fall back to using the other.
"""
return pulumi.get(self, "protected_account_key_name2")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class EndpointRangeDescriptionResponse(dict):
"""
Port range details
"""
def __init__(__self__, *,
end_port: int,
start_port: int):
"""
Port range details
:param int end_port: End port of a range of ports
:param int start_port: Starting port of a range of ports
"""
pulumi.set(__self__, "end_port", end_port)
pulumi.set(__self__, "start_port", start_port)
@property
@pulumi.getter(name="endPort")
def end_port(self) -> int:
"""
End port of a range of ports
"""
return pulumi.get(self, "end_port")
@property
@pulumi.getter(name="startPort")
def start_port(self) -> int:
"""
Starting port of a range of ports
"""
return pulumi.get(self, "start_port")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedIdentityResponse(dict):
"""
Describes the managed identities for an Azure resource.
"""
def __init__(__self__, *,
principal_id: str,
tenant_id: str,
type: Optional[str] = None,
user_assigned_identities: Optional[Mapping[str, 'outputs.UserAssignedIdentityResponse']] = None):
"""
Describes the managed identities for an Azure resource.
:param str principal_id: The principal id of the managed identity. This property will only be provided for a system assigned identity.
:param str tenant_id: The tenant id of the managed identity. This property will only be provided for a system assigned identity.
:param str type: The type of managed identity for the resource.
:param Mapping[str, 'UserAssignedIdentityResponseArgs'] user_assigned_identities: The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
"""
pulumi.set(__self__, "principal_id", principal_id)
pulumi.set(__self__, "tenant_id", tenant_id)
if type is not None:
pulumi.set(__self__, "type", type)
if user_assigned_identities is not None:
pulumi.set(__self__, "user_assigned_identities", user_assigned_identities)
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal id of the managed identity. This property will only be provided for a system assigned identity.
"""
return pulumi.get(self, "principal_id")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> str:
"""
The tenant id of the managed identity. This property will only be provided for a system assigned identity.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The type of managed identity for the resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="userAssignedIdentities")
def user_assigned_identities(self) -> Optional[Mapping[str, 'outputs.UserAssignedIdentityResponse']]:
"""
The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
"""
return pulumi.get(self, "user_assigned_identities")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
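# --- Illustrative sketch (editor's addition, not part of the generated SDK) ---
# Keys of user_assigned_identities are full ARM resource ids in the form quoted
# in the docstring above. A minimal helper that builds such a key; the
# subscription, resource group, and identity names passed in are placeholders:
def _example_user_assigned_identity_key(subscription_id: str, resource_group: str, identity_name: str) -> str:
    return (
        "/subscriptions/{}/resourceGroups/{}/providers/"
        "Microsoft.ManagedIdentity/userAssignedIdentities/{}"
    ).format(subscription_id, resource_group, identity_name)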
@pulumi.output_type
class NamedPartitionSchemeDescriptionResponse(dict):
"""
Describes the named partition scheme of the service.
"""
def __init__(__self__, *,
count: int,
names: Sequence[str],
partition_scheme: str):
"""
Describes the named partition scheme of the service.
:param int count: The number of partitions.
:param Sequence[str] names: Array of size specified by the ‘count’ parameter, for the names of the partitions.
:param str partition_scheme: Specifies how the service is partitioned.
"""
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "names", names)
pulumi.set(__self__, "partition_scheme", 'Named')
@property
@pulumi.getter
def count(self) -> int:
"""
The number of partitions.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter
def names(self) -> Sequence[str]:
"""
Array of size specified by the ‘count’ parameter, for the names of the partitions.
"""
return pulumi.get(self, "names")
@property
@pulumi.getter(name="partitionScheme")
def partition_scheme(self) -> str:
"""
Specifies how the service is partitioned.
"""
return pulumi.get(self, "partition_scheme")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class NodeTypeDescriptionResponse(dict):
"""
Describes a node type in the cluster; each node type represents a subset of nodes in the cluster.
"""
def __init__(__self__, *,
client_connection_endpoint_port: int,
http_gateway_endpoint_port: int,
is_primary: bool,
name: str,
vm_instance_count: int,
application_ports: Optional['outputs.EndpointRangeDescriptionResponse'] = None,
capacities: Optional[Mapping[str, str]] = None,
durability_level: Optional[str] = None,
ephemeral_ports: Optional['outputs.EndpointRangeDescriptionResponse'] = None,
placement_properties: Optional[Mapping[str, str]] = None,
reverse_proxy_endpoint_port: Optional[int] = None):
"""
Describes a node type in the cluster; each node type represents a subset of nodes in the cluster.
:param int client_connection_endpoint_port: The TCP cluster management endpoint port.
:param int http_gateway_endpoint_port: The HTTP cluster management endpoint port.
:param bool is_primary: The node type on which system services will run. Only one node type should be marked as primary. Primary node type cannot be deleted or changed for existing clusters.
:param str name: The name of the node type.
:param int vm_instance_count: The number of nodes in the node type. This count should match the capacity property in the corresponding VirtualMachineScaleSet resource.
:param 'EndpointRangeDescriptionResponseArgs' application_ports: The range of ports from which the cluster assigns ports to Service Fabric applications.
:param Mapping[str, str] capacities: The capacity tags applied to the nodes in the node type, the cluster resource manager uses these tags to understand how much resource a node has.
:param str durability_level: The durability level of the node type. Learn about [DurabilityLevel](https://docs.microsoft.com/azure/service-fabric/service-fabric-cluster-capacity).
- Bronze - No privileges. This is the default.
- Silver - The infrastructure jobs can be paused for a duration of 10 minutes per UD.
- Gold - The infrastructure jobs can be paused for a duration of 2 hours per UD. Gold durability can be enabled only on full node VM skus like D15_V2, G5 etc.
:param 'EndpointRangeDescriptionResponseArgs' ephemeral_ports: The range of ephemeral ports that nodes in this node type should be configured with.
:param Mapping[str, str] placement_properties: The placement tags applied to nodes in the node type, which can be used to indicate where certain services (workload) should run.
:param int reverse_proxy_endpoint_port: The endpoint used by reverse proxy.
"""
pulumi.set(__self__, "client_connection_endpoint_port", client_connection_endpoint_port)
pulumi.set(__self__, "http_gateway_endpoint_port", http_gateway_endpoint_port)
pulumi.set(__self__, "is_primary", is_primary)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "vm_instance_count", vm_instance_count)
if application_ports is not None:
pulumi.set(__self__, "application_ports", application_ports)
if capacities is not None:
pulumi.set(__self__, "capacities", capacities)
if durability_level is not None:
pulumi.set(__self__, "durability_level", durability_level)
if ephemeral_ports is not None:
pulumi.set(__self__, "ephemeral_ports", ephemeral_ports)
if placement_properties is not None:
pulumi.set(__self__, "placement_properties", placement_properties)
if reverse_proxy_endpoint_port is not None:
pulumi.set(__self__, "reverse_proxy_endpoint_port", reverse_proxy_endpoint_port)
@property
@pulumi.getter(name="clientConnectionEndpointPort")
def client_connection_endpoint_port(self) -> int:
"""
The TCP cluster management endpoint port.
"""
return pulumi.get(self, "client_connection_endpoint_port")
@property
@pulumi.getter(name="httpGatewayEndpointPort")
def http_gateway_endpoint_port(self) -> int:
"""
The HTTP cluster management endpoint port.
"""
return pulumi.get(self, "http_gateway_endpoint_port")
@property
@pulumi.getter(name="isPrimary")
def is_primary(self) -> bool:
"""
The node type on which system services will run. Only one node type should be marked as primary. Primary node type cannot be deleted or changed for existing clusters.
"""
return pulumi.get(self, "is_primary")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the node type.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="vmInstanceCount")
def vm_instance_count(self) -> int:
"""
The number of nodes in the node type. This count should match the capacity property in the corresponding VirtualMachineScaleSet resource.
"""
return pulumi.get(self, "vm_instance_count")
@property
@pulumi.getter(name="applicationPorts")
def application_ports(self) -> Optional['outputs.EndpointRangeDescriptionResponse']:
"""
The range of ports from which the cluster assigns ports to Service Fabric applications.
"""
return pulumi.get(self, "application_ports")
@property
@pulumi.getter
def capacities(self) -> Optional[Mapping[str, str]]:
"""
The capacity tags applied to the nodes in the node type, the cluster resource manager uses these tags to understand how much resource a node has.
"""
return pulumi.get(self, "capacities")
@property
@pulumi.getter(name="durabilityLevel")
def durability_level(self) -> Optional[str]:
"""
The durability level of the node type. Learn about [DurabilityLevel](https://docs.microsoft.com/azure/service-fabric/service-fabric-cluster-capacity).
- Bronze - No privileges. This is the default.
- Silver - The infrastructure jobs can be paused for a duration of 10 minutes per UD.
- Gold - The infrastructure jobs can be paused for a duration of 2 hours per UD. Gold durability can be enabled only on full node VM skus like D15_V2, G5 etc.
"""
return pulumi.get(self, "durability_level")
@property
@pulumi.getter(name="ephemeralPorts")
def ephemeral_ports(self) -> Optional['outputs.EndpointRangeDescriptionResponse']:
"""
The range of ephemeral ports that nodes in this node type should be configured with.
"""
return pulumi.get(self, "ephemeral_ports")
@property
@pulumi.getter(name="placementProperties")
def placement_properties(self) -> Optional[Mapping[str, str]]:
"""
The placement tags applied to nodes in the node type, which can be used to indicate where certain services (workload) should run.
"""
return pulumi.get(self, "placement_properties")
@property
@pulumi.getter(name="reverseProxyEndpointPort")
def reverse_proxy_endpoint_port(self) -> Optional[int]:
"""
The endpoint used by reverse proxy.
"""
return pulumi.get(self, "reverse_proxy_endpoint_port")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServerCertificateCommonNameResponse(dict):
"""
Describes the server certificate details using common name.
"""
def __init__(__self__, *,
certificate_common_name: str,
certificate_issuer_thumbprint: str):
"""
Describes the server certificate details using common name.
:param str certificate_common_name: The common name of the server certificate.
:param str certificate_issuer_thumbprint: The issuer thumbprint of the server certificate.
"""
pulumi.set(__self__, "certificate_common_name", certificate_common_name)
pulumi.set(__self__, "certificate_issuer_thumbprint", certificate_issuer_thumbprint)
@property
@pulumi.getter(name="certificateCommonName")
def certificate_common_name(self) -> str:
"""
The common name of the server certificate.
"""
return pulumi.get(self, "certificate_common_name")
@property
@pulumi.getter(name="certificateIssuerThumbprint")
def certificate_issuer_thumbprint(self) -> str:
"""
The issuer thumbprint of the server certificate.
"""
return pulumi.get(self, "certificate_issuer_thumbprint")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServerCertificateCommonNamesResponse(dict):
"""
Describes a list of server certificates referenced by common name that are used to secure the cluster.
"""
def __init__(__self__, *,
common_names: Optional[Sequence['outputs.ServerCertificateCommonNameResponse']] = None,
x509_store_name: Optional[str] = None):
"""
Describes a list of server certificates referenced by common name that are used to secure the cluster.
:param Sequence['ServerCertificateCommonNameResponseArgs'] common_names: The list of server certificates referenced by common name that are used to secure the cluster.
:param str x509_store_name: The local certificate store location.
"""
if common_names is not None:
pulumi.set(__self__, "common_names", common_names)
if x509_store_name is not None:
pulumi.set(__self__, "x509_store_name", x509_store_name)
@property
@pulumi.getter(name="commonNames")
def common_names(self) -> Optional[Sequence['outputs.ServerCertificateCommonNameResponse']]:
"""
The list of server certificates referenced by common name that are used to secure the cluster.
"""
return pulumi.get(self, "common_names")
@property
@pulumi.getter(name="x509StoreName")
def x509_store_name(self) -> Optional[str]:
"""
The local certificate store location.
"""
return pulumi.get(self, "x509_store_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceCorrelationDescriptionResponse(dict):
"""
Creates a particular correlation between services.
"""
def __init__(__self__, *,
scheme: str,
service_name: str):
"""
Creates a particular correlation between services.
:param str scheme: The ServiceCorrelationScheme which describes the relationship between this service and the service specified via ServiceName.
:param str service_name: The name of the service that the correlation relationship is established with.
"""
pulumi.set(__self__, "scheme", scheme)
pulumi.set(__self__, "service_name", service_name)
@property
@pulumi.getter
def scheme(self) -> str:
"""
The ServiceCorrelationScheme which describes the relationship between this service and the service specified via ServiceName.
"""
return pulumi.get(self, "scheme")
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> str:
"""
The name of the service that the correlation relationship is established with.
"""
return pulumi.get(self, "service_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceLoadMetricDescriptionResponse(dict):
"""
Specifies a metric to load balance a service during runtime.
"""
def __init__(__self__, *,
name: str,
default_load: Optional[int] = None,
primary_default_load: Optional[int] = None,
secondary_default_load: Optional[int] = None,
weight: Optional[str] = None):
"""
Specifies a metric to load balance a service during runtime.
:param str name: The name of the metric. If the service chooses to report load during runtime, the load metric name should match the name that is specified in Name exactly. Note that metric names are case sensitive.
:param int default_load: Used only for Stateless services. The default amount of load, as a number, that this service creates for this metric.
:param int primary_default_load: Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Primary replica.
:param int secondary_default_load: Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Secondary replica.
:param str weight: The service load metric relative weight, compared to other metrics configured for this service, as a number.
"""
pulumi.set(__self__, "name", name)
if default_load is not None:
pulumi.set(__self__, "default_load", default_load)
if primary_default_load is not None:
pulumi.set(__self__, "primary_default_load", primary_default_load)
if secondary_default_load is not None:
pulumi.set(__self__, "secondary_default_load", secondary_default_load)
if weight is not None:
pulumi.set(__self__, "weight", weight)
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the metric. If the service chooses to report load during runtime, the load metric name should match the name that is specified in Name exactly. Note that metric names are case sensitive.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="defaultLoad")
def default_load(self) -> Optional[int]:
"""
Used only for Stateless services. The default amount of load, as a number, that this service creates for this metric.
"""
return pulumi.get(self, "default_load")
@property
@pulumi.getter(name="primaryDefaultLoad")
def primary_default_load(self) -> Optional[int]:
"""
Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Primary replica.
"""
return pulumi.get(self, "primary_default_load")
@property
@pulumi.getter(name="secondaryDefaultLoad")
def secondary_default_load(self) -> Optional[int]:
"""
Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Secondary replica.
"""
return pulumi.get(self, "secondary_default_load")
@property
@pulumi.getter
def weight(self) -> Optional[str]:
"""
The service load metric relative weight, compared to other metrics configured for this service, as a number.
"""
return pulumi.get(self, "weight")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServicePlacementPolicyDescriptionResponse(dict):
"""
Describes the policy to be used for placement of a Service Fabric service.
"""
def __init__(__self__, *,
type: str):
"""
Describes the policy to be used for placement of a Service Fabric service.
:param str type: The type of placement policy for a service fabric service. Following are the possible values.
"""
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def type(self) -> str:
"""
The type of placement policy for a service fabric service. Following are the possible values.
"""
return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceTypeDeltaHealthPolicyResponse(dict):
"""
Represents the delta health policy used to evaluate the health of services belonging to a service type when upgrading the cluster.
"""
def __init__(__self__, *,
max_percent_delta_unhealthy_services: Optional[int] = None):
"""
Represents the delta health policy used to evaluate the health of services belonging to a service type when upgrading the cluster.
:param int max_percent_delta_unhealthy_services: The maximum allowed percentage of services health degradation allowed during cluster upgrades.
The delta is measured between the state of the services at the beginning of upgrade and the state of the services at the time of the health evaluation.
The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits.
"""
if max_percent_delta_unhealthy_services is not None:
pulumi.set(__self__, "max_percent_delta_unhealthy_services", max_percent_delta_unhealthy_services)
@property
@pulumi.getter(name="maxPercentDeltaUnhealthyServices")
def max_percent_delta_unhealthy_services(self) -> Optional[int]:
"""
The maximum allowed percentage of services health degradation allowed during cluster upgrades.
The delta is measured between the state of the services at the beginning of upgrade and the state of the services at the time of the health evaluation.
The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits.
"""
return pulumi.get(self, "max_percent_delta_unhealthy_services")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceTypeHealthPolicyResponse(dict):
"""
Represents the health policy used to evaluate the health of services belonging to a service type.
"""
def __init__(__self__, *,
max_percent_unhealthy_services: Optional[int] = None):
"""
Represents the health policy used to evaluate the health of services belonging to a service type.
:param int max_percent_unhealthy_services: The maximum percentage of services allowed to be unhealthy before your application is considered in error.
"""
if max_percent_unhealthy_services is not None:
pulumi.set(__self__, "max_percent_unhealthy_services", max_percent_unhealthy_services)
@property
@pulumi.getter(name="maxPercentUnhealthyServices")
def max_percent_unhealthy_services(self) -> Optional[int]:
"""
The maximum percentage of services allowed to be unhealthy before your application is considered in error.
"""
return pulumi.get(self, "max_percent_unhealthy_services")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SettingsParameterDescriptionResponse(dict):
"""
Describes a parameter in fabric settings of the cluster.
"""
def __init__(__self__, *,
name: str,
value: str):
"""
Describes a parameter in fabric settings of the cluster.
:param str name: The parameter name of fabric setting.
:param str value: The parameter value of fabric setting.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> str:
"""
The parameter name of fabric setting.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> str:
"""
The parameter value of fabric setting.
"""
return pulumi.get(self, "value")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SettingsSectionDescriptionResponse(dict):
"""
Describes a section in the fabric settings of the cluster.
"""
def __init__(__self__, *,
name: str,
parameters: Sequence['outputs.SettingsParameterDescriptionResponse']):
"""
Describes a section in the fabric settings of the cluster.
:param str name: The section name of the fabric settings.
:param Sequence['SettingsParameterDescriptionResponseArgs'] parameters: The collection of parameters in the section.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "parameters", parameters)
@property
@pulumi.getter
def name(self) -> str:
"""
The section name of the fabric settings.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def parameters(self) -> Sequence['outputs.SettingsParameterDescriptionResponse']:
"""
The collection of parameters in the section.
"""
return pulumi.get(self, "parameters")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SingletonPartitionSchemeDescriptionResponse(dict):
"""
Describes the partition scheme of a singleton-partitioned, or non-partitioned service.
"""
def __init__(__self__, *,
partition_scheme: str):
"""
Describes the partition scheme of a singleton-partitioned, or non-partitioned service.
:param str partition_scheme: Specifies how the service is partitioned.
"""
pulumi.set(__self__, "partition_scheme", 'Singleton')
@property
@pulumi.getter(name="partitionScheme")
def partition_scheme(self) -> str:
"""
Specifies how the service is partitioned.
"""
return pulumi.get(self, "partition_scheme")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class UniformInt64RangePartitionSchemeDescriptionResponse(dict):
"""
Describes a partitioning scheme where an integer range is allocated evenly across a number of partitions.
"""
def __init__(__self__, *,
count: int,
high_key: str,
low_key: str,
partition_scheme: str):
"""
Describes a partitioning scheme where an integer range is allocated evenly across a number of partitions.
:param int count: The number of partitions.
:param str high_key: String indicating the upper bound of the partition key range that
is split evenly across the ‘count’ partitions.
:param str low_key: String indicating the lower bound of the partition key range that
is split evenly across the ‘count’ partitions.
:param str partition_scheme: Specifies how the service is partitioned.
"""
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "high_key", high_key)
pulumi.set(__self__, "low_key", low_key)
pulumi.set(__self__, "partition_scheme", 'UniformInt64Range')
@property
@pulumi.getter
def count(self) -> int:
"""
The number of partitions.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter(name="highKey")
def high_key(self) -> str:
"""
String indicating the upper bound of the partition key range that
is split evenly across the ‘count’ partitions.
"""
return pulumi.get(self, "high_key")
@property
@pulumi.getter(name="lowKey")
def low_key(self) -> str:
"""
String indicating the lower bound of the partition key range that
is split evenly across the ‘count’ partitions.
"""
return pulumi.get(self, "low_key")
@property
@pulumi.getter(name="partitionScheme")
def partition_scheme(self) -> str:
"""
Specifies how the service is partitioned.
"""
return pulumi.get(self, "partition_scheme")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
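# --- Illustrative sketch (editor's addition, not part of the generated SDK) ---
# UniformInt64Range splits the [low_key, high_key] integer range evenly across
# 'count' partitions. A rough sketch of the resulting boundaries, assuming an
# inclusive range divided as evenly as possible (Service Fabric's exact split
# may distribute the remainder differently):
def _example_uniform_int64_ranges(low_key: int, high_key: int, count: int):
    total = high_key - low_key + 1
    base, remainder = divmod(total, count)
    ranges, start = [], low_key
    for i in range(count):
        size = base + (1 if i < remainder else 0)
        ranges.append((start, start + size - 1))
        start += size
    return ranges
# _example_uniform_int64_ranges(0, 99, 4) -> [(0, 24), (25, 49), (50, 74), (75, 99)]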
@pulumi.output_type
class UserAssignedIdentityResponse(dict):
def __init__(__self__, *,
client_id: str,
principal_id: str):
"""
:param str client_id: The client id of user assigned identity.
:param str principal_id: The principal id of user assigned identity.
"""
pulumi.set(__self__, "client_id", client_id)
pulumi.set(__self__, "principal_id", principal_id)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> str:
"""
The client id of user assigned identity.
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal id of user assigned identity.
"""
return pulumi.get(self, "principal_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| 49.585118
| 440
| 0.704842
|
b2a880eae81a33ed5a79eeed4633ddfe6483ed58
| 2,204
|
py
|
Python
|
SmallPackage/SmallIO.py
|
MikiEEE/SmallOS
|
5eb78da1a132eafccb0e18511de999a98ecad3f7
|
[
"Unlicense"
] | null | null | null |
SmallPackage/SmallIO.py
|
MikiEEE/SmallOS
|
5eb78da1a132eafccb0e18511de999a98ecad3f7
|
[
"Unlicense"
] | 1
|
2020-04-29T02:08:18.000Z
|
2020-05-02T03:50:45.000Z
|
SmallPackage/SmallIO.py
|
MikiEEE/SmallOS
|
5eb78da1a132eafccb0e18511de999a98ecad3f7
|
[
"Unlicense"
] | null | null | null |
# Kernels will be swappable depending on what hardware you are running on.
class SmallIO():
'''
@class SmallIO() - controls print input/output.
***NOTE***
This will probably be moved into the kernel class as a variable,
and will most likely be piped into the different processes on a terminal
selection basis.
TODO: Turn appPrintQueue into a circular buffer.
'''
def __init__(self, buffer_length):
'''
@function __init__() - sets up the terminal toggle
and print-queuing variables.
'''
self.terminalToggle = False
self.appPrintQueue = list()
self.buffer_length = buffer_length
return
def print(self, *args):
'''
@function print() - Prints output to terminal for application display.
@param *args - takes in arguments, does not automatically add newline.
@return - void.
'''
msg = ''.join([str(arg) for arg in args])
if self.terminalToggle == False:
self.kernel.write(msg)
elif len(self.appPrintQueue) < self.buffer_length:
self.appPrintQueue.append(msg)
else:
self.appPrintQueue.pop(0)
self.appPrintQueue.append(msg)
return
def sPrint(self, *args):
'''
@function sPrint() - Prints output to terminal for OS-related display.
@param *args - takes in arguments, does not automatically add newline.
@return - void.
'''
if self.terminalToggle == True:
msg = ''.join([str(arg) for arg in args])
self.kernel.write(msg)
return
def toggleTerminal(self):
'''
@function toggleTerminal() - Toggles the terminal from displaying application output
to OS command output and vice-versa.
@return - void.
'''
self.terminalToggle = not self.terminalToggle
msg = ''.join('*' for x in range(16)) + '\n'
self.kernel.write(msg)
if self.terminalToggle == False:
for num in range(len(self.appPrintQueue)):
msg = self.appPrintQueue.pop(0)
self.print(msg)
return
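# --- Illustrative usage sketch (editor's addition) ---
# SmallIO never assigns self.kernel itself; the surrounding OS is assumed to
# attach a kernel object exposing write() before the first print call. The stub
# below exists only to exercise the buffering and toggle behaviour.
if __name__ == "__main__":
    class _StubKernel:
        def write(self, msg):
            print(msg, end='')

    io = SmallIO(buffer_length=4)
    io.kernel = _StubKernel()
    io.print('app output\n')      # terminalToggle is False -> written immediately
    io.toggleTerminal()           # switch to OS view; app output is now buffered
    io.print('queued line\n')     # stored in appPrintQueue (oldest dropped past 4)
    io.sPrint('os output\n')      # OS-side print, shown while toggled
    io.toggleTerminal()           # switch back; queued app output is flushed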
| 31.485714
| 92
| 0.588475
|
eed2ac3db96045b5a8161358b99b3929431899c8
| 56,569
|
py
|
Python
|
mathics/builtin/arithmetic.py
|
rjalif199/Mathics
|
be0f08be246284489fab84fcd507f4bb3a1ba098
|
[
"Apache-2.0"
] | null | null | null |
mathics/builtin/arithmetic.py
|
rjalif199/Mathics
|
be0f08be246284489fab84fcd507f4bb3a1ba098
|
[
"Apache-2.0"
] | null | null | null |
mathics/builtin/arithmetic.py
|
rjalif199/Mathics
|
be0f08be246284489fab84fcd507f4bb3a1ba098
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# cython: language_level=3
"""
Mathematical Functions
Basic arithmetic functions, including complex number arithmetic.
"""
import sympy
import mpmath
import math
from mathics.builtin.base import (
Builtin,
Predefined,
BinaryOperator,
PrefixOperator,
PostfixOperator,
Test,
SympyFunction,
SympyConstant,
)
from mathics.core.expression import (
Complex,
Expression,
Integer,
Number,
Rational,
Real,
String,
Symbol,
SymbolFalse,
SymbolNull,
SymbolTrue,
from_python,
)
from mathics.core.numbers import min_prec, dps, SpecialValueError
from mathics.builtin.lists import _IterationFunction
from mathics.core.convert import from_sympy
class _MPMathFunction(SympyFunction):
attributes = ("Listable", "NumericFunction")
mpmath_name = None
nargs = 1
def get_mpmath_function(self, args):
if self.mpmath_name is None or len(args) != self.nargs:
return None
return getattr(mpmath, self.mpmath_name)
def apply(self, z, evaluation):
"%(name)s[z__]"
args = z.numerify(evaluation).get_sequence()
mpmath_function = self.get_mpmath_function(args)
result = None
# if no arguments are inexact attempt to use sympy
if all(not x.is_inexact() for x in args):
result = Expression(self.get_name(), *args).to_sympy()
result = self.prepare_mathics(result)
result = from_sympy(result)
# evaluate leaves to convert e.g. Plus[2, I] -> Complex[2, 1]
return result.evaluate_leaves(evaluation)
elif mpmath_function is None:
return
if not all(isinstance(arg, Number) for arg in args):
return
if any(arg.is_machine_precision() for arg in args):
# if any argument has machine precision then the entire calculation
# is done with machine precision.
float_args = [
arg.round().get_float_value(permit_complex=True) for arg in args
]
if None in float_args:
return
result = self.call_mpmath(mpmath_function, float_args)
if isinstance(result, (mpmath.mpc, mpmath.mpf)):
if mpmath.isinf(result) and isinstance(result, mpmath.mpc):
result = Symbol("ComplexInfinity")
elif mpmath.isinf(result) and result > 0:
result = Expression("DirectedInfinity", Integer(1))
elif mpmath.isinf(result) and result < 0:
result = Expression("DirectedInfinity", Integer(-1))
elif mpmath.isnan(result):
result = Symbol("Indeterminate")
else:
result = Number.from_mpmath(result)
else:
prec = min_prec(*args)
d = dps(prec)
args = [
Expression("N", arg, Integer(d)).evaluate(evaluation) for arg in args
]
with mpmath.workprec(prec):
mpmath_args = [x.to_mpmath() for x in args]
if None in mpmath_args:
return
result = self.call_mpmath(mpmath_function, mpmath_args)
if isinstance(result, (mpmath.mpc, mpmath.mpf)):
result = Number.from_mpmath(result, d)
return result
def call_mpmath(self, mpmath_function, mpmath_args):
try:
return mpmath_function(*mpmath_args)
except ValueError as exc:
text = str(exc)
if text == "gamma function pole":
return Symbol("ComplexInfinity")
else:
raise
except ZeroDivisionError:
return
except SpecialValueError as exc:
return Symbol(exc.name)
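# --- Illustrative sketch (editor's addition, not part of Mathics itself) ---
# apply() above switches between machine-precision floats and arbitrary
# precision: when no argument is machine precision it takes the minimum
# precision of the arguments (in bits) and evaluates under mpmath.workprec.
# The same pattern in isolation:
def _example_workprec_sum(values, prec_bits=200):
    with mpmath.workprec(prec_bits):
        return mpmath.fsum(mpmath.mpf(v) for v in values)
# _example_workprec_sum(["0.1"] * 10) evaluates 10 * 0.1 with ~200 bits of
# working precision instead of the 53-bit machine default.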
class _MPMathMultiFunction(_MPMathFunction):
sympy_names = None
mpmath_names = None
def get_sympy_names(self):
if self.sympy_names is None:
return [self.sympy_name]
return self.sympy_names.values()
def get_function(self, module, names, fallback_name, leaves):
try:
name = fallback_name
if names is not None:
name = names[len(leaves)]
return getattr(module, name)
except KeyError:
return None
def get_sympy_function(self, leaves):
return self.get_function(sympy, self.sympy_names, self.sympy_name, leaves)
def get_mpmath_function(self, leaves):
return self.get_function(mpmath, self.mpmath_names, self.mpmath_name, leaves)
class Plus(BinaryOperator, SympyFunction):
"""
<dl>
<dt>'Plus[$a$, $b$, ...]'</dt>
<dt>$a$ + $b$ + ...</dt>
<dd>represents the sum of the terms $a$, $b$, ...
</dl>
>> 1 + 2
= 3
'Plus' performs basic simplification of terms:
>> a + b + a
= 2 a + b
>> a + a + 3 * a
= 5 a
>> a + b + 4.5 + a + b + a + 2 + 1.5 b
= 6.5 + 3 a + 3.5 b
Apply 'Plus' on a list to sum up its elements:
>> Plus @@ {2, 4, 6}
= 12
The sum of the first 1000 integers:
>> Plus @@ Range[1000]
= 500500
'Plus' has default value 0:
>> DefaultValues[Plus]
= {HoldPattern[Default[Plus]] :> 0}
>> a /. n_. + x_ :> {n, x}
= {0, a}
The sum of 2 red circles and 3 red circles is...
>> 2 Graphics[{Red,Disk[]}] + 3 Graphics[{Red,Disk[]}]
= 5 -Graphics-
#> -2a - 2b
= -2 a - 2 b
#> -4+2x+2*Sqrt[3]
= -4 + 2 Sqrt[3] + 2 x
#> 2a-3b-c
= 2 a - 3 b - c
#> 2a+5d-3b-2c-e
= 2 a - 3 b - 2 c + 5 d - e
#> 1 - I * Sqrt[3]
= 1 - I Sqrt[3]
#> Head[3 + 2 I]
= Complex
#> N[Pi, 30] + N[E, 30]
= 5.85987448204883847382293085463
#> % // Precision
= 30.
"""
operator = "+"
precedence = 310
attributes = (
"Flat",
"Listable",
"NumericFunction",
"OneIdentity",
"Orderless",
"Protected",
)
default_formats = False
defaults = {
None: "0",
}
sympy_name = "Add"
def format_plus(self, items, evaluation):
"Plus[items__]"
def negate(item):
if item.has_form("Times", 1, None):
if isinstance(item.leaves[0], Number):
neg = -item.leaves[0]
if neg.same(Integer(1)):
if len(item.leaves) == 1:
return neg
else:
return Expression("Times", *item.leaves[1:])
else:
return Expression("Times", neg, *item.leaves[1:])
else:
return Expression("Times", -1, *item.leaves)
elif isinstance(item, Number):
return -item.to_sympy()
else:
return Expression("Times", -1, item)
def is_negative(value):
if isinstance(value, Complex):
real, imag = value.to_sympy().as_real_imag()
if real <= 0 and imag <= 0:
return True
elif isinstance(value, Number) and value.to_sympy() < 0:
return True
return False
items = items.get_sequence()
values = [Expression("HoldForm", item) for item in items[:1]]
ops = []
for item in items[1:]:
if (
item.has_form("Times", 1, None) and is_negative(item.leaves[0])
) or is_negative(item):
item = negate(item)
op = "-"
else:
op = "+"
values.append(Expression("HoldForm", item))
ops.append(String(op))
return Expression(
"Infix",
Expression("List", *values),
Expression("List", *ops),
310,
Symbol("Left"),
)
def apply(self, items, evaluation):
"Plus[items___]"
items = items.numerify(evaluation).get_sequence()
leaves = []
last_item = last_count = None
prec = min_prec(*items)
is_machine_precision = any(item.is_machine_precision() for item in items)
numbers = []
def append_last():
if last_item is not None:
if last_count == 1:
leaves.append(last_item)
else:
if last_item.has_form("Times", None):
leaves.append(
Expression(
"Times", from_sympy(last_count), *last_item.leaves
)
)
else:
leaves.append(
Expression("Times", from_sympy(last_count), last_item)
)
for item in items:
if isinstance(item, Number):
numbers.append(item)
else:
count = rest = None
if item.has_form("Times", None):
for leaf in item.leaves:
if isinstance(leaf, Number):
count = leaf.to_sympy()
rest = item.get_mutable_leaves()
rest.remove(leaf)
if len(rest) == 1:
rest = rest[0]
else:
rest.sort()
rest = Expression("Times", *rest)
break
if count is None:
count = sympy.Integer(1)
rest = item
if last_item is not None and last_item == rest:
last_count = last_count + count
else:
append_last()
last_item = rest
last_count = count
append_last()
if numbers:
if prec is not None:
if is_machine_precision:
numbers = [item.to_mpmath() for item in numbers]
number = mpmath.fsum(numbers)
number = Number.from_mpmath(number)
else:
with mpmath.workprec(prec):
numbers = [item.to_mpmath() for item in numbers]
number = mpmath.fsum(numbers)
number = Number.from_mpmath(number, dps(prec))
else:
number = from_sympy(sum(item.to_sympy() for item in numbers))
else:
number = Integer(0)
if not number.same(Integer(0)):
leaves.insert(0, number)
if not leaves:
return Integer(0)
elif len(leaves) == 1:
return leaves[0]
else:
leaves.sort()
return Expression("Plus", *leaves)
class Subtract(BinaryOperator):
"""
<dl>
<dt>'Subtract[$a$, $b$]'</dt>
<dt>$a$ - $b$</dt>
<dd>represents the subtraction of $b$ from $a$.</dd>
</dl>
>> 5 - 3
= 2
>> a - b // FullForm
= Plus[a, Times[-1, b]]
>> a - b - c
= a - b - c
>> a - (b - c)
= a - b + c
"""
operator = "-"
precedence_parse = 311
precedence = 310
attributes = ("Listable", "NumericFunction")
grouping = "Left"
rules = {
"Subtract[x_, y_]": "Plus[x, Times[-1, y]]",
}
class Minus(PrefixOperator):
"""
<dl>
<dt>'Minus[$expr$]'
<dd> is the negation of $expr$.
</dl>
>> -a //FullForm
= Times[-1, a]
'Minus' automatically distributes:
>> -(x - 2/3)
= 2 / 3 - x
'Minus' threads over lists:
>> -Range[10]
= {-1, -2, -3, -4, -5, -6, -7, -8, -9, -10}
"""
operator = "-"
precedence = 480
attributes = ("Listable", "NumericFunction")
rules = {
"Minus[x_]": "Times[-1, x]",
}
formats = {
"Minus[x_]": 'Prefix[{HoldForm[x]}, "-", 480]',
# don't put e.g. -2/3 in parentheses
"Minus[expr_Divide]": 'Prefix[{HoldForm[expr]}, "-", 399]',
"Minus[Infix[expr_, op_, 400, grouping_]]": (
'Prefix[{Infix[expr, op, 400, grouping]}, "-", 399]'
),
}
def apply_int(self, x, evaluation):
"Minus[x_Integer]"
return Integer(-x.to_sympy())
def create_infix(items, operator, prec, grouping):
if len(items) == 1:
return items[0]
else:
return Expression(
"Infix",
Expression("List", *items),
String(operator),
prec,
Symbol(grouping),
)
class Times(BinaryOperator, SympyFunction):
"""
<dl>
<dt>'Times[$a$, $b$, ...]'</dt>
<dt>'$a$ * $b$ * ...'</dt>
<dt>'$a$ $b$ ...'</dt>
<dd>represents the product of the terms $a$, $b$, ...
</dl>
>> 10 * 2
= 20
>> 10 2
= 20
>> a * a
= a ^ 2
>> x ^ 10 * x ^ -2
= x ^ 8
>> {1, 2, 3} * 4
= {4, 8, 12}
>> Times @@ {1, 2, 3, 4}
= 24
>> IntegerLength[Times@@Range[5000]]
= 16326
'Times' has default value 1:
>> DefaultValues[Times]
= {HoldPattern[Default[Times]] :> 1}
>> a /. n_. * x_ :> {n, x}
= {1, a}
#> -a*b // FullForm
= Times[-1, a, b]
#> -(x - 2/3)
= 2 / 3 - x
#> -x*2
= -2 x
#> -(h/2) // FullForm
= Times[Rational[-1, 2], h]
#> x / x
= 1
#> 2x^2 / x^2
= 2
#> 3. Pi
= 9.42478
#> Head[3 * I]
= Complex
#> Head[Times[I, 1/2]]
= Complex
#> Head[Pi * I]
= Times
#> 3 * a //InputForm
= 3*a
#> 3 * a //OutputForm
= 3 a
#> -2.123456789 x
= -2.12346 x
#> -2.123456789 I
= 0. - 2.12346 I
#> N[Pi, 30] * I
= 3.14159265358979323846264338328 I
#> N[I Pi, 30]
= 3.14159265358979323846264338328 I
#> N[Pi * E, 30]
= 8.53973422267356706546355086955
#> N[Pi, 30] * N[E, 30]
= 8.53973422267356706546355086955
#> N[Pi, 30] * E
= 8.53973422267356706546355086955
#> % // Precision
= 30.
"""
operator = "*"
operator_display = " "
precedence = 400
attributes = (
"Flat",
"Listable",
"NumericFunction",
"OneIdentity",
"Orderless",
"Protected",
)
defaults = {
None: "1",
}
default_formats = False
sympy_name = "Mul"
rules = {}
formats = {}
def format_times(self, items, evaluation, op="\u2062"):
"Times[items__]"
def inverse(item):
if item.has_form("Power", 2) and isinstance( # noqa
item.leaves[1], (Integer, Rational, Real)
):
neg = -item.leaves[1]
if neg.same(Integer(1)):
return item.leaves[0]
else:
return Expression("Power", item.leaves[0], neg)
else:
return item
items = items.get_sequence()
positive = []
negative = []
for item in items:
if (
item.has_form("Power", 2)
and isinstance(item.leaves[1], (Integer, Rational, Real))
and item.leaves[1].to_sympy() < 0
): # nopep8
negative.append(inverse(item))
elif isinstance(item, Rational):
numerator = item.numerator()
if not numerator.same(Integer(1)):
positive.append(numerator)
negative.append(item.denominator())
else:
positive.append(item)
if positive and positive[0].get_int_value() == -1:
del positive[0]
minus = True
else:
minus = False
positive = [Expression("HoldForm", item) for item in positive]
negative = [Expression("HoldForm", item) for item in negative]
if positive:
positive = create_infix(positive, op, 400, "None")
else:
positive = Integer(1)
if negative:
negative = create_infix(negative, op, 400, "None")
result = Expression(
"Divide",
Expression("HoldForm", positive),
Expression("HoldForm", negative),
)
else:
result = positive
if minus:
result = Expression(
"Minus", result
) # Expression('PrecedenceForm', result, 481))
result = Expression("HoldForm", result)
return result
def format_inputform(self, items, evaluation):
"InputForm: Times[items__]"
return self.format_times(items, evaluation, op="*")
def format_standardform(self, items, evaluation):
"StandardForm: Times[items__]"
return self.format_times(items, evaluation, op=" ")
def format_outputform(self, items, evaluation):
"OutputForm: Times[items__]"
return self.format_times(items, evaluation, op=" ")
def apply(self, items, evaluation):
"Times[items___]"
items = items.numerify(evaluation).get_sequence()
leaves = []
numbers = []
prec = min_prec(*items)
is_machine_precision = any(item.is_machine_precision() for item in items)
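        # prec is the minimum precision among the numeric arguments, or None when
        # all of them are exact; machine precision takes priority below.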
# find numbers and simplify Times -> Power
for item in items:
if isinstance(item, Number):
numbers.append(item)
elif leaves and item == leaves[-1]:
leaves[-1] = Expression("Power", leaves[-1], Integer(2))
elif (
leaves
and item.has_form("Power", 2)
and leaves[-1].has_form("Power", 2)
and item.leaves[0].same(leaves[-1].leaves[0])
):
leaves[-1] = Expression(
"Power",
leaves[-1].leaves[0],
Expression("Plus", item.leaves[1], leaves[-1].leaves[1]),
)
elif (
leaves and item.has_form("Power", 2) and item.leaves[0].same(leaves[-1])
):
leaves[-1] = Expression(
"Power", leaves[-1], Expression("Plus", item.leaves[1], Integer(1))
)
elif (
leaves
and leaves[-1].has_form("Power", 2)
and leaves[-1].leaves[0].same(item)
):
leaves[-1] = Expression(
"Power", item, Expression("Plus", Integer(1), leaves[-1].leaves[1])
)
else:
leaves.append(item)
if numbers:
if prec is not None:
if is_machine_precision:
numbers = [item.to_mpmath() for item in numbers]
number = mpmath.fprod(numbers)
number = Number.from_mpmath(number)
else:
with mpmath.workprec(prec):
numbers = [item.to_mpmath() for item in numbers]
number = mpmath.fprod(numbers)
number = Number.from_mpmath(number, dps(prec))
else:
number = sympy.Mul(*[item.to_sympy() for item in numbers])
number = from_sympy(number)
else:
number = Integer(1)
if number.same(Integer(1)):
number = None
elif number.is_zero:
return number
elif number.same(Integer(-1)) and leaves and leaves[0].has_form("Plus", None):
leaves[0] = Expression(
leaves[0].get_head(),
*[Expression("Times", Integer(-1), leaf) for leaf in leaves[0].leaves]
)
number = None
for leaf in leaves:
leaf.clear_cache()
if number is not None:
leaves.insert(0, number)
if not leaves:
return Integer(1)
elif len(leaves) == 1:
return leaves[0]
else:
return Expression("Times", *leaves)
class Divide(BinaryOperator):
"""
<dl>
<dt>'Divide[$a$, $b$]'</dt>
<dt>'$a$ / $b$'</dt>
<dd>represents the division of $a$ by $b$.
</dl>
>> 30 / 5
= 6
>> 1 / 8
= 1 / 8
>> Pi / 4
= Pi / 4
Use 'N' or a decimal point to force numeric evaluation:
>> Pi / 4.0
= 0.785398
>> 1 / 8
= 1 / 8
>> N[%]
= 0.125
Nested divisions:
>> a / b / c
= a / (b c)
>> a / (b / c)
= a c / b
>> a / b / (c / (d / e))
= a d / (b c e)
>> a / (b ^ 2 * c ^ 3 / e)
= a e / (b ^ 2 c ^ 3)
#> 1 / 4.0
= 0.25
#> 10 / 3 // FullForm
= Rational[10, 3]
#> a / b // FullForm
= Times[a, Power[b, -1]]
"""
operator = "/"
precedence = 470
attributes = ("Listable", "NumericFunction")
grouping = "Left"
default_formats = False
rules = {
"Divide[x_, y_]": "Times[x, Power[y, -1]]",
"MakeBoxes[Divide[x_, y_], f:StandardForm|TraditionalForm]": (
"FractionBox[MakeBoxes[x, f], MakeBoxes[y, f]]"
),
}
formats = {
(("InputForm", "OutputForm"), "Divide[x_, y_]"): (
'Infix[{HoldForm[x], HoldForm[y]}, "/", 400, Left]'
),
}
class Power(BinaryOperator, _MPMathFunction):
"""
<dl>
<dt>'Power[$a$, $b$]'</dt>
<dt>'$a$ ^ $b$'</dt>
<dd>represents $a$ raised to the power of $b$.
</dl>
>> 4 ^ (1/2)
= 2
>> 4 ^ (1/3)
= 2 ^ (2 / 3)
>> 3^123
= 48519278097689642681155855396759336072749841943521979872827
>> (y ^ 2) ^ (1/2)
= Sqrt[y ^ 2]
>> (y ^ 2) ^ 3
= y ^ 6
>> Plot[Evaluate[Table[x^y, {y, 1, 5}]], {x, -1.5, 1.5}, AspectRatio -> 1]
= -Graphics-
Use a decimal point to force numeric evaluation:
>> 4.0 ^ (1/3)
= 1.5874
'Power' has default value 1 for its second argument:
>> DefaultValues[Power]
= {HoldPattern[Default[Power, 2]] :> 1}
>> a /. x_ ^ n_. :> {x, n}
= {a, 1}
'Power' can be used with complex numbers:
>> (1.5 + 1.0 I) ^ 3.5
= -3.68294 + 6.95139 I
>> (1.5 + 1.0 I) ^ (3.5 + 1.5 I)
= -3.19182 + 0.645659 I
#> 1/0
: Infinite expression 1 / 0 encountered.
= ComplexInfinity
#> 0 ^ -2
: Infinite expression 1 / 0 ^ 2 encountered.
= ComplexInfinity
#> 0 ^ (-1/2)
: Infinite expression 1 / Sqrt[0] encountered.
= ComplexInfinity
#> 0 ^ -Pi
: Infinite expression 1 / 0 ^ 3.14159 encountered.
= ComplexInfinity
#> 0 ^ (2 I E)
: Indeterminate expression 0 ^ (0. + 5.43656 I) encountered.
= Indeterminate
#> 0 ^ - (Pi + 2 E I)
: Infinite expression 0 ^ (-3.14159 - 5.43656 I) encountered.
= ComplexInfinity
#> 0 ^ 0
: Indeterminate expression 0 ^ 0 encountered.
= Indeterminate
#> Sqrt[-3+2. I]
= 0.550251 + 1.81735 I
#> Sqrt[-3+2 I]
= Sqrt[-3 + 2 I]
#> (3/2+1/2I)^2
= 2 + 3 I / 2
#> I ^ I
= I ^ I
#> 2 ^ 2.0
= 4.
#> Pi ^ 4.
= 97.4091
#> a ^ b
= a ^ b
"""
operator = "^"
precedence = 590
attributes = ("Listable", "NumericFunction", "OneIdentity")
grouping = "Right"
default_formats = False
sympy_name = "Pow"
mpmath_name = "power"
nargs = 2
messages = {
"infy": "Infinite expression `1` encountered.",
"indet": "Indeterminate expression `1` encountered.",
}
defaults = {
2: "1",
}
formats = {
Expression(
"Power",
Expression("Pattern", Symbol("x"), Expression("Blank")),
Rational(1, 2),
): "HoldForm[Sqrt[x]]",
(("InputForm", "OutputForm"), "x_ ^ y_"): (
'Infix[{HoldForm[x], HoldForm[y]}, "^", 590, Right]'
),
("", "x_ ^ y_"): (
"PrecedenceForm[Superscript[OuterPrecedenceForm[HoldForm[x], 590],"
" HoldForm[y]], 590]"
),
("", "x_ ^ y_?Negative"): (
"HoldForm[Divide[1, #]]&[If[y==-1, HoldForm[x], HoldForm[x]^-y]]"
),
}
rules = {
"Power[]": "1",
"Power[x_]": "x",
}
def apply_check(self, x, y, evaluation):
"Power[x_, y_]"
# Power uses _MPMathFunction but does some error checking first
if isinstance(x, Number) and x.is_zero:
if isinstance(y, Number):
y_err = y
else:
y_err = Expression("N", y).evaluate(evaluation)
if isinstance(y_err, Number):
py_y = y_err.round_to_float(permit_complex=True).real
if py_y > 0:
return x
elif py_y == 0.0:
evaluation.message("Power", "indet", Expression("Power", x, y_err))
return Symbol("Indeterminate")
elif py_y < 0:
evaluation.message("Power", "infy", Expression("Power", x, y_err))
return Symbol("ComplexInfinity")
result = self.apply(Expression("Sequence", x, y), evaluation)
if result is None or result != SymbolNull:
return result
class Sqrt(SympyFunction):
"""
<dl>
<dt>'Sqrt[$expr$]'
<dd>returns the square root of $expr$.
</dl>
>> Sqrt[4]
= 2
>> Sqrt[5]
= Sqrt[5]
>> Sqrt[5] // N
= 2.23607
>> Sqrt[a]^2
= a
Complex numbers:
>> Sqrt[-4]
= 2 I
>> I == Sqrt[-1]
= True
>> Plot[Sqrt[a^2], {a, -2, 2}]
= -Graphics-
#> N[Sqrt[2], 50]
= 1.4142135623730950488016887242096980785696718753769
"""
attributes = ("Listable", "NumericFunction")
rules = {
"Sqrt[x_]": "x ^ (1/2)",
"MakeBoxes[Sqrt[x_], f:StandardForm|TraditionalForm]": (
"SqrtBox[MakeBoxes[x, f]]"
),
}
class CubeRoot(Builtin):
"""
<dl>
<dt>'CubeRoot[$n$]'
<dd>finds the real-valued cube root of the given $n$.
</dl>
>> CubeRoot[16]
= 2 2 ^ (1 / 3)
#> CubeRoot[-5]
= -5 ^ (1 / 3)
#> CubeRoot[-510000]
= -10 510 ^ (1 / 3)
#> CubeRoot[-5.1]
= -1.7213
#> CubeRoot[b]
= b ^ (1 / 3)
#> CubeRoot[-0.5]
= -0.793701
#> CubeRoot[3 + 4 I]
: The parameter 3 + 4 I should be real valued.
= (3 + 4 I) ^ (1 / 3)
"""
attributes = {"Listable", "NumericFunction", "ReadProtected"}
messages = {
"preal": "The parameter `1` should be real valued.",
}
rules = {
"CubeRoot[n_?NumericQ]": "If[n > 0, Power[n, Divide[1, 3]], Times[-1, Power[Times[-1, n], Divide[1, 3]]]]",
"CubeRoot[n_]": "Power[n, Divide[1, 3]]",
"MakeBoxes[CubeRoot[x_], f:StandardForm|TraditionalForm]": (
"RadicalBox[MakeBoxes[x, f], 3]"
),
}
def apply(self, n, evaluation):
"CubeRoot[n_Complex]"
evaluation.message("CubeRoot", "preal", n)
return Expression("Power", n, Expression("Divide", 1, 3))
class Infinity(SympyConstant):
"""
<dl>
<dt>'Infinity'
<dd>represents an infinite real quantity.
</dl>
>> 1 / Infinity
= 0
>> Infinity + 100
= Infinity
Use 'Infinity' in sum and limit calculations:
>> Sum[1/x^2, {x, 1, Infinity}]
= Pi ^ 2 / 6
#> FullForm[Infinity]
= DirectedInfinity[1]
#> (2 + 3.5*I) / Infinity
= 0. + 0. I
#> Infinity + Infinity
= Infinity
#> Infinity / Infinity
: Indeterminate expression 0 Infinity encountered.
= Indeterminate
"""
sympy_name = "oo"
python_equivalent = math.inf
rules = {
"Infinity": "DirectedInfinity[1]",
"MakeBoxes[Infinity, f:StandardForm|TraditionalForm]": ('"\\[Infinity]"'),
}
class ComplexInfinity(SympyConstant):
"""
<dl>
<dt>'ComplexInfinity'
<dd>represents an infinite complex quantity of undetermined direction.
</dl>
>> 1 / ComplexInfinity
= 0
>> ComplexInfinity * Infinity
= ComplexInfinity
>> FullForm[ComplexInfinity]
= DirectedInfinity[]
## Issue689
#> ComplexInfinity + ComplexInfinity
: Indeterminate expression ComplexInfinity + ComplexInfinity encountered.
= Indeterminate
#> ComplexInfinity + Infinity
: Indeterminate expression ComplexInfinity + Infinity encountered.
= Indeterminate
"""
sympy_name = "zoo"
rules = {
"ComplexInfinity": "DirectedInfinity[]",
}
class DirectedInfinity(SympyFunction):
"""
<dl>
<dt>'DirectedInfinity[$z$]'</dt>
<dd>represents an infinite multiple of the complex number $z$.
<dt>'DirectedInfinity[]'</dt>
<dd>is the same as 'ComplexInfinity'.</dd>
</dl>
>> DirectedInfinity[1]
= Infinity
>> DirectedInfinity[]
= ComplexInfinity
>> DirectedInfinity[1 + I]
= (1 / 2 + I / 2) Sqrt[2] Infinity
>> 1 / DirectedInfinity[1 + I]
= 0
>> DirectedInfinity[1] + DirectedInfinity[-1]
: Indeterminate expression -Infinity + Infinity encountered.
= Indeterminate
#> DirectedInfinity[1+I]+DirectedInfinity[2+I]
= (2 / 5 + I / 5) Sqrt[5] Infinity + (1 / 2 + I / 2) Sqrt[2] Infinity
#> DirectedInfinity[Sqrt[3]]
= Infinity
"""
rules = {
"DirectedInfinity[args___] ^ -1": "0",
"0 * DirectedInfinity[args___]": "Message[Infinity::indet, Unevaluated[0 DirectedInfinity[args]]]; Indeterminate",
"DirectedInfinity[a_?NumericQ] /; N[Abs[a]] != 1": "DirectedInfinity[a / Abs[a]]",
"DirectedInfinity[a_] * DirectedInfinity[b_]": "DirectedInfinity[a*b]",
"DirectedInfinity[] * DirectedInfinity[args___]": "DirectedInfinity[]",
"DirectedInfinity[0]": "DirectedInfinity[]",
"z_?NumberQ * DirectedInfinity[]": "DirectedInfinity[]",
"z_?NumberQ * DirectedInfinity[a_]": "DirectedInfinity[z * a]",
"DirectedInfinity[a_] + DirectedInfinity[b_] /; b == -a": (
"Message[Infinity::indet,"
" Unevaluated[DirectedInfinity[a] + DirectedInfinity[b]]];"
"Indeterminate"
),
"DirectedInfinity[] + DirectedInfinity[args___]": (
"Message[Infinity::indet,"
" Unevaluated[DirectedInfinity[] + DirectedInfinity[args]]];"
"Indeterminate"
),
"DirectedInfinity[args___] + _?NumberQ": "DirectedInfinity[args]",
}
formats = {
"DirectedInfinity[1]": "HoldForm[Infinity]",
"DirectedInfinity[-1]": "HoldForm[-Infinity]",
"DirectedInfinity[]": "HoldForm[ComplexInfinity]",
"DirectedInfinity[z_?NumericQ]": "HoldForm[z Infinity]",
}
def to_sympy(self, expr, **kwargs):
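        # Only DirectedInfinity[1] and DirectedInfinity[-1] translate to SymPy's
        # oo and -oo; any other direction is left unconverted (returns None).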
if len(expr.leaves) == 1:
dir = expr.leaves[0].get_int_value()
if dir == 1:
return sympy.oo
elif dir == -1:
return -sympy.oo
class Re(SympyFunction):
"""
<dl>
<dt>'Re[$z$]'
<dd>returns the real component of the complex number $z$.
</dl>
>> Re[3+4I]
= 3
>> Plot[{Cos[a], Re[E^(I a)]}, {a, 0, 2 Pi}]
= -Graphics-
#> Im[0.5 + 2.3 I]
= 2.3
#> % // Precision
= MachinePrecision
"""
attributes = ("Listable", "NumericFunction")
def apply_complex(self, number, evaluation):
"Re[number_Complex]"
return number.real
def apply_number(self, number, evaluation):
"Re[number_?NumberQ]"
return number
def apply(self, number, evaluation):
"Re[number_]"
return from_sympy(sympy.re(number.to_sympy().expand(complex=True)))
class Im(SympyFunction):
"""
<dl>
<dt>'Im[$z$]'
<dd>returns the imaginary component of the complex number $z$.
</dl>
>> Im[3+4I]
= 4
>> Plot[{Sin[a], Im[E^(I a)]}, {a, 0, 2 Pi}]
= -Graphics-
#> Re[0.5 + 2.3 I]
= 0.5
#> % // Precision
= MachinePrecision
"""
attributes = ("Listable", "NumericFunction")
def apply_complex(self, number, evaluation):
"Im[number_Complex]"
return number.imag
def apply_number(self, number, evaluation):
"Im[number_?NumberQ]"
return Integer(0)
def apply(self, number, evaluation):
"Im[number_]"
return from_sympy(sympy.im(number.to_sympy().expand(complex=True)))
class Conjugate(_MPMathFunction):
"""
<dl>
<dt>'Conjugate[$z$]'
<dd>returns the complex conjugate of the complex number $z$.
</dl>
>> Conjugate[3 + 4 I]
= 3 - 4 I
>> Conjugate[3]
= 3
>> Conjugate[a + b * I]
= Conjugate[a] - I Conjugate[b]
>> Conjugate[{{1, 2 + I 4, a + I b}, {I}}]
= {{1, 2 - 4 I, Conjugate[a] - I Conjugate[b]}, {-I}}
## Issue #272
#> {Conjugate[Pi], Conjugate[E]}
= {Pi, E}
>> Conjugate[1.5 + 2.5 I]
= 1.5 - 2.5 I
"""
mpmath_name = "conj"
class Abs(_MPMathFunction):
"""
<dl>
<dt>'Abs[$x$]'
<dd>returns the absolute value of $x$.
</dl>
>> Abs[-3]
= 3
'Abs' returns the magnitude of complex numbers:
>> Abs[3 + I]
= Sqrt[10]
>> Abs[3.0 + I]
= 3.16228
>> Plot[Abs[x], {x, -4, 4}]
= -Graphics-
#> Abs[I]
= 1
#> Abs[a - b]
= Abs[a - b]
#> Abs[Sqrt[3]]
= Sqrt[3]
"""
sympy_name = "Abs"
mpmath_name = "fabs" # mpmath actually uses python abs(x) / x.__abs__()
class Sign(Builtin):
"""
<dl>
<dt>'Sign[$x$]'
        <dd>returns -1, 0, or 1 depending on whether $x$ is negative, zero, or positive.
</dl>
>> Sign[19]
= 1
>> Sign[-6]
= -1
>> Sign[0]
= 0
>> Sign[{-5, -10, 15, 20, 0}]
= {-1, -1, 1, 1, 0}
#> Sign[{1, 2.3, 4/5, {-6.7, 0}, {8/9, -10}}]
= {1, 1, 1, {-1, 0}, {1, -1}}
>> Sign[3 - 4*I]
= 3 / 5 - 4 I / 5
#> Sign[1 - 4*I] == (1/17 - 4 I/17) Sqrt[17]
= True
#> Sign[4, 5, 6]
: Sign called with 3 arguments; 1 argument is expected.
= Sign[4, 5, 6]
#> Sign["20"]
= Sign[20]
"""
# Sympy and mpmath do not give the desired form of complex number
# sympy_name = 'sign'
# mpmath_name = 'sign'
attributes = ("Listable", "NumericFunction")
messages = {
"argx": "Sign called with `1` arguments; 1 argument is expected.",
}
def apply(self, x, evaluation):
"Sign[x_]"
if isinstance(x, Complex):
return Expression("Times", x, Expression("Power", Expression("Abs", x), -1))
sympy_x = x.to_sympy()
if sympy_x is None:
return None
return from_sympy(sympy.sign(sympy_x))
def apply_error(self, x, seqs, evaluation):
"Sign[x_, seqs__]"
return evaluation.message("Sign", "argx", Integer(len(seqs.get_sequence()) + 1))
class I(Predefined):
"""
<dl>
<dt>'I'
<dd>represents the imaginary number 'Sqrt[-1]'.
</dl>
>> I^2
= -1
>> (3+I)*(3-I)
= 10
"""
python_equivalent = 1j
def evaluate(self, evaluation):
return Complex(Integer(0), Integer(1))
class Indeterminate(SympyConstant):
"""
<dl>
<dt>'Indeterminate'
<dd>represents an indeterminate result.
</dl>
>> 0^0
: Indeterminate expression 0 ^ 0 encountered.
= Indeterminate
>> Tan[Indeterminate]
= Indeterminate
"""
sympy_name = "nan"
class NumberQ(Test):
"""
<dl>
<dt>'NumberQ[$expr$]'
<dd>returns 'True' if $expr$ is an explicit number, and 'False' otherwise.
</dl>
>> NumberQ[3+I]
= True
>> NumberQ[5!]
= True
>> NumberQ[Pi]
= False
"""
def test(self, expr):
return isinstance(expr, Number)
class PossibleZeroQ(SympyFunction):
"""
<dl>
<dt>'PossibleZeroQ[$expr$]'
        <dd>returns 'True' if basic symbolic and numerical methods suggest that $expr$ has value zero, and 'False' otherwise.
</dl>
Test whether a numeric expression is zero:
>> PossibleZeroQ[E^(I Pi/4) - (-1)^(1/4)]
= True
The determination is approximate.
Test whether a symbolic expression is likely to be identically zero:
>> PossibleZeroQ[(x + 1) (x - 1) - x^2 + 1]
= True
>> PossibleZeroQ[(E + Pi)^2 - E^2 - Pi^2 - 2 E Pi]
= True
Show that a numeric expression is nonzero:
>> PossibleZeroQ[E^Pi - Pi^E]
= False
>> PossibleZeroQ[1/x + 1/y - (x + y)/(x y)]
= True
Decide that a numeric expression is zero, based on approximate computations:
>> PossibleZeroQ[2^(2 I) - 2^(-2 I) - 2 I Sin[Log[4]]]
= True
>> PossibleZeroQ[Sqrt[x^2] - x]
= False
"""
sympy_name = "_iszero"
def apply(self, expr, evaluation):
"%(name)s[expr_]"
from sympy.matrices.utilities import _iszero
sympy_expr = expr.to_sympy()
result = _iszero(sympy_expr)
if result is None:
# Can't get exact answer, so try approximate equal
numeric_val = Expression("N", expr).evaluate(evaluation)
if numeric_val and hasattr(numeric_val, "is_approx_zero"):
result = numeric_val.is_approx_zero
elif (
Expression("NumericQ", numeric_val).evaluate(evaluation) == SymbolFalse
):
return (
SymbolTrue
if Expression("Simplify", expr).evaluate(evaluation) == Integer(0)
else SymbolFalse
)
return from_python(result)
class RealNumberQ(Test):
"""
<dl>
<dt>'RealNumberQ[$expr$]'
<dd>returns 'True' if $expr$ is an explicit number with no imaginary component.
</dl>
>> RealNumberQ[10]
= True
>> RealNumberQ[4.0]
= True
>> RealNumberQ[1+I]
= False
>> RealNumberQ[0 * I]
= True
>> RealNumberQ[0.0 * I]
= False
"""
def test(self, expr):
return isinstance(expr, (Integer, Rational, Real))
class MachineNumberQ(Test):
"""
<dl>
<dt>'MachineNumberQ[$expr$]'
<dd>returns 'True' if $expr$ is a machine-precision real or complex number.
</dl>
    >> MachineNumberQ[3.14159265358979]
     = True
>> MachineNumberQ[3.14159265358979324]
= False
>> MachineNumberQ[1.5 + 2.3 I]
= True
>> MachineNumberQ[2.71828182845904524 + 3.14159265358979324 I]
= False
#> MachineNumberQ[1.5 + 3.14159265358979324 I]
= True
#> MachineNumberQ[1.5 + 5 I]
= True
"""
def test(self, expr):
return expr.is_machine_precision()
class ExactNumberQ(Test):
"""
<dl>
<dt>'ExactNumberQ[$expr$]'
<dd>returns 'True' if $expr$ is an exact number, and 'False' otherwise.
</dl>
>> ExactNumberQ[10]
= True
>> ExactNumberQ[4.0]
= False
>> ExactNumberQ[n]
= False
'ExactNumberQ' can be applied to complex numbers:
>> ExactNumberQ[1 + I]
= True
>> ExactNumberQ[1 + 1. I]
= False
"""
def test(self, expr):
return isinstance(expr, Number) and not expr.is_inexact()
class InexactNumberQ(Test):
"""
<dl>
<dt>'InexactNumberQ[$expr$]'
<dd>returns 'True' if $expr$ is not an exact number, and 'False' otherwise.
</dl>
>> InexactNumberQ[a]
= False
>> InexactNumberQ[3.0]
= True
>> InexactNumberQ[2/3]
= False
'InexactNumberQ' can be applied to complex numbers:
>> InexactNumberQ[4.0+I]
= True
"""
def test(self, expr):
return isinstance(expr, Number) and expr.is_inexact()
class IntegerQ(Test):
"""
<dl>
<dt>'IntegerQ[$expr$]'
<dd>returns 'True' if $expr$ is an integer, and 'False' otherwise.
</dl>
>> IntegerQ[3]
= True
>> IntegerQ[Pi]
= False
"""
def test(self, expr):
return isinstance(expr, Integer)
class Integer_(Builtin):
"""
<dl>
<dt>'Integer'
<dd>is the head of integers.
</dl>
>> Head[5]
= Integer
## Test large Integer comparison bug
#> {a, b} = {2^10000, 2^10000 + 1}; {a == b, a < b, a <= b}
= {False, True, True}
"""
name = "Integer"
class Real_(Builtin):
"""
<dl>
<dt>'Real'
<dd>is the head of real (inexact) numbers.
</dl>
>> x = 3. ^ -20;
>> InputForm[x]
= 2.8679719907924413*^-10
>> Head[x]
= Real
## Formatting tests
#> 1. * 10^6
= 1.*^6
#> 1. * 10^5
= 100000.
#> -1. * 10^6
= -1.*^6
#> -1. * 10^5
= -100000.
#> 1. * 10^-6
= 1.*^-6
#> 1. * 10^-5
= 0.00001
#> -1. * 10^-6
= -1.*^-6
#> -1. * 10^-5
= -0.00001
## Mathematica treats zero strangely
#> 0.0000000000000
= 0.
#> 0.0000000000000000000000000000
= 0.*^-28
## Parse *^ Notation
#> 1.5*^24
= 1.5*^24
#> 1.5*^+24
= 1.5*^24
#> 1.5*^-24
= 1.5*^-24
## Don't accept *^ with spaces
#> 1.5 *^10
: "1.5 *" cannot be followed by "^10" (line 1 of "<test>").
#> 1.5*^ 10
: "1.5*" cannot be followed by "^ 10" (line 1 of "<test>").
## Issue654
#> 1^^2
: Requested base 1 in 1^^2 should be between 2 and 36.
: Expression cannot begin with "1^^2" (line 1 of "<test>").
#> 2^^0101
= 5
#> 2^^01210
: Digit at position 3 in 01210 is too large to be used in base 2.
: Expression cannot begin with "2^^01210" (line 1 of "<test>").
#> 16^^5g
: Digit at position 2 in 5g is too large to be used in base 16.
: Expression cannot begin with "16^^5g" (line 1 of "<test>").
#> 36^^0123456789abcDEFxyzXYZ
= 14142263610074677021975869033659
#> 37^^3
: Requested base 37 in 37^^3 should be between 2 and 36.
: Expression cannot begin with "37^^3" (line 1 of "<test>").
"""
name = "Real"
class Rational_(Builtin):
"""
<dl>
<dt>'Rational'</dt>
<dd>is the head of rational numbers.</dd>
<dt>'Rational[$a$, $b$]'</dt>
<dd>constructs the rational number $a$ / $b$.</dd>
</dl>
>> Head[1/2]
= Rational
>> Rational[1, 2]
= 1 / 2
#> -2/3
= -2 / 3
"""
name = "Rational"
def apply(self, n, m, evaluation):
"Rational[n_Integer, m_Integer]"
if m.to_sympy() == 1:
return Integer(n.to_sympy())
else:
return Rational(n.to_sympy(), m.to_sympy())
class Complex_(Builtin):
"""
<dl>
<dt>'Complex'
<dd>is the head of complex numbers.
<dt>'Complex[$a$, $b$]'
<dd>constructs the complex number '$a$ + I $b$'.
</dl>
>> Head[2 + 3*I]
= Complex
>> Complex[1, 2/3]
= 1 + 2 I / 3
>> Abs[Complex[3, 4]]
= 5
#> OutputForm[Complex[2.0 ^ 40, 3]]
= 1.09951*^12 + 3. I
#> InputForm[Complex[2.0 ^ 40, 3]]
= 1.099511627776*^12 + 3.*I
#> -2 / 3 - I
= -2 / 3 - I
#> Complex[10, 0]
= 10
#> 0. + I
= 0. + 1. I
#> 1 + 0 I
= 1
#> Head[%]
= Integer
#> Complex[0.0, 0.0]
= 0. + 0. I
#> 0. I
= 0. + 0. I
#> 0. + 0. I
= 0. + 0. I
#> 1. + 0. I
= 1. + 0. I
#> 0. + 1. I
= 0. + 1. I
## Check Nesting Complex
#> Complex[1, Complex[0, 1]]
= 0
#> Complex[1, Complex[1, 0]]
= 1 + I
#> Complex[1, Complex[1, 1]]
= I
"""
name = "Complex"
def apply(self, r, i, evaluation):
"Complex[r_?NumberQ, i_?NumberQ]"
if isinstance(r, Complex) or isinstance(i, Complex):
sym_form = r.to_sympy() + sympy.I * i.to_sympy()
r, i = sym_form.simplify().as_real_imag()
r, i = from_sympy(r), from_sympy(i)
return Complex(r, i)
class Factorial(PostfixOperator, _MPMathFunction):
"""
<dl>
<dt>'Factorial[$n$]'
<dt>'$n$!'
<dd>computes the factorial of $n$.
</dl>
>> 20!
= 2432902008176640000
'Factorial' handles numeric (real and complex) values using the gamma function:
>> 10.5!
= 1.18994*^7
>> (-3.0+1.5*I)!
= 0.0427943 - 0.00461565 I
However, the value at poles is 'ComplexInfinity':
>> (-1.)!
= ComplexInfinity
'Factorial' has the same operator ('!') as 'Not', but with higher precedence:
>> !a! //FullForm
= Not[Factorial[a]]
#> 0!
= 1
"""
operator = "!"
precedence = 610
mpmath_name = "factorial"
class Gamma(_MPMathMultiFunction):
"""
<dl>
<dt>'Gamma[$z$]'
<dd>is the gamma function on the complex number $z$.
<dt>'Gamma[$z$, $x$]'
<dd>is the upper incomplete gamma function.
<dt>'Gamma[$z$, $x0$, $x1$]'
<dd>is equivalent to 'Gamma[$z$, $x0$] - Gamma[$z$, $x1$]'.
</dl>
'Gamma[$z$]' is equivalent to '($z$ - 1)!':
>> Simplify[Gamma[z] - (z - 1)!]
= 0
Exact arguments:
>> Gamma[8]
= 5040
>> Gamma[1/2]
= Sqrt[Pi]
>> Gamma[1, x]
= E ^ (-x)
>> Gamma[0, x]
= ExpIntegralE[1, x]
Numeric arguments:
>> Gamma[123.78]
= 4.21078*^204
>> Gamma[1. + I]
= 0.498016 - 0.15495 I
Both 'Gamma' and 'Factorial' functions are continuous:
>> Plot[{Gamma[x], x!}, {x, 0, 4}]
= -Graphics-
## Issue 203
#> N[Gamma[24/10], 100]
= 1.242169344504305404913070252268300492431517240992022966055507541481863694148882652446155342679460339
#> N[N[Gamma[24/10],100]/N[Gamma[14/10],100],100]
= 1.400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
#> % // Precision
= 100.
#> Gamma[1.*^20]
: Overflow occurred in computation.
= Overflow[]
## Needs mpmath support for lowergamma
#> Gamma[1., 2.]
= Gamma[1., 2.]
"""
mpmath_names = {
1: "gamma",
}
sympy_names = {
1: "gamma",
2: "uppergamma",
}
rules = {
"Gamma[z_, x0_, x1_]": "Gamma[z, x0] - Gamma[z, x1]",
"Gamma[1 + z_]": "z!",
}
def get_sympy_names(self):
return ["gamma", "uppergamma", "lowergamma"]
def from_sympy(self, sympy_name, leaves):
if sympy_name == "lowergamma":
# lowergamma(z, x) -> Gamma[z, 0, x]
z, x = leaves
return Expression(self.get_name(), z, Integer(0), x)
else:
return Expression(self.get_name(), *leaves)
class Pochhammer(SympyFunction):
"""
<dl>
<dt>'Pochhammer[$a$, $n$]'
<dd>is the Pochhammer symbol (a)_n.
</dl>
>> Pochhammer[4, 8]
= 6652800
"""
sympy_name = "RisingFactorial"
rules = {
"Pochhammer[a_, n_]": "Gamma[a + n] / Gamma[a]",
}
class HarmonicNumber(_MPMathFunction):
"""
<dl>
    <dt>'HarmonicNumber[$n$]'
<dd>returns the $n$th harmonic number.
</dl>
>> Table[HarmonicNumber[n], {n, 8}]
= {1, 3 / 2, 11 / 6, 25 / 12, 137 / 60, 49 / 20, 363 / 140, 761 / 280}
>> HarmonicNumber[3.8]
= 2.03806
#> HarmonicNumber[-1.5]
= 0.613706
"""
rules = {
"HarmonicNumber[-1]": "ComplexInfinity",
}
sympy_name = "harmonic"
mpmath_name = "harmonic"
class Sum(_IterationFunction, SympyFunction):
"""
<dl>
<dt>'Sum[$expr$, {$i$, $imin$, $imax$}]'
<dd>evaluates the discrete sum of $expr$ with $i$ ranging from $imin$ to $imax$.
<dt>'Sum[$expr$, {$i$, $imax$}]'
<dd>same as 'Sum[$expr$, {$i$, 1, $imax$}]'.
<dt>'Sum[$expr$, {$i$, $imin$, $imax$, $di$}]'
<dd>$i$ ranges from $imin$ to $imax$ in steps of $di$.
<dt>'Sum[$expr$, {$i$, $imin$, $imax$}, {$j$, $jmin$, $jmax$}, ...]'
<dd>evaluates $expr$ as a multiple sum, with {$i$, ...}, {$j$, ...}, ... being in outermost-to-innermost order.
</dl>
>> Sum[k, {k, 1, 10}]
= 55
Double sum:
>> Sum[i * j, {i, 1, 10}, {j, 1, 10}]
= 3025
Symbolic sums are evaluated:
>> Sum[k, {k, 1, n}]
= n (1 + n) / 2
>> Sum[k, {k, n, 2 n}]
= 3 n (1 + n) / 2
>> Sum[k, {k, I, I + 1}]
= 1 + 2 I
>> Sum[1 / k ^ 2, {k, 1, n}]
= HarmonicNumber[n, 2]
>> Sum[f[i], {i, 1, 7}]
= f[1] + f[2] + f[3] + f[4] + f[5] + f[6] + f[7]
Verify algebraic identities:
>> Sum[x ^ 2, {x, 1, y}] - y * (y + 1) * (2 * y + 1) / 6
= 0
>> (-1 + a^n) Sum[a^(k n), {k, 0, m-1}] // Simplify
= -1 + (a ^ n) ^ m
Infinite sums:
>> Sum[1 / 2 ^ i, {i, 1, Infinity}]
= 1
>> Sum[1 / k ^ 2, {k, 1, Infinity}]
= Pi ^ 2 / 6
#> a=Sum[x^k*Sum[y^l,{l,0,4}],{k,0,4}]]
: "a=Sum[x^k*Sum[y^l,{l,0,4}],{k,0,4}]" cannot be followed by "]" (line 1 of "<test>").
## Issue431
#> Sum[2^(-i), {i, 1, \\[Infinity]}]
= 1
## Issue302
#> Sum[i / Log[i], {i, 1, Infinity}]
= Sum[i / Log[i], {i, 1, Infinity}]
#> Sum[Cos[Pi i], {i, 1, Infinity}]
= Sum[Cos[Pi i], {i, 1, Infinity}]
## Combinatorica V0.9 issue in computing NumberofInvolutions
>> Sum[k!, {k, 0, Quotient[4, 2]}]
= 4
"""
# Do not throw warning message for symbolic iteration bounds
throw_iterb = False
sympy_name = "Sum"
rules = _IterationFunction.rules.copy()
rules.update(
{
"MakeBoxes[Sum[f_, {i_, a_, b_, 1}],"
" form:StandardForm|TraditionalForm]": (
r'RowBox[{SubsuperscriptBox["\\[Sum]",'
r' RowBox[{MakeBoxes[i, form], "=", MakeBoxes[a, form]}],'
r" MakeBoxes[b, form]], MakeBoxes[f, form]}]"
),
}
)
def get_result(self, items):
return Expression("Plus", *items)
def to_sympy(self, expr, **kwargs):
"""
Perform summation via sympy.summation
"""
if expr.has_form("Sum", 2) and expr.leaves[1].has_form("List", 3):
index = expr.leaves[1]
arg_kwargs = kwargs.copy()
arg_kwargs["convert_all_global_functions"] = True
arg = expr.leaves[0].to_sympy(**arg_kwargs)
evaluation = kwargs.get("evaluation", None)
# Handle summation parameters: variable, min, max
var_min_max = index.leaves[:3]
bounds = [expr.to_sympy(**kwargs) for expr in var_min_max]
if evaluation:
# Min and max might be Mathics expressions. If so, evaluate them.
for i in (1, 2):
expr = var_min_max[i]
if not isinstance(expr, Symbol):
expr = expr.evaluate(evaluation)
value = expr.to_sympy(**kwargs)
bounds[i] = value
if arg is not None and None not in bounds:
return sympy.summation(arg, bounds)
class Product(_IterationFunction, SympyFunction):
"""
<dl>
<dt>'Product[$expr$, {$i$, $imin$, $imax$}]'
<dd>evaluates the discrete product of $expr$ with $i$ ranging from $imin$ to $imax$.
<dt>'Product[$expr$, {$i$, $imax$}]'
<dd>same as 'Product[$expr$, {$i$, 1, $imax$}]'.
<dt>'Product[$expr$, {$i$, $imin$, $imax$, $di$}]'
<dd>$i$ ranges from $imin$ to $imax$ in steps of $di$.
<dt>'Product[$expr$, {$i$, $imin$, $imax$}, {$j$, $jmin$, $jmax$}, ...]'
<dd>evaluates $expr$ as a multiple product, with {$i$, ...}, {$j$, ...}, ... being in outermost-to-innermost order.
</dl>
>> Product[k, {k, 1, 10}]
= 3628800
>> 10!
= 3628800
>> Product[x^k, {k, 2, 20, 2}]
= x ^ 110
>> Product[2 ^ i, {i, 1, n}]
= 2 ^ (n / 2 + n ^ 2 / 2)
>> Product[f[i], {i, 1, 7}]
= f[1] f[2] f[3] f[4] f[5] f[6] f[7]
Symbolic products involving the factorial are evaluated:
>> Product[k, {k, 3, n}]
= n! / 2
Evaluate the $n$th primorial:
>> primorial[0] = 1;
>> primorial[n_Integer] := Product[Prime[k], {k, 1, n}];
>> primorial[12]
= 7420738134810
## Used to be a bug in sympy, but now it is solved exactly!
## Again a bug in sympy - regressions between 0.7.3 and 0.7.6 (and 0.7.7?)
## #> Product[1 + 1 / i ^ 2, {i, Infinity}]
## = 1 / ((-I)! I!)
"""
throw_iterb = False
sympy_name = "Product"
rules = _IterationFunction.rules.copy()
rules.update(
{
"MakeBoxes[Product[f_, {i_, a_, b_, 1}],"
" form:StandardForm|TraditionalForm]": (
r'RowBox[{SubsuperscriptBox["\\[Product]",'
r' RowBox[{MakeBoxes[i, form], "=", MakeBoxes[a, form]}],'
r" MakeBoxes[b, form]], MakeBoxes[f, form]}]"
),
}
)
def get_result(self, items):
return Expression("Times", *items)
def to_sympy(self, expr, **kwargs):
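        # Translate Product[expr, {i, imin, imax}] into sympy.product; a
        # ZeroDivisionError raised for symbolic bounds simply leaves the
        # expression unevaluated.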
if expr.has_form("Product", 2) and expr.leaves[1].has_form("List", 3):
index = expr.leaves[1]
try:
e_kwargs = kwargs.copy()
e_kwargs["convert_all_global_functions"] = True
e = expr.leaves[0].to_sympy(**e_kwargs)
i = index.leaves[0].to_sympy(**kwargs)
start = index.leaves[1].to_sympy(**kwargs)
stop = index.leaves[2].to_sympy(**kwargs)
return sympy.product(e, (i, start, stop))
except ZeroDivisionError:
pass
class Piecewise(SympyFunction):
"""
<dl>
    <dt>'Piecewise[{{$expr1$, $cond1$}, ...}]'
        <dd>represents a piecewise function.
    <dt>'Piecewise[{{$expr1$, $cond1$}, ...}, $expr$]'
        <dd>represents a piecewise function with default $expr$.
</dl>
Heaviside function
>> Piecewise[{{0, x <= 0}}, 1]
= Piecewise[{{0, x <= 0}}, 1]
## D[%, x]
## Piecewise({{0, Or[x < 0, x > 0]}}, Indeterminate).
>> Integrate[Piecewise[{{1, x <= 0}, {-1, x > 0}}], x]
= Piecewise[{{x, x <= 0}, {-x, True}}]
>> Integrate[Piecewise[{{1, x <= 0}, {-1, x > 0}}], {x, -1, 2}]
= -1
    'Piecewise' defaults to 0 if no other case matches.
>> Piecewise[{{1, False}}]
= 0
>> Plot[Piecewise[{{Log[x], x > 0}, {x*-0.5, x < 0}}], {x, -1, 1}]
= -Graphics-
>> Piecewise[{{0 ^ 0, False}}, -1]
= -1
"""
sympy_name = "Piecewise"
attributes = ("HoldAll",)
def apply(self, items, evaluation):
"Piecewise[items__]"
result = self.to_sympy(Expression("Piecewise", *items.get_sequence()))
if result is None:
return
if not isinstance(result, sympy.Piecewise):
return from_sympy(result)
def to_sympy(self, expr, **kwargs):
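        # Convert Piecewise[{{value, condition}, ...}, default] into
        # sympy.Piecewise; any malformed case makes the conversion bail out
        # (return None) so the expression is left untouched.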
leaves = expr.leaves
if len(leaves) not in (1, 2):
return
sympy_cases = []
for case in leaves[0].leaves:
if case.get_head_name() != "System`List":
return
if len(case.leaves) != 2:
return
then, cond = case.leaves
sympy_cond = None
if isinstance(cond, Symbol):
if cond == SymbolTrue:
sympy_cond = True
elif cond == SymbolFalse:
sympy_cond = False
if sympy_cond is None:
sympy_cond = cond.to_sympy(**kwargs)
if not (sympy_cond.is_Relational or sympy_cond.is_Boolean):
return
sympy_cases.append((then.to_sympy(**kwargs), sympy_cond))
if len(leaves) == 2: # default case
sympy_cases.append((leaves[1].to_sympy(**kwargs), True))
else:
sympy_cases.append((Integer(0).to_sympy(**kwargs), True))
return sympy.Piecewise(*sympy_cases)
def from_sympy(self, sympy_name, args):
# Hack to get around weird sympy.Piecewise 'otherwise' behaviour
if str(args[-1].leaves[1]).startswith("System`_True__Dummy_"):
args[-1].leaves[1] = SymbolTrue
return Expression(self.get_name(), args)
class Boole(Builtin):
"""
<dl>
    <dt>'Boole[$expr$]'
        <dd>returns 1 if $expr$ is 'True' and 0 if $expr$ is 'False'.
</dl>
>> Boole[2 == 2]
= 1
>> Boole[7 < 5]
= 0
>> Boole[a == 7]
= Boole[a == 7]
"""
attributes = ("Listable",)
def apply(self, expr, evaluation):
"Boole[expr_]"
if isinstance(expr, Symbol):
if expr == SymbolTrue:
return Integer(1)
elif expr == SymbolFalse:
return Integer(0)
return None
| 25.689827
| 123
| 0.504605
|
92f421205a27a888e0371c2873d9d8c3a6b21a6f
| 1,649
|
py
|
Python
|
hcipy/optics/gaussian_beam.py
|
dskleingeld/hcipy
|
85cacfb7a8058506afb288e3acdf3b6059ba2b50
|
[
"MIT"
] | 1
|
2020-07-20T23:25:17.000Z
|
2020-07-20T23:25:17.000Z
|
hcipy/optics/gaussian_beam.py
|
dskleingeld/hcipy
|
85cacfb7a8058506afb288e3acdf3b6059ba2b50
|
[
"MIT"
] | null | null | null |
hcipy/optics/gaussian_beam.py
|
dskleingeld/hcipy
|
85cacfb7a8058506afb288e3acdf3b6059ba2b50
|
[
"MIT"
] | null | null | null |
import numpy as np
from ..field import Field  # evaluate() returns a Field sampled on the grid
class GaussianBeam(object):
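    """Analytic Gaussian beam parameterised by its waist radius `w0`, the axial
    distance `z` from the waist and the `wavelength` (all in the same length
    unit). The properties below expose the usual derived quantities: Rayleigh
    range, complex beam parameter, divergence, radius of curvature, Gouy phase
    and spot size."""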
def __init__(self, w0, z, wavelength):
self.w0 = w0
self.z = z
self.wavelength = wavelength
@property
def beam_waist(self):
return self.w0
@beam_waist.setter
def beam_waist(self, w0):
self.w0 = w0
@property
def zR(self):
return np.pi * self.w0**2 / self.wavelength
@zR.setter
def zR(self, zR):
self.w0 = np.sqrt(zR * self.wavelength / np.pi)
rayleigh_distance = zR
@property
def q(self):
return self.z + 1j * self.zR
@q.setter
def q(self, q):
self.z = np.real(q)
self.zR = np.imag(q)
complex_beam_parameter = q
@property
def theta(self):
return self.wavelength / (np.pi * self.w0)
@theta.setter
def theta(self, theta):
self.w0 = self.wavelength / (theta * np.pi)
beam_divergence = theta
@property
def R(self):
return self.z * (1 + (self.zR / self.z)**2)
radius_of_curvature = R
@property
def psi(self):
return np.arctan(self.z / self.zR)
gouy_phase = psi
@property
def w(self):
return self.w0 * np.sqrt(1 + (self.z / self.zR)**2)
beam_radius = w
@property
def FWHM(self):
return self.w * np.sqrt(2 * np.log(2))
full_width_half_maximum = FWHM
@property
def k(self):
return 2 * np.pi / self.wavelength
@k.setter
def k(self, k):
self.wavelength = 2 * np.pi / k
wave_number = k
def evaluate(self, grid):
if grid.is_('cartesian'):
r2 = grid.x**2 + grid.y**2
else:
r2 = grid.as_('polar').r**2
K1 = self.w0 / self.w
K2 = np.exp(-r2 / self.w**2)
K3 = np.exp(-1j * (self.k * self.z + self.k * r2 / (2 * self.R) - self.psi))
return Field(K1 * K2 * K3, grid)
__call__ = evaluate
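# Usage sketch (illustrative only; assumes a sampling grid such as one from
# hcipy's make_pupil_grid):
#   beam = GaussianBeam(w0=1e-3, z=0.5, wavelength=500e-9)
#   print(beam.w, beam.R)                        # spot size and curvature at z = 0.5
#   field = beam(make_pupil_grid(256, 0.01))     # complex field sampled on the grid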
| 17.542553
| 78
| 0.628866
|
9b02f1935838e8d7bd350a30bb0c340594f9ecbb
| 3,630
|
py
|
Python
|
201501214_Atman/Assignment 5/SGD_MF.py
|
skywalker212/fitopsY
|
e9fa10253beec9d7fb2f2a47ee741ad1157ddfc5
|
[
"MIT"
] | 1
|
2020-10-09T04:09:18.000Z
|
2020-10-09T04:09:18.000Z
|
201501214_Atman/Assignment 5/SGD_MF.py
|
skywalker212/fitopsY
|
e9fa10253beec9d7fb2f2a47ee741ad1157ddfc5
|
[
"MIT"
] | null | null | null |
201501214_Atman/Assignment 5/SGD_MF.py
|
skywalker212/fitopsY
|
e9fa10253beec9d7fb2f2a47ee741ad1157ddfc5
|
[
"MIT"
] | null | null | null |
import numpy as np
import surprise # run 'pip install scikit-surprise' to install surprise
from surprise import BaselineOnly
from surprise import Dataset
from surprise import Reader
from surprise.model_selection import cross_validate
import time
from guppy import hpy
class MatrixFacto(surprise.AlgoBase):
'''A basic rating prediction algorithm based on matrix factorization.'''
def __init__(self, learning_rate, n_epochs, n_factors):
self.lr = learning_rate # learning rate for SGD
self.n_epochs = n_epochs # number of iterations of SGD
self.n_factors = n_factors # number of factors
self.skip_train = 0
def train(self, trainset):
'''Learn the vectors p_u and q_i with SGD'''
        print('Fitting data with SGD...')
# Randomly initialize the user and item factors.
p = np.random.normal(0, .1, (trainset.n_users, self.n_factors))
q = np.random.normal(0, .1, (trainset.n_items, self.n_factors))
# SGD procedure
for _ in range(self.n_epochs):
for u, i, r_ui in trainset.all_ratings():
err = r_ui - np.dot(p[u], q[i])
# Update vectors p_u and q_i
p[u] += self.lr * err * q[i]
q[i] += self.lr * err * p[u]
# Note: in the update of q_i, we should actually use the previous (non-updated) value of p_u.
# In practice it makes almost no difference.
self.p, self.q = p, q
self.trainset = trainset
def estimate(self, u, i):
        '''Return the estimated rating of user u for item i.'''
# return scalar product between p_u and q_i if user and item are known,
# else return the average of all ratings
if self.trainset.knows_user(u) and self.trainset.knows_item(i):
return np.dot(self.p[u], self.q[i])
else:
return self.trainset.global_mean
reader = Reader(line_format='user item rating timestamp', sep='\t')
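# Benchmark: train the SGD matrix factorisation with 2-fold cross validation on
# three sample sizes (10k, 100k, 1M ratings) and report RMSE, wall-clock time
# and heap usage via guppy.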
print "-----------------------------------------------------"
print "Datasize = 10 Thousand Tuples"
data = Dataset.load_from_file('Sample_10k.data', reader=reader)
data.split(2) # split data for 2-folds cross validation
algo = MatrixFacto(learning_rate=.01, n_epochs=10, n_factors=10)
start_time = time.time()
surprise.evaluate(algo, data, measures=['RMSE'])
print("time taken for execution: {} seconds".format(time.time()-start_time))
h = hpy()
print h.heap()
print "-----------------------------------------------------"
print "-----------------------------------------------------"
print "Datasize = 100 Thousand Tuples"
data = Dataset.load_from_file('Sample_100k.data', reader=reader)
data.split(2) # split data for 2-folds cross validation
algo = MatrixFacto(learning_rate=.01, n_epochs=10, n_factors=10)
start_time = time.time()
surprise.evaluate(algo, data, measures=['RMSE'])
print("time taken for execution: {} seconds".format(time.time()-start_time))
h = hpy()
print h.heap()
print "-----------------------------------------------------"
print "-----------------------------------------------------"
print "Datasize = 1 Million Tuples"
data = Dataset.load_from_file('Sample_1m.data', reader=reader)
data.split(2) # split data for 2-folds cross validation
algo = MatrixFacto(learning_rate=.01, n_epochs=10, n_factors=10)
start_time = time.time()
surprise.evaluate(algo, data, measures=['RMSE'])
print("time taken for execution: {} seconds".format(time.time()-start_time))
h = hpy()
print h.heap()
print "-----------------------------------------------------"
| 39.89011
| 109
| 0.603857
|
fd1a0cfd70569e667d7ce8274542c0dc198424b0
| 47
|
py
|
Python
|
neutpy/crosssections/__init__.py
|
gt-frc/neutpy
|
4ae03fba5bdf34bd83ac0d88c5d6e53f3c708785
|
[
"MIT"
] | null | null | null |
neutpy/crosssections/__init__.py
|
gt-frc/neutpy
|
4ae03fba5bdf34bd83ac0d88c5d6e53f3c708785
|
[
"MIT"
] | 10
|
2020-08-05T21:29:02.000Z
|
2020-10-17T02:08:11.000Z
|
neutpy/crosssections/__init__.py
|
gt-frc/neutpy
|
4ae03fba5bdf34bd83ac0d88c5d6e53f3c708785
|
[
"MIT"
] | 1
|
2021-12-03T11:46:15.000Z
|
2021-12-03T11:46:15.000Z
|
#!/usr/bin/python
from .crosssections import *
| 15.666667
| 28
| 0.744681
|
b8afddab5c10ccdbb488449cecfaef5bbbc4a546
| 1,927
|
py
|
Python
|
web/result.py
|
swxs/home
|
652dab7c15ae9c2221c99405ce827be6ccaaccbb
|
[
"Apache-2.0"
] | 1
|
2021-09-23T10:28:34.000Z
|
2021-09-23T10:28:34.000Z
|
web/result.py
|
swxs/home
|
652dab7c15ae9c2221c99405ce827be6ccaaccbb
|
[
"Apache-2.0"
] | null | null | null |
web/result.py
|
swxs/home
|
652dab7c15ae9c2221c99405ce827be6ccaaccbb
|
[
"Apache-2.0"
] | 1
|
2021-09-23T10:28:36.000Z
|
2021-09-23T10:28:36.000Z
|
# -*- coding: utf-8 -*-
# @File : result.py
# @AUTH : swxs
# @Time : 2019/9/20 15:06
import json
import datetime
from bson import ObjectId
def encoder(obj):
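    # Serialise the common non-JSON types used in this project: datetimes and
    # dates become formatted strings, bson ObjectIds become plain strings.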
if isinstance(obj, (datetime.datetime,)):
return obj.strftime('%Y-%m-%d %H:%M:%S')
elif isinstance(obj, (datetime.date,)):
return obj.strftime('%Y-%m-%d')
elif isinstance(obj, (ObjectId,)):
return str(obj)
else:
raise Exception("Not NotImplemented")
class ComplexEncoder(json.JSONEncoder):
def default(self, obj):
try:
return encoder(obj)
except Exception:
return super(ComplexEncoder, self).default(obj)
class ResultData(object):
"""
    Result data wrapper.
"""
def __init__(self, code=0, msg=None, **kwargs):
self.code = code
self.msg = msg
self.kwargs = kwargs
@property
def data(self):
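        # Merge the instance attributes with the extra keyword data and drop the
        # raw 'kwargs' holder so only real fields are returned.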
result = {}
result.update(vars(self))
result.update(self.kwargs)
result.__delitem__('kwargs')
return result
def __setitem__(self, key, value):
setattr(self, key, value)
def __repr__(self):
return str(self.data)
def __str__(self):
return str(self.data)
def to_json(self):
return json.dumps(self.data, cls=ComplexEncoder)
def to_thrift(self, thrift):
result = thrift()
result.code = self.code
result.msg = self.msg
if "data" in self.kwargs:
result.data = self.kwargs.get("data")
return result
class ExceptionData(ResultData):
"""
    Result returned for an exception.
"""
def __init__(self, e):
super(ExceptionData, self).__init__(code=e.code, msg=str(e))
class SuccessData(ResultData):
"""
    Result returned on success.
"""
def __init__(self, **kwargs):
super(SuccessData, self).__init__(code=0, data=kwargs)
def __setitem__(self, key, value):
self.kwargs['data'][key] = value
| 21.651685
| 68
| 0.586404
|
e19c63a31b1770d2cf9406b9e4a0b8937e55d1df
| 77,076
|
py
|
Python
|
v1alpha1/swagger_client/apis/grafeas_api.py
|
dinagraves/client-python
|
c4f77332b402e61a1e1700fa0c746183008d435c
|
[
"Apache-2.0"
] | 10
|
2017-11-19T20:21:16.000Z
|
2021-04-23T06:59:33.000Z
|
v1alpha1/swagger_client/apis/grafeas_api.py
|
dinagraves/client-python
|
c4f77332b402e61a1e1700fa0c746183008d435c
|
[
"Apache-2.0"
] | 1
|
2021-06-01T21:55:48.000Z
|
2021-06-01T21:55:48.000Z
|
v1alpha1/swagger_client/apis/grafeas_api.py
|
dinagraves/client-python
|
c4f77332b402e61a1e1700fa0c746183008d435c
|
[
"Apache-2.0"
] | 1
|
2019-08-11T16:51:36.000Z
|
2019-08-11T16:51:36.000Z
|
# coding: utf-8
"""
Grafeas API
An API to insert and retrieve annotations on cloud artifacts.
OpenAPI spec version: 0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class GrafeasApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_note(self, projects_id, **kwargs):
"""
Creates a new note.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_note(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This field contains the projectId for example: \"project/{project_id} (required)
:param str note_id: The ID to use for this note.
:param Note note: The Note to be inserted
:return: Note
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_note_with_http_info(projects_id, **kwargs)
else:
(data) = self.create_note_with_http_info(projects_id, **kwargs)
return data
def create_note_with_http_info(self, projects_id, **kwargs):
"""
Creates a new note.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_note_with_http_info(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This field contains the projectId for example: \"project/{project_id} (required)
:param str note_id: The ID to use for this note.
:param Note note: The Note to be inserted
:return: Note
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'note_id', 'note']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_note" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `create_note`")
resource_path = '/v1alpha1/projects/{projectsId}/notes'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
query_params = {}
if 'note_id' in params:
query_params['noteId'] = params['note_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'note' in params:
body_params = params['note']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Note',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def create_occurrence(self, projects_id, **kwargs):
"""
Creates a new occurrence.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_occurrence(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This field contains the projectId for example: \"projects/{project_id}\" (required)
:param Occurrence occurrence: The occurrence to be inserted
:return: Occurrence
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_occurrence_with_http_info(projects_id, **kwargs)
else:
(data) = self.create_occurrence_with_http_info(projects_id, **kwargs)
return data
def create_occurrence_with_http_info(self, projects_id, **kwargs):
"""
Creates a new occurrence.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_occurrence_with_http_info(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This field contains the projectId for example: \"projects/{project_id}\" (required)
:param Occurrence occurrence: The occurrence to be inserted
:return: Occurrence
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'occurrence']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_occurrence" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `create_occurrence`")
resource_path = '/v1alpha1/projects/{projectsId}/occurrences'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'occurrence' in params:
body_params = params['occurrence']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Occurrence',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def delete_note(self, projects_id, notes_id, **kwargs):
"""
Deletes the given note from the system.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_note(projects_id, notes_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the note in the form \"projects/{project_id}/notes/{note_id}\" (required)
:param str notes_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_note_with_http_info(projects_id, notes_id, **kwargs)
else:
(data) = self.delete_note_with_http_info(projects_id, notes_id, **kwargs)
return data
def delete_note_with_http_info(self, projects_id, notes_id, **kwargs):
"""
Deletes the given note from the system.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_note_with_http_info(projects_id, notes_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the note in the form \"projects/{project_id}/notes/{note_id}\" (required)
:param str notes_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'notes_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_note" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `delete_note`")
# verify the required parameter 'notes_id' is set
if ('notes_id' not in params) or (params['notes_id'] is None):
raise ValueError("Missing the required parameter `notes_id` when calling `delete_note`")
resource_path = '/v1alpha1/projects/{projectsId}/notes/{notesId}'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'notes_id' in params:
path_params['notesId'] = params['notes_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def delete_occurrence(self, projects_id, occurrences_id, **kwargs):
"""
Deletes the given occurrence from the system.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_occurrence(projects_id, occurrences_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the occurrence in the form \"projects/{project_id}/occurrences/{occurrence_id}\" (required)
:param str occurrences_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_occurrence_with_http_info(projects_id, occurrences_id, **kwargs)
else:
(data) = self.delete_occurrence_with_http_info(projects_id, occurrences_id, **kwargs)
return data
def delete_occurrence_with_http_info(self, projects_id, occurrences_id, **kwargs):
"""
Deletes the given occurrence from the system.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_occurrence_with_http_info(projects_id, occurrences_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the occurrence in the form \"projects/{project_id}/occurrences/{occurrence_id}\" (required)
:param str occurrences_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'occurrences_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_occurrence" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `delete_occurrence`")
# verify the required parameter 'occurrences_id' is set
if ('occurrences_id' not in params) or (params['occurrences_id'] is None):
raise ValueError("Missing the required parameter `occurrences_id` when calling `delete_occurrence`")
resource_path = '/v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'occurrences_id' in params:
path_params['occurrencesId'] = params['occurrences_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
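    # A minimal usage sketch, assuming `api` is an already constructed instance of
    # this client class; the project and occurrence IDs below are placeholders.
    #
    #   # synchronous call: blocks and returns the Empty response body
    #   api.delete_occurrence('my-project', 'occurrence-123')
    #
    #   # asynchronous call: returns the request thread immediately
    #   thread = api.delete_occurrence('my-project', 'occurrence-123',
    #                                  callback=lambda resp: pprint(resp))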
def get_note(self, projects_id, notes_id, **kwargs):
"""
Returns the requested note
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_note(projects_id, notes_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the note in the form \"projects/{project_id}/notes/{note_id}\" (required)
:param str notes_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Note
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_note_with_http_info(projects_id, notes_id, **kwargs)
else:
(data) = self.get_note_with_http_info(projects_id, notes_id, **kwargs)
return data
def get_note_with_http_info(self, projects_id, notes_id, **kwargs):
"""
Returns the requested note
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_note_with_http_info(projects_id, notes_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the note in the form \"projects/{project_id}/notes/{note_id}\" (required)
:param str notes_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Note
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'notes_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_note" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `get_note`")
# verify the required parameter 'notes_id' is set
if ('notes_id' not in params) or (params['notes_id'] is None):
raise ValueError("Missing the required parameter `notes_id` when calling `get_note`")
resource_path = '/v1alpha1/projects/{projectsId}/notes/{notesId}'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'notes_id' in params:
path_params['notesId'] = params['notes_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Note',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def get_occurrence(self, projects_id, occurrences_id, **kwargs):
"""
Returns the requested occurrence
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_occurrence(projects_id, occurrences_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the occurrence in the form \"projects/{project_id}/occurrences/{occurrence_id}\" (required)
:param str occurrences_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Occurrence
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_occurrence_with_http_info(projects_id, occurrences_id, **kwargs)
else:
(data) = self.get_occurrence_with_http_info(projects_id, occurrences_id, **kwargs)
return data
def get_occurrence_with_http_info(self, projects_id, occurrences_id, **kwargs):
"""
Returns the requested occurrence
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_occurrence_with_http_info(projects_id, occurrences_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the occurrence in the form \"projects/{project_id}/occurrences/{occurrence_id}\" (required)
:param str occurrences_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Occurrence
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'occurrences_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_occurrence" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `get_occurrence`")
# verify the required parameter 'occurrences_id' is set
if ('occurrences_id' not in params) or (params['occurrences_id'] is None):
raise ValueError("Missing the required parameter `occurrences_id` when calling `get_occurrence`")
resource_path = '/v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'occurrences_id' in params:
path_params['occurrencesId'] = params['occurrences_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Occurrence',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def get_occurrence_note(self, projects_id, occurrences_id, **kwargs):
"""
Gets the note that this occurrence is attached to.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_occurrence_note(projects_id, occurrences_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the occurrence in the form \"projects/{project_id}/occurrences/{occurrence_id}\" (required)
:param str occurrences_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Note
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_occurrence_note_with_http_info(projects_id, occurrences_id, **kwargs)
else:
(data) = self.get_occurrence_note_with_http_info(projects_id, occurrences_id, **kwargs)
return data
def get_occurrence_note_with_http_info(self, projects_id, occurrences_id, **kwargs):
"""
Gets the note that this occurrence is attached to.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_occurrence_note_with_http_info(projects_id, occurrences_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the occurrence in the form \"projects/{project_id}/occurrences/{occurrence_id}\" (required)
:param str occurrences_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Note
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'occurrences_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_occurrence_note" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `get_occurrence_note`")
# verify the required parameter 'occurrences_id' is set
if ('occurrences_id' not in params) or (params['occurrences_id'] is None):
raise ValueError("Missing the required parameter `occurrences_id` when calling `get_occurrence_note`")
resource_path = '/v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}/notes'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'occurrences_id' in params:
path_params['occurrencesId'] = params['occurrences_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Note',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def get_operation(self, projects_id, operations_id, **kwargs):
"""
Returns the requested operation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_operation(projects_id, operations_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the operation in the form \"projects/{project_id}/operations/{operation_id}\" (required)
:param str operations_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Operation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_operation_with_http_info(projects_id, operations_id, **kwargs)
else:
(data) = self.get_operation_with_http_info(projects_id, operations_id, **kwargs)
return data
def get_operation_with_http_info(self, projects_id, operations_id, **kwargs):
"""
Returns the requested operation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_operation_with_http_info(projects_id, operations_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the operation in the form \"projects/{project_id}/operations/{operation_id}\" (required)
:param str operations_id: Part of `name`. See documentation of `projectsId`. (required)
:return: Operation
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'operations_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_operation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `get_operation`")
# verify the required parameter 'operations_id' is set
if ('operations_id' not in params) or (params['operations_id'] is None):
raise ValueError("Missing the required parameter `operations_id` when calling `get_operation`")
resource_path = '/v1alpha1/projects/{projectsId}/operations/{operationsId}'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'operations_id' in params:
path_params['operationsId'] = params['operations_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Operation',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def list_note_occurrences(self, projects_id, notes_id, **kwargs):
"""
Lists the names of Occurrences linked to a particular Note.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_note_occurrences(projects_id, notes_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name field will contain the note name for example: \"project/{project_id}/notes/{note_id}\" (required)
:param str notes_id: Part of `name`. See documentation of `projectsId`. (required)
:param str filter: The filter expression.
:param int page_size: Number of occurrences to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ListNoteOccurrencesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_note_occurrences_with_http_info(projects_id, notes_id, **kwargs)
else:
(data) = self.list_note_occurrences_with_http_info(projects_id, notes_id, **kwargs)
return data
def list_note_occurrences_with_http_info(self, projects_id, notes_id, **kwargs):
"""
Lists the names of Occurrences linked to a particular Note.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_note_occurrences_with_http_info(projects_id, notes_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name field will contain the note name for example: \"project/{project_id}/notes/{note_id}\" (required)
:param str notes_id: Part of `name`. See documentation of `projectsId`. (required)
:param str filter: The filter expression.
:param int page_size: Number of occurrences to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ListNoteOccurrencesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'notes_id', 'filter', 'page_size', 'page_token']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_note_occurrences" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `list_note_occurrences`")
# verify the required parameter 'notes_id' is set
if ('notes_id' not in params) or (params['notes_id'] is None):
raise ValueError("Missing the required parameter `notes_id` when calling `list_note_occurrences`")
resource_path = '/v1alpha1/projects/{projectsId}/notes/{notesId}/occurrences'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'notes_id' in params:
path_params['notesId'] = params['notes_id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListNoteOccurrencesResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
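    # A hedged paging sketch for list_note_occurrences; `api` is a client instance,
    # and the response attribute names (`occurrences`, `next_page_token`) are
    # assumed from the generated model naming convention used by this client.
    #
    #   resp = api.list_note_occurrences('my-project', 'my-note', page_size=100)
    #   occurrences = list(resp.occurrences or [])
    #   while resp.next_page_token:
    #       resp = api.list_note_occurrences('my-project', 'my-note', page_size=100,
    #                                        page_token=resp.next_page_token)
    #       occurrences.extend(resp.occurrences or [])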
def list_notes(self, projects_id, **kwargs):
"""
Lists all notes for a given project.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_notes(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This field contains the projectId for example: \"project/{project_id} (required)
:param str filter: The filter expression.
:param int page_size: Number of notes to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ListNotesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_notes_with_http_info(projects_id, **kwargs)
else:
(data) = self.list_notes_with_http_info(projects_id, **kwargs)
return data
def list_notes_with_http_info(self, projects_id, **kwargs):
"""
Lists all notes for a given project.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_notes_with_http_info(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This field contains the projectId for example: \"project/{project_id} (required)
:param str filter: The filter expression.
:param int page_size: Number of notes to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ListNotesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'filter', 'page_size', 'page_token']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_notes" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `list_notes`")
resource_path = '/v1alpha1/projects/{projectsId}/notes'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListNotesResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def list_occurrences(self, projects_id, **kwargs):
"""
Lists active occurrences for a given project/Digest.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_occurrences(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This contains the projectId for example: projects/{project_id} (required)
:param str filter: The filter expression.
:param int page_size: Number of occurrences to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ListOccurrencesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_occurrences_with_http_info(projects_id, **kwargs)
else:
(data) = self.list_occurrences_with_http_info(projects_id, **kwargs)
return data
def list_occurrences_with_http_info(self, projects_id, **kwargs):
"""
Lists active occurrences for a given project/Digest.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_occurrences_with_http_info(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This contains the projectId for example: projects/{project_id} (required)
:param str filter: The filter expression.
:param int page_size: Number of occurrences to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ListOccurrencesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'filter', 'page_size', 'page_token']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_occurrences" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `list_occurrences`")
resource_path = '/v1alpha1/projects/{projectsId}/occurrences'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListOccurrencesResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def list_operations(self, projects_id, **kwargs):
"""
Lists all operations for a given project.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_operations(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This field contains the projectId for example: \"project/{project_id} (required)
:param str filter: The filter expression.
:param int page_size: Number of operations to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ListOperationsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_operations_with_http_info(projects_id, **kwargs)
else:
(data) = self.list_operations_with_http_info(projects_id, **kwargs)
return data
def list_operations_with_http_info(self, projects_id, **kwargs):
"""
Lists all operations for a given project.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_operations_with_http_info(projects_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `parent`. This field contains the projectId for example: \"project/{project_id} (required)
:param str filter: The filter expression.
:param int page_size: Number of operations to return in the list.
:param str page_token: Token to provide to skip to a particular spot in the list.
:return: ListOperationsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'filter', 'page_size', 'page_token']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_operations" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `list_operations`")
resource_path = '/v1alpha1/projects/{projectsId}/operations'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListOperationsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def update_note(self, projects_id, notes_id, **kwargs):
"""
Updates an existing note.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_note(projects_id, notes_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the note. Should be of the form \"projects/{project_id}/notes/{note_id}\". (required)
:param str notes_id: Part of `name`. See documentation of `projectsId`. (required)
:param Note note: The updated note.
:return: Note
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_note_with_http_info(projects_id, notes_id, **kwargs)
else:
(data) = self.update_note_with_http_info(projects_id, notes_id, **kwargs)
return data
def update_note_with_http_info(self, projects_id, notes_id, **kwargs):
"""
Updates an existing note.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_note_with_http_info(projects_id, notes_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the note. Should be of the form \"projects/{project_id}/notes/{note_id}\". (required)
:param str notes_id: Part of `name`. See documentation of `projectsId`. (required)
:param Note note: The updated note.
:return: Note
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'notes_id', 'note']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_note" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `update_note`")
# verify the required parameter 'notes_id' is set
if ('notes_id' not in params) or (params['notes_id'] is None):
raise ValueError("Missing the required parameter `notes_id` when calling `update_note`")
resource_path = '/v1alpha1/projects/{projectsId}/notes/{notesId}'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'notes_id' in params:
path_params['notesId'] = params['notes_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'note' in params:
body_params = params['note']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Note',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def update_occurrence(self, projects_id, occurrences_id, **kwargs):
"""
Updates an existing occurrence.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_occurrence(projects_id, occurrences_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the occurrence. Should be of the form \"projects/{project_id}/occurrences/{occurrence_id}\". (required)
:param str occurrences_id: Part of `name`. See documentation of `projectsId`. (required)
:param Occurrence occurrence: The updated occurrence.
:return: Occurrence
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_occurrence_with_http_info(projects_id, occurrences_id, **kwargs)
else:
(data) = self.update_occurrence_with_http_info(projects_id, occurrences_id, **kwargs)
return data
def update_occurrence_with_http_info(self, projects_id, occurrences_id, **kwargs):
"""
Updates an existing occurrence.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_occurrence_with_http_info(projects_id, occurrences_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the occurrence. Should be of the form \"projects/{project_id}/occurrences/{occurrence_id}\". (required)
:param str occurrences_id: Part of `name`. See documentation of `projectsId`. (required)
:param Occurrence occurrence: The updated occurrence.
:return: Occurrence
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'occurrences_id', 'occurrence']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_occurrence" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `update_occurrence`")
# verify the required parameter 'occurrences_id' is set
if ('occurrences_id' not in params) or (params['occurrences_id'] is None):
raise ValueError("Missing the required parameter `occurrences_id` when calling `update_occurrence`")
resource_path = '/v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'occurrences_id' in params:
path_params['occurrencesId'] = params['occurrences_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'occurrence' in params:
body_params = params['occurrence']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Occurrence',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def update_operation(self, projects_id, operations_id, **kwargs):
"""
Updates an existing operation and returns an error if the operation does not exist. The only valid updates are to mark the done bit and to change the result.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_operation(projects_id, operations_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the Operation. Should be of the form \"projects/{project_id}/operations/{operation_id}\". (required)
:param str operations_id: Part of `name`. See documentation of `projectsId`. (required)
:param UpdateOperationRequest body: The request body.
:return: Operation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_operation_with_http_info(projects_id, operations_id, **kwargs)
else:
(data) = self.update_operation_with_http_info(projects_id, operations_id, **kwargs)
return data
def update_operation_with_http_info(self, projects_id, operations_id, **kwargs):
"""
Updates an existing operation and returns an error if the operation does not exist. The only valid updates are to mark the done bit and to change the result.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_operation_with_http_info(projects_id, operations_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str projects_id: Part of `name`. The name of the Operation. Should be of the form \"projects/{project_id}/operations/{operation_id}\". (required)
:param str operations_id: Part of `name`. See documentation of `projectsId`. (required)
:param UpdateOperationRequest body: The request body.
:return: Operation
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['projects_id', 'operations_id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_operation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'projects_id' is set
if ('projects_id' not in params) or (params['projects_id'] is None):
raise ValueError("Missing the required parameter `projects_id` when calling `update_operation`")
# verify the required parameter 'operations_id' is set
if ('operations_id' not in params) or (params['operations_id'] is None):
raise ValueError("Missing the required parameter `operations_id` when calling `update_operation`")
resource_path = '/v1alpha1/projects/{projectsId}/operations/{operationsId}'.replace('{format}', 'json')
path_params = {}
if 'projects_id' in params:
path_params['projectsId'] = params['projects_id']
if 'operations_id' in params:
path_params['operationsId'] = params['operations_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Operation',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
| 43.917949
| 163
| 0.583606
|
4a8c330cb7574bcb9bc60585a13c8c147fc95a57
| 53
|
py
|
Python
|
mini-scripts/Python_-_Slicing_Strings.txt.py
|
Web-Dev-Collaborative/PYTHON_PRAC
|
856f902fb43dcccae168d34ee6aacc02427a7ac6
|
[
"MIT"
] | 5
|
2021-06-02T23:44:25.000Z
|
2021-12-27T16:21:57.000Z
|
mini-scripts/Python_-_Slicing_Strings.txt.py
|
Web-Dev-Collaborative/PYTHON_PRAC
|
856f902fb43dcccae168d34ee6aacc02427a7ac6
|
[
"MIT"
] | 22
|
2021-05-31T01:33:25.000Z
|
2021-10-18T18:32:39.000Z
|
mini-scripts/Python_-_Slicing_Strings.txt.py
|
Web-Dev-Collaborative/PYTHON_PRAC
|
856f902fb43dcccae168d34ee6aacc02427a7ac6
|
[
"MIT"
] | 3
|
2021-06-19T03:37:47.000Z
|
2021-08-31T00:49:51.000Z
|
b = "Hello, World!"
print(b[2:5])
# Author: Bryan G
| 13.25
| 19
| 0.584906
|
fc59c0421e075b8c9bb93e39eec483d3ab48d926
| 529
|
py
|
Python
|
Lab3/PY05-readfilefinal.py
|
JulianConneely/dataRepresentation
|
2ae9ea341a9824954c096f4b1bac07cf32748c7c
|
[
"Apache-2.0"
] | 1
|
2019-12-17T17:03:34.000Z
|
2019-12-17T17:03:34.000Z
|
Lab3/PY05-readfilefinal.py
|
JulianConneely/dataRepresentation
|
2ae9ea341a9824954c096f4b1bac07cf32748c7c
|
[
"Apache-2.0"
] | null | null | null |
Lab3/PY05-readfilefinal.py
|
JulianConneely/dataRepresentation
|
2ae9ea341a9824954c096f4b1bac07cf32748c7c
|
[
"Apache-2.0"
] | null | null | null |
from bs4 import BeautifulSoup
import csv

# Parse the saved HTML table and copy each row into a CSV file.
with open("../carviewer.html") as fp:
    soup = BeautifulSoup(fp, 'html.parser')
# print(soup.tr)

# newline='' keeps the csv module from writing blank rows on Windows.
with open('week02data.csv', mode='w', newline='') as employee_file:
    employee_writer = csv.writer(employee_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
    rows = soup.findAll("tr")
    for row in rows:
        cols = row.findAll("td")
        datalist = []
        for col in cols:
            datalist.append(col.text)
        employee_writer.writerow(datalist)
| 26.45
| 103
| 0.657845
|
de8e9399157543ac42329cea87dee4b87cd3f4d5
| 4,595
|
py
|
Python
|
Score Prediction/test_impact.py
|
EagleW/ReviewRobot
|
823bba4db99cd8c0f3b6d6d38f9f19cae1dd6778
|
[
"MIT"
] | 19
|
2020-10-21T10:22:28.000Z
|
2022-03-22T11:48:53.000Z
|
Score Prediction/test_impact.py
|
EagleW/ReviewRobot
|
823bba4db99cd8c0f3b6d6d38f9f19cae1dd6778
|
[
"MIT"
] | 3
|
2020-12-17T17:38:35.000Z
|
2020-12-17T17:46:31.000Z
|
Score Prediction/test_impact.py
|
EagleW/ReviewRobot
|
823bba4db99cd8c0f3b6d6d38f9f19cae1dd6778
|
[
"MIT"
] | 4
|
2020-12-18T11:17:02.000Z
|
2021-11-11T05:08:58.000Z
|
import os
import sys
import json
import time
import torch
import pickle
import argparse
import numpy as np
from torch import nn
from loader.logger import Tee
from collections import OrderedDict
from model.RNN_torch import Impact
from sklearn.metrics import accuracy_score, mean_squared_error
from loader.utils import create_impact
from loader.data_loader import load_impact
from loader.dataset import Impact_Dataset, Impact_Processor
parser = argparse.ArgumentParser()
parser.add_argument(
"--type", default="0",
type=int, help="0 for acl, 1 for iclr"
)
parser.add_argument(
"--word_dim", default="64",
type=int, help="Token embedding dimension"
)
parser.add_argument(
"--feature_dim", default="1",
type=int, help="Feature embedding dimension"
)
parser.add_argument(
"--hidden_dim", default="32",
type=int, help="Hidden dimension"
)
parser.add_argument(
"--model_dp", default="models/impact_models/",
help="model directory path"
)
parser.add_argument(
"--gpu", default="1",
type=int, help="default is 1. set 0 to disable use gpu."
)
parser.add_argument(
"--batch_size", default="32",
type=int, help="Batch size."
)
parser.add_argument(
"--dropout", default="0.1",
type=float, help="Dropout on the embeddings (0 = no dropout)"
)
parser.add_argument(
"--layer_dropout", default="0.2",
type=float, help="Dropout on the embeddings (0 = no dropout)"
)
parser.add_argument(
"--max_len", default="200",
type=int, help="Max length."
)
parser.add_argument(
"--freq", default="5",
type=int, help="Min freq."
)
parser.add_argument(
"--lr_rate", default="0.0005",
type=float, help="Learning rate"
)
parser.add_argument(
"--data_path", default="data",
help="data directory path"
)
parser.add_argument(
"--load", action='store_true', help="Load dataset."
)
parser.add_argument(
"--num_epochs", default="100",
type=int, help="Number of training epochs"
)
parser.add_argument(
"--model", default="best_dev_model.pth.tar",
help="Model location"
)
args = parser.parse_args()
parameters = OrderedDict()
parameters['freq'] = args.freq
parameters['w_dim'] = args.word_dim
parameters['f_dim'] = args.feature_dim
parameters['h_dim'] = args.hidden_dim
parameters['input_dropout'] = args.dropout
parameters['layer_dropout'] = args.layer_dropout
parameters['gpu'] = args.gpu == 1
parameters['batch_size'] = args.batch_size
parameters['max_len'] = args.max_len
parameters['lr_rate'] = args.lr_rate
parameters['num_class'] = 5
state = pickle.load(open('data/impact_dataset.pth', 'rb'))
mappings = state['mappings']
word2id = mappings['word2id']
id2word = mappings['id2word']
vocab_size = len(mappings['id2word'])
t_dataset = state['t_dataset']
f1_bins = mappings['f1']
f1_size = len(f1_bins) + 1
test_dataset = Impact_Dataset(t_dataset, word2id, f1_bins, parameters['max_len'])
apro_proc = Impact_Processor()
device = torch.device("cuda:1" if torch.cuda.is_available() and parameters['gpu'] else "cpu")
embed_layer_word = nn.Embedding(vocab_size, args.word_dim, padding_idx=0, sparse=False)
embed_layer_f1 = nn.Embedding(f1_size, args.feature_dim, padding_idx=0, sparse=False)
model = Impact(embed_layer_word, embed_layer_f1, **parameters)
model = model.to(device)
state = torch.load('models/impact_models/best/best_dev_model.pth.tar')
state_dict = state['model']
model.load_state_dict(state_dict)
model.eval()
test_loader = torch.utils.data.DataLoader(
test_dataset,
batch_size=args.batch_size,
shuffle=True,
num_workers=2,
pin_memory=False,
collate_fn=apro_proc.process
)
aspect_all_ys = []
aspect_all_ys_ = []
aspect_all_ys_avg = []
for batch_idx, (batch_txts, batch_lens, batch_f1s, batch_tgt, batch_ids, batch_avg) in enumerate(test_loader):
batch_txts = batch_txts.to(device)
batch_f1s = batch_f1s.to(device)
batch_tgt = batch_tgt.to(device)
prob = model(batch_txts, batch_lens, batch_f1s)
symbols = prob.topk(1)[1].squeeze(1)
aspect_all_ys_.extend(symbols.tolist())
aspect_all_ys.extend(batch_tgt.tolist())
aspect_all_ys_avg.extend(batch_avg)
r = accuracy_score(aspect_all_ys, aspect_all_ys_)
m = mean_squared_error(aspect_all_ys, aspect_all_ys_)
print()
print('\t accuracy %.4f' % (r))
print('\t mse %.4f' % (m))
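# Decision accuracy below (a hedged reading of the heuristic): predicted labels are
# 0-indexed classes, so ys_ + 1 maps them back onto the 1-5 score scale; a prediction
# counts as a correct decision when it falls on the same side of the accept threshold
# (predicted score > 3 vs. average human score > 3.5) as the ground truth.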
decision_accuracy = 0
for ys_, ys in zip(aspect_all_ys_, aspect_all_ys_avg):
if (ys_+1 > 3 and ys > 3.5) or (ys_+1 <= 3 and ys <= 3.5):
decision_accuracy += 1
print('\t decision accuracy', decision_accuracy/len(aspect_all_ys_))
print(aspect_all_ys, aspect_all_ys_, aspect_all_ys_avg)
| 29.645161
| 110
| 0.722742
|
f2d7698cbf5afccd3de1aa17878110f73b6a88fd
| 671
|
py
|
Python
|
GETGMT.py
|
JupyterJones/Covid-19_Research_Notebooks
|
dda72440b67ecb38002918563dd64e445a7e1114
|
[
"CC0-1.0"
] | 1
|
2020-06-18T00:33:24.000Z
|
2020-06-18T00:33:24.000Z
|
GETGMT.py
|
JupyterJones/COVID-19-Jupyter-Notebooks
|
8b65ade0d4b2b69bb50ab377655497909e3d4a05
|
[
"MIT"
] | null | null | null |
GETGMT.py
|
JupyterJones/COVID-19-Jupyter-Notebooks
|
8b65ade0d4b2b69bb50ab377655497909e3d4a05
|
[
"MIT"
] | null | null | null |
"""
# USAGE:
# for yesterday's GMT
from GETGMT import *
print(GETYGMT())
>>> 05-14-2020
#todays GMT
from GETGMT import *
print(GETGMT())
>>> 05-15-2020
"""
from datetime import datetime
from datetime import date, timedelta
def GETYGMT():
"""
# USAGE:
# Yesterdays GMT
from GETGMT import *
print(GETYGMT())
>>> 05-14-2020
"""
yesterday = datetime.utcnow() - timedelta(days=1)
YesterdaysGMT=yesterday.strftime('%m-%d-%Y')
return YesterdaysGMT
def GETGMT():
"""
# USAGE:
#Todays GMT
from GETGMT import *
print(GETGMT())
>>> 05-15-2020
"""
GMT=datetime.utcnow().strftime('%m-%d-%Y')
return GMT
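if __name__ == "__main__":
    # A small self-check sketch (not part of the original module): print today's
    # and yesterday's GMT date strings when this file is run directly.
    print(GETGMT())
    print(GETYGMT())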
| 19.171429
| 53
| 0.606557
|
73d7032b9739dc6dbefc280703343257e4db24ec
| 35
|
py
|
Python
|
CodeWars/7 Kyu/Thinking & Testing- A and B.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
CodeWars/7 Kyu/Thinking & Testing- A and B.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
CodeWars/7 Kyu/Thinking & Testing- A and B.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
def testit (a, b):
return a | b
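# A quick illustration of the bitwise OR above (example values chosen here, not
# taken from the kata): testit(1, 2) == 3, testit(6, 3) == 7, testit(0, 0) == 0.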
| 17.5
| 18
| 0.542857
|
e1d5ec87e1530a7c76f806ee97ccd3c35d6e8cf7
| 2,262
|
py
|
Python
|
2020/day12.py
|
tangarts/advent-of-code
|
5879fbec1a5377d1288666a357b029f6345d4a5d
|
[
"MIT"
] | null | null | null |
2020/day12.py
|
tangarts/advent-of-code
|
5879fbec1a5377d1288666a357b029f6345d4a5d
|
[
"MIT"
] | null | null | null |
2020/day12.py
|
tangarts/advent-of-code
|
5879fbec1a5377d1288666a357b029f6345d4a5d
|
[
"MIT"
] | null | null | null |
#%%
from typing import List, Tuple
from advent_of_code.core import mapt, parse_input
from advent_of_code.point import manhattan_distance
raw = """F10
N3
F7
R90
F11"""
test = parse_input(raw, parser=lambda l: (l[0], int(l[1:])))
day12 = parse_input("data/input12.txt", parser=lambda l: (l[0], int(l[1:])), test=False)
COMPASS = "NESW"
def rotate(current: str, action: str, angle: int) -> str:
"""rotate through NESW by mapping 90 degree turns right to moving
an position right in NESW
left is the oppose direction
"""
# can only rotate left or right
assert action in "RL"
idx = COMPASS.index(current)
offset = angle // 90
direction = 1 if action == "R" else -1
return COMPASS[(idx + direction * offset) % 4]
def test_rotate() -> None:
assert rotate("E", "R", 90) == "S"
assert rotate("E", "L", 90) == "N"
assert rotate("E", "L", 270) == "S"
assert rotate("E", "R", 270) == "N"
assert rotate("E", "R", 360) == "E"
assert rotate("E", "L", 360) == "E"
assert rotate("E", "R", 180) == "W"
assert rotate("E", "L", 180) == "W"
def part1(instructions: List[Tuple[str, int]]) -> int:
current = "E"
directions = {k: 0 for k in COMPASS}
for action, value in instructions:
if action == "F":
directions[current] += value
elif action in "NESW":
directions[action] += value
elif action in "LR":
current = rotate(current, action, value)
return abs(directions["N"] - directions["S"]) + abs(
directions["E"] - directions["W"]
)
assert part1(test) == 25
assert part1(day12) == 1133
# part 2
def part2(instructions: List[Tuple[str, int]]) -> int:
waypoint = {"E": 10, "N": 1, "W": 0, "S": 0}
ship = {k: 0 for k in COMPASS}
for action, value in instructions:
if action == "F":
for direction in "NESW":
ship[direction] += value * waypoint[direction]
elif action in "NESW":
waypoint[action] += value
elif action in "LR":
waypoint = {rotate(k, action, value): val for k, val in waypoint.items()}
return abs(ship["N"] - ship["S"]) + abs(ship["E"] - ship["W"])
assert part2(test) == 286
assert part2(day12) == 61053
| 27.253012
| 88
| 0.579576
|
7af1e7654e58d3a9211958108877766a94caff0b
| 41,217
|
py
|
Python
|
spotipy/client.py
|
austinkeeley/spotipy
|
aa580076c5f2796ad552a8d693c60ffa62493b43
|
[
"MIT"
] | 1
|
2020-06-06T04:51:15.000Z
|
2020-06-06T04:51:15.000Z
|
spotipy/client.py
|
austinkeeley/spotipy
|
aa580076c5f2796ad552a8d693c60ffa62493b43
|
[
"MIT"
] | null | null | null |
spotipy/client.py
|
austinkeeley/spotipy
|
aa580076c5f2796ad552a8d693c60ffa62493b43
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import print_function
import sys
import requests
import json
import time
import six
""" A simple and thin Python library for the Spotify Web API
"""
class SpotifyException(Exception):
def __init__(self, http_status, code, msg, headers=None):
self.http_status = http_status
self.code = code
self.msg = msg
# `headers` is used to support `Retry-After` in the event of a
# 429 status code.
if headers is None:
headers = {}
self.headers = headers
def __str__(self):
return 'http status: {0}, code:{1} - {2}'.format(
self.http_status, self.code, self.msg)
class Spotify(object):
"""
Example usage::
import spotipy
urn = 'spotify:artist:3jOstUTkEu2JkjvRdBA5Gu'
sp = spotipy.Spotify()
sp.trace = True # turn on tracing
sp.trace_out = True # turn on trace out
artist = sp.artist(urn)
print(artist)
user = sp.user('plamere')
print(user)
"""
trace = False # Enable tracing?
trace_out = False
max_get_retries = 10
def __init__(self, auth=None, requests_session=True,
client_credentials_manager=None, proxies=None, requests_timeout=None):
"""
Create a Spotify API object.
:param auth: An authorization token (optional)
:param requests_session:
A Requests session object or a truthy value to create one.
A falsy value disables sessions.
It should generally be a good idea to keep sessions enabled
for performance reasons (connection pooling).
:param client_credentials_manager:
SpotifyClientCredentials object
:param proxies:
Definition of proxies (optional)
:param requests_timeout:
Tell Requests to stop waiting for a response after a given number of seconds
"""
self.prefix = 'https://api.spotify.com/v1/'
self._auth = auth
self.client_credentials_manager = client_credentials_manager
self.proxies = proxies
self.requests_timeout = requests_timeout
if isinstance(requests_session, requests.Session):
self._session = requests_session
else:
if requests_session: # Build a new session.
self._session = requests.Session()
else: # Use the Requests API module as a "session".
from requests import api
self._session = api
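    # A hedged construction sketch; the token, proxy URL and timeout below are
    # placeholders, not values from this module:
    #
    #   session = requests.Session()
    #   sp = Spotify(auth='<oauth-access-token>', requests_session=session,
    #                proxies={'https': 'http://proxy.example:8080'},
    #                requests_timeout=10)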
def _auth_headers(self):
if self._auth:
return {'Authorization': 'Bearer {0}'.format(self._auth)}
elif self.client_credentials_manager:
token = self.client_credentials_manager.get_access_token()
return {'Authorization': 'Bearer {0}'.format(token)}
else:
return {}
def _internal_call(self, method, url, payload, params):
args = dict(params=params)
args["timeout"] = self.requests_timeout
if not url.startswith('http'):
url = self.prefix + url
headers = self._auth_headers()
headers['Content-Type'] = 'application/json'
if payload:
args["data"] = json.dumps(payload)
if self.trace_out:
print(url)
r = self._session.request(method, url, headers=headers, proxies=self.proxies, **args)
if self.trace: # pragma: no cover
print()
print ('headers', headers)
print ('http status', r.status_code)
print(method, r.url)
if payload:
print("DATA", json.dumps(payload))
try:
r.raise_for_status()
except:
if r.text and len(r.text) > 0 and r.text != 'null':
raise SpotifyException(r.status_code,
-1, '%s:\n %s' % (r.url, r.json()['error']['message']),
headers=r.headers)
else:
raise SpotifyException(r.status_code,
-1, '%s:\n %s' % (r.url, 'error'), headers=r.headers)
finally:
r.connection.close()
if r.text and len(r.text) > 0 and r.text != 'null':
results = r.json()
if self.trace: # pragma: no cover
print('RESP', results)
print()
return results
else:
return None
def _get(self, url, args=None, payload=None, **kwargs):
if args:
kwargs.update(args)
retries = self.max_get_retries
delay = 1
while retries > 0:
try:
return self._internal_call('GET', url, payload, kwargs)
except SpotifyException as e:
retries -= 1
status = e.http_status
# 429 means we hit a rate limit, backoff
if status == 429 or (status >= 500 and status < 600):
if retries < 0:
raise
else:
sleep_seconds = int(e.headers.get('Retry-After', delay))
print ('retrying ...' + str(sleep_seconds) + 'secs')
time.sleep(sleep_seconds + 1)
delay += 1
else:
raise
            except Exception as e:
                # some other exception. Requests have been known to throw a
                # BadStatusLine exception; such errors carry no Retry-After
                # header, so back off using the running delay instead.
                print('exception', str(e))
                retries -= 1
                if retries >= 0:
                    print('retrying ...' + str(delay) + 'secs')
                    time.sleep(delay + 1)
                    delay += 1
                else:
                    raise
def _post(self, url, args=None, payload=None, **kwargs):
if args:
kwargs.update(args)
return self._internal_call('POST', url, payload, kwargs)
def _delete(self, url, args=None, payload=None, **kwargs):
if args:
kwargs.update(args)
return self._internal_call('DELETE', url, payload, kwargs)
def _put(self, url, args=None, payload=None, **kwargs):
if args:
kwargs.update(args)
return self._internal_call('PUT', url, payload, kwargs)
def get_auth(self):
""" returns the authorization object associated with this client
"""
return self._auth
def set_auth(self, auth):
""" returns the authorization object associated with this client
"""
self._auth = auth
def next(self, result):
""" returns the next result given a paged result
Parameters:
- result - a previously returned paged result
"""
if result['next']:
return self._get(result['next'])
else:
return None
def previous(self, result):
""" returns the previous result given a paged result
Parameters:
- result - a previously returned paged result
"""
if result['previous']:
return self._get(result['previous'])
else:
return None
def _warn_old(self, msg):
print('warning:' + msg, file=sys.stderr)
def _warn(self, msg, *args):
print('warning:' + msg.format(*args), file=sys.stderr)
def track(self, track_id):
""" returns a single track given the track's ID, URI or URL
Parameters:
- track_id - a spotify URI, URL or ID
"""
trid = self._get_id('track', track_id)
return self._get('tracks/' + trid)
def tracks(self, tracks, market = None):
""" returns a list of tracks given a list of track IDs, URIs, or URLs
Parameters:
- tracks - a list of spotify URIs, URLs or IDs
- market - an ISO 3166-1 alpha-2 country code.
"""
tlist = [self._get_id('track', t) for t in tracks]
return self._get('tracks/?ids=' + ','.join(tlist), market = market)
def artist(self, artist_id):
""" returns a single artist given the artist's ID, URI or URL
Parameters:
- artist_id - an artist ID, URI or URL
"""
trid = self._get_id('artist', artist_id)
return self._get('artists/' + trid)
def artists(self, artists):
""" returns a list of artists given the artist IDs, URIs, or URLs
Parameters:
- artists - a list of artist IDs, URIs or URLs
"""
tlist = [self._get_id('artist', a) for a in artists]
return self._get('artists/?ids=' + ','.join(tlist))
def artist_albums(self, artist_id, album_type=None, country=None, limit=20,
offset=0):
""" Get Spotify catalog information about an artist's albums
Parameters:
- artist_id - the artist ID, URI or URL
- album_type - 'album', 'single', 'appears_on', 'compilation'
- country - limit the response to one particular country.
- limit - the number of albums to return
- offset - the index of the first album to return
"""
trid = self._get_id('artist', artist_id)
return self._get('artists/' + trid + '/albums', album_type=album_type,
country=country, limit=limit, offset=offset)
def artist_top_tracks(self, artist_id, country='US'):
""" Get Spotify catalog information about an artist's top 10 tracks
by country.
Parameters:
- artist_id - the artist ID, URI or URL
- country - limit the response to one particular country.
"""
trid = self._get_id('artist', artist_id)
return self._get('artists/' + trid + '/top-tracks', country=country)
def artist_related_artists(self, artist_id):
""" Get Spotify catalog information about artists similar to an
identified artist. Similarity is based on analysis of the
Spotify community's listening history.
Parameters:
- artist_id - the artist ID, URI or URL
"""
trid = self._get_id('artist', artist_id)
return self._get('artists/' + trid + '/related-artists')
def album(self, album_id):
""" returns a single album given the album's ID, URIs or URL
Parameters:
- album_id - the album ID, URI or URL
"""
trid = self._get_id('album', album_id)
return self._get('albums/' + trid)
def album_tracks(self, album_id, limit=50, offset=0):
""" Get Spotify catalog information about an album's tracks
Parameters:
- album_id - the album ID, URI or URL
- limit - the number of items to return
- offset - the index of the first item to return
"""
trid = self._get_id('album', album_id)
return self._get('albums/' + trid + '/tracks/', limit=limit,
offset=offset)
def albums(self, albums):
""" returns a list of albums given the album IDs, URIs, or URLs
Parameters:
- albums - a list of album IDs, URIs or URLs
"""
tlist = [self._get_id('album', a) for a in albums]
return self._get('albums/?ids=' + ','.join(tlist))
def search(self, q, limit=10, offset=0, type='track', market=None):
""" searches for an item
Parameters:
- q - the search query
- limit - the number of items to return
- offset - the index of the first item to return
- type - the type of item to return. One of 'artist', 'album',
'track' or 'playlist'
- market - An ISO 3166-1 alpha-2 country code or the string from_token.
"""
return self._get('search', q=q, limit=limit, offset=offset, type=type, market=market)
def user(self, user):
""" Gets basic profile information about a Spotify User
Parameters:
            - user - the id of the user
"""
return self._get('users/' + user)
def current_user_playlists(self, limit=50, offset=0):
""" Get current user playlists without required getting his profile
Parameters:
- limit - the number of items to return
- offset - the index of the first item to return
"""
return self._get("me/playlists", limit=limit, offset=offset)
def user_playlists(self, user, limit=50, offset=0):
""" Gets playlists of a user
Parameters:
            - user - the id of the user
- limit - the number of items to return
- offset - the index of the first item to return
"""
return self._get("users/%s/playlists" % user, limit=limit,
offset=offset)
def user_playlist(self, user, playlist_id=None, fields=None):
""" Gets playlist of a user
Parameters:
- user - the id of the user
- playlist_id - the id of the playlist
- fields - which fields to return
"""
if playlist_id is None:
return self._get("users/%s/starred" % (user), fields=fields)
plid = self._get_id('playlist', playlist_id)
return self._get("users/%s/playlists/%s" % (user, plid), fields=fields)
def user_playlist_tracks(self, user, playlist_id=None, fields=None,
limit=100, offset=0, market=None):
""" Get full details of the tracks of a playlist owned by a user.
Parameters:
- user - the id of the user
- playlist_id - the id of the playlist
- fields - which fields to return
- limit - the maximum number of tracks to return
- offset - the index of the first track to return
- market - an ISO 3166-1 alpha-2 country code.
"""
plid = self._get_id('playlist', playlist_id)
return self._get("users/%s/playlists/%s/tracks" % (user, plid),
limit=limit, offset=offset, fields=fields,
market=market)
def user_playlist_create(self, user, name, public=True, description=''):
""" Creates a playlist for a user
Parameters:
- user - the id of the user
- name - the name of the playlist
- public - is the created playlist public
- description - the description of the playlist
"""
data = {'name': name, 'public': public, 'description': description}
return self._post("users/%s/playlists" % (user,), payload=data)
def user_playlist_change_details(
self, user, playlist_id, name=None, public=None,
collaborative=None, description=None):
""" Changes a playlist's name and/or public/private state
Parameters:
- user - the id of the user
- playlist_id - the id of the playlist
- name - optional name of the playlist
- public - optional is the playlist public
- collaborative - optional is the playlist collaborative
- description - optional description of the playlist
"""
data = {}
if isinstance(name, six.string_types):
data['name'] = name
if isinstance(public, bool):
data['public'] = public
if isinstance(collaborative, bool):
data['collaborative'] = collaborative
if isinstance(description, six.string_types):
data['description'] = description
return self._put("users/%s/playlists/%s" % (user, playlist_id),
payload=data)
def user_playlist_unfollow(self, user, playlist_id):
""" Unfollows (deletes) a playlist for a user
Parameters:
- user - the id of the user
            - playlist_id - the id of the playlist
"""
return self._delete("users/%s/playlists/%s/followers" % (user, playlist_id))
def user_playlist_add_tracks(self, user, playlist_id, tracks,
position=None):
""" Adds tracks to a playlist
Parameters:
- user - the id of the user
- playlist_id - the id of the playlist
- tracks - a list of track URIs, URLs or IDs
- position - the position to add the tracks
"""
plid = self._get_id('playlist', playlist_id)
ftracks = [self._get_uri('track', tid) for tid in tracks]
return self._post("users/%s/playlists/%s/tracks" % (user, plid),
payload=ftracks, position=position)
def user_playlist_replace_tracks(self, user, playlist_id, tracks):
""" Replace all tracks in a playlist
Parameters:
- user - the id of the user
- playlist_id - the id of the playlist
- tracks - the list of track ids to add to the playlist
"""
plid = self._get_id('playlist', playlist_id)
ftracks = [self._get_uri('track', tid) for tid in tracks]
payload = {"uris": ftracks}
return self._put("users/%s/playlists/%s/tracks" % (user, plid),
payload=payload)
def user_playlist_reorder_tracks(
self, user, playlist_id, range_start, insert_before,
range_length=1, snapshot_id=None):
""" Reorder tracks in a playlist
Parameters:
- user - the id of the user
- playlist_id - the id of the playlist
- range_start - the position of the first track to be reordered
- range_length - optional the number of tracks to be reordered (default: 1)
- insert_before - the position where the tracks should be inserted
- snapshot_id - optional playlist's snapshot ID
"""
plid = self._get_id('playlist', playlist_id)
payload = {"range_start": range_start,
"range_length": range_length,
"insert_before": insert_before}
if snapshot_id:
payload["snapshot_id"] = snapshot_id
return self._put("users/%s/playlists/%s/tracks" % (user, plid),
payload=payload)
def user_playlist_remove_all_occurrences_of_tracks(
self, user, playlist_id, tracks, snapshot_id=None):
""" Removes all occurrences of the given tracks from the given playlist
Parameters:
- user - the id of the user
- playlist_id - the id of the playlist
- tracks - the list of track ids to add to the playlist
- snapshot_id - optional id of the playlist snapshot
"""
plid = self._get_id('playlist', playlist_id)
ftracks = [self._get_uri('track', tid) for tid in tracks]
payload = {"tracks": [{"uri": track} for track in ftracks]}
if snapshot_id:
payload["snapshot_id"] = snapshot_id
return self._delete("users/%s/playlists/%s/tracks" % (user, plid),
payload=payload)
def user_playlist_remove_specific_occurrences_of_tracks(
self, user, playlist_id, tracks, snapshot_id=None):
""" Removes all occurrences of the given tracks from the given playlist
Parameters:
- user - the id of the user
- playlist_id - the id of the playlist
- tracks - an array of objects containing Spotify URIs of the tracks to remove with their current positions in the playlist. For example:
[ { "uri":"4iV5W9uYEdYUVa79Axb7Rh", "positions":[2] },
{ "uri":"1301WleyT98MSxVHPZCA6M", "positions":[7] } ]
- snapshot_id - optional id of the playlist snapshot
"""
plid = self._get_id('playlist', playlist_id)
ftracks = []
for tr in tracks:
ftracks.append({
"uri": self._get_uri("track", tr["uri"]),
"positions": tr["positions"],
})
payload = {"tracks": ftracks}
if snapshot_id:
payload["snapshot_id"] = snapshot_id
return self._delete("users/%s/playlists/%s/tracks" % (user, plid),
payload=payload)
def user_playlist_follow_playlist(self, playlist_owner_id, playlist_id):
"""
Add the current authenticated user as a follower of a playlist.
Parameters:
- playlist_owner_id - the user id of the playlist owner
- playlist_id - the id of the playlist
"""
return self._put("users/{}/playlists/{}/followers".format(playlist_owner_id, playlist_id))
def user_playlist_is_following(self, playlist_owner_id, playlist_id, user_ids):
"""
Check to see if the given users are following the given playlist
Parameters:
- playlist_owner_id - the user id of the playlist owner
- playlist_id - the id of the playlist
- user_ids - the ids of the users that you want to check to see if they follow the playlist. Maximum: 5 ids.
"""
return self._get("users/{}/playlists/{}/followers/contains?ids={}".format(playlist_owner_id, playlist_id, ','.join(user_ids)))
def me(self):
""" Get detailed profile information about the current user.
An alias for the 'current_user' method.
"""
return self._get('me/')
def current_user(self):
""" Get detailed profile information about the current user.
An alias for the 'me' method.
"""
return self.me()
def current_user_playing_track(self):
        ''' Get information about the current user's currently playing track.
'''
return self._get('me/player/currently-playing')
def current_user_saved_albums(self, limit=20, offset=0):
""" Gets a list of the albums saved in the current authorized user's
"Your Music" library
Parameters:
- limit - the number of albums to return
- offset - the index of the first album to return
"""
return self._get('me/albums', limit=limit, offset=offset)
def current_user_saved_tracks(self, limit=20, offset=0):
""" Gets a list of the tracks saved in the current authorized user's
"Your Music" library
Parameters:
- limit - the number of tracks to return
- offset - the index of the first track to return
"""
return self._get('me/tracks', limit=limit, offset=offset)
def current_user_followed_artists(self, limit=20, after=None):
""" Gets a list of the artists followed by the current authorized user
Parameters:
- limit - the number of tracks to return
            - after - the last artist ID retrieved from the previous request
"""
return self._get('me/following', type='artist', limit=limit,
after=after)
def current_user_saved_tracks_delete(self, tracks=None):
""" Remove one or more tracks from the current user's
"Your Music" library.
Parameters:
- tracks - a list of track URIs, URLs or IDs
"""
tlist = []
if tracks is not None:
tlist = [self._get_id('track', t) for t in tracks]
return self._delete('me/tracks/?ids=' + ','.join(tlist))
def current_user_saved_tracks_contains(self, tracks=None):
""" Check if one or more tracks is already saved in
the current Spotify user’s “Your Music” library.
Parameters:
- tracks - a list of track URIs, URLs or IDs
"""
tlist = []
if tracks is not None:
tlist = [self._get_id('track', t) for t in tracks]
return self._get('me/tracks/contains?ids=' + ','.join(tlist))
def current_user_saved_tracks_add(self, tracks=None):
""" Add one or more tracks to the current user's
"Your Music" library.
Parameters:
- tracks - a list of track URIs, URLs or IDs
"""
tlist = []
if tracks is not None:
tlist = [self._get_id('track', t) for t in tracks]
return self._put('me/tracks/?ids=' + ','.join(tlist))
def current_user_top_artists(self, limit=20, offset=0,
time_range='medium_term'):
""" Get the current user's top artists
Parameters:
- limit - the number of entities to return
- offset - the index of the first entity to return
- time_range - Over what time frame are the affinities computed
Valid-values: short_term, medium_term, long_term
"""
return self._get('me/top/artists', time_range=time_range, limit=limit,
offset=offset)
def current_user_top_tracks(self, limit=20, offset=0,
time_range='medium_term'):
""" Get the current user's top tracks
Parameters:
- limit - the number of entities to return
- offset - the index of the first entity to return
- time_range - Over what time frame are the affinities computed
Valid-values: short_term, medium_term, long_term
"""
return self._get('me/top/tracks', time_range=time_range, limit=limit,
offset=offset)
def current_user_recently_played(self, limit=50):
''' Get the current user's recently played tracks
Parameters:
- limit - the number of entities to return
'''
return self._get('me/player/recently-played', limit=limit)
def current_user_saved_albums_add(self, albums=[]):
""" Add one or more albums to the current user's
"Your Music" library.
Parameters:
- albums - a list of album URIs, URLs or IDs
"""
alist = [self._get_id('album', a) for a in albums]
r = self._put('me/albums?ids=' + ','.join(alist))
return r
def user_follow_artists(self, ids=[]):
''' Follow one or more artists
Parameters:
- ids - a list of artist IDs
'''
return self._put('me/following?type=artist&ids=' + ','.join(ids))
def user_follow_users(self, ids=[]):
''' Follow one or more users
Parameters:
- ids - a list of user IDs
'''
return self._put('me/following?type=user&ids=' + ','.join(ids))
def featured_playlists(self, locale=None, country=None, timestamp=None,
limit=20, offset=0):
""" Get a list of Spotify featured playlists
Parameters:
- locale - The desired language, consisting of a lowercase ISO
639 language code and an uppercase ISO 3166-1 alpha-2 country
code, joined by an underscore.
- country - An ISO 3166-1 alpha-2 country code.
- timestamp - A timestamp in ISO 8601 format:
yyyy-MM-ddTHH:mm:ss. Use this parameter to specify the user's
local time to get results tailored for that specific date and
time in the day
- limit - The maximum number of items to return. Default: 20.
Minimum: 1. Maximum: 50
- offset - The index of the first item to return. Default: 0
(the first object). Use with limit to get the next set of
items.
"""
return self._get('browse/featured-playlists', locale=locale,
country=country, timestamp=timestamp, limit=limit,
offset=offset)
def new_releases(self, country=None, limit=20, offset=0):
""" Get a list of new album releases featured in Spotify
Parameters:
- country - An ISO 3166-1 alpha-2 country code.
- limit - The maximum number of items to return. Default: 20.
Minimum: 1. Maximum: 50
- offset - The index of the first item to return. Default: 0
(the first object). Use with limit to get the next set of
items.
"""
return self._get('browse/new-releases', country=country, limit=limit,
offset=offset)
def categories(self, country=None, locale=None, limit=20, offset=0):
""" Get a list of new album releases featured in Spotify
Parameters:
- country - An ISO 3166-1 alpha-2 country code.
- locale - The desired language, consisting of an ISO 639
language code and an ISO 3166-1 alpha-2 country code, joined
by an underscore.
- limit - The maximum number of items to return. Default: 20.
Minimum: 1. Maximum: 50
- offset - The index of the first item to return. Default: 0
(the first object). Use with limit to get the next set of
items.
"""
return self._get('browse/categories', country=country, locale=locale,
limit=limit, offset=offset)
def category_playlists(self, category_id=None, country=None, limit=20,
offset=0):
""" Get a list of new album releases featured in Spotify
Parameters:
- category_id - The Spotify category ID for the category.
- country - An ISO 3166-1 alpha-2 country code.
- limit - The maximum number of items to return. Default: 20.
Minimum: 1. Maximum: 50
- offset - The index of the first item to return. Default: 0
(the first object). Use with limit to get the next set of
items.
"""
return self._get('browse/categories/' + category_id + '/playlists',
country=country, limit=limit, offset=offset)
def recommendations(self, seed_artists=None, seed_genres=None,
seed_tracks=None, limit=20, country=None, **kwargs):
""" Get a list of recommended tracks for one to five seeds.
Parameters:
- seed_artists - a list of artist IDs, URIs or URLs
- seed_tracks - a list of artist IDs, URIs or URLs
- seed_genres - a list of genre names. Available genres for
recommendations can be found by calling recommendation_genre_seeds
- country - An ISO 3166-1 alpha-2 country code. If provided, all
results will be playable in this country.
- limit - The maximum number of items to return. Default: 20.
Minimum: 1. Maximum: 100
- min/max/target_<attribute> - For the tuneable track attributes listed
in the documentation, these values provide filters and targeting on
results.
"""
params = dict(limit=limit)
if seed_artists:
params['seed_artists'] = ','.join(
[self._get_id('artist', a) for a in seed_artists])
if seed_genres:
params['seed_genres'] = ','.join(seed_genres)
if seed_tracks:
params['seed_tracks'] = ','.join(
[self._get_id('track', t) for t in seed_tracks])
if country:
params['market'] = country
for attribute in ["acousticness", "danceability", "duration_ms",
"energy", "instrumentalness", "key", "liveness",
"loudness", "mode", "popularity", "speechiness",
"tempo", "time_signature", "valence"]:
for prefix in ["min_", "max_", "target_"]:
param = prefix + attribute
if param in kwargs:
params[param] = kwargs[param]
return self._get('recommendations', **params)
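    # Illustrative usage sketch (not part of the original client): tuneable
    # attributes are passed as plain keyword arguments. The seed artist URI is
    # reused from the class docstring; the response is assumed to contain a
    # 'tracks' list as documented by the Web API.
    #
    #     recs = sp.recommendations(
    #         seed_artists=['spotify:artist:3jOstUTkEu2JkjvRdBA5Gu'],
    #         limit=5, target_energy=0.8, min_popularity=50)
    #     for t in recs['tracks']:
    #         print(t['name'])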
def recommendation_genre_seeds(self):
""" Get a list of genres available for the recommendations function.
"""
return self._get('recommendations/available-genre-seeds')
def audio_analysis(self, track_id):
""" Get audio analysis for a track based upon its Spotify ID
Parameters:
- track_id - a track URI, URL or ID
"""
trid = self._get_id('track', track_id)
return self._get('audio-analysis/' + trid)
def audio_features(self, tracks=[]):
""" Get audio features for one or multiple tracks based upon their Spotify IDs
Parameters:
- tracks - a list of track URIs, URLs or IDs, maximum: 50 ids
"""
if isinstance(tracks, str):
trackid = self._get_id('track', tracks)
results = self._get('audio-features/?ids=' + trackid)
else:
tlist = [self._get_id('track', t) for t in tracks]
results = self._get('audio-features/?ids=' + ','.join(tlist))
# the response has changed, look for the new style first, and if
        # it's not there, fall back on the old style
if 'audio_features' in results:
return results['audio_features']
else:
return results
def audio_analysis(self, id):
""" Get audio analysis for a track based upon its Spotify ID
Parameters:
- id - a track URIs, URLs or IDs
"""
id = self._get_id('track', id)
return self._get('audio-analysis/'+id)
def devices(self):
''' Get a list of user's available devices.
'''
return self._get("me/player/devices")
def current_playback(self, market = None):
''' Get information about user's current playback.
Parameters:
- market - an ISO 3166-1 alpha-2 country code.
'''
return self._get("me/player", market = market)
def currently_playing(self, market = None):
''' Get user's currently playing track.
Parameters:
- market - an ISO 3166-1 alpha-2 country code.
'''
return self._get("me/player/currently-playing", market = market)
def transfer_playback(self, device_id, force_play = True):
''' Transfer playback to another device.
Note that the API accepts a list of device ids, but only
actually supports one.
Parameters:
- device_id - transfer playback to this device
- force_play - true: after transfer, play. false:
keep current state.
'''
data = {
'device_ids': [device_id],
'play': force_play
}
return self._put("me/player", payload=data)
def start_playback(self, device_id = None, context_uri = None, uris = None, offset = None, position_ms = None):
''' Start or resume user's playback.
        Provide a `context_uri` to start playback of an album,
artist, or playlist.
Provide a `uris` list to start playback of one or more
tracks.
Provide `offset` as {"position": <int>} or {"uri": "<track uri>"}
to start playback at a particular offset.
Parameters:
- device_id - device target for playback
- context_uri - spotify context uri to play
- uris - spotify track uris
            - offset - offset into context by index or track
            - position_ms - (optional) position in milliseconds to start from
'''
if context_uri is not None and uris is not None:
self._warn('specify either context uri or uris, not both')
return
if uris is not None and not isinstance(uris, list):
self._warn('uris must be a list')
return
data = {}
if context_uri is not None:
data['context_uri'] = context_uri
if uris is not None:
data['uris'] = uris
if offset is not None:
data['offset'] = offset
if position_ms is not None:
data['position_ms'] = position_ms
return self._put(self._append_device_id("me/player/play", device_id), payload=data)
def pause_playback(self, device_id = None):
''' Pause user's playback.
Parameters:
- device_id - device target for playback
'''
return self._put(self._append_device_id("me/player/pause", device_id))
def next_track(self, device_id = None):
''' Skip user's playback to next track.
Parameters:
- device_id - device target for playback
'''
return self._post(self._append_device_id("me/player/next", device_id))
def previous_track(self, device_id = None):
''' Skip user's playback to previous track.
Parameters:
- device_id - device target for playback
'''
return self._post(self._append_device_id("me/player/previous", device_id))
def seek_track(self, position_ms, device_id = None):
''' Seek to position in current track.
Parameters:
- position_ms - position in milliseconds to seek to
- device_id - device target for playback
'''
if not isinstance(position_ms, int):
self._warn('position_ms must be an integer')
return
return self._put(self._append_device_id("me/player/seek?position_ms=%s" % position_ms, device_id))
def repeat(self, state, device_id = None):
''' Set repeat mode for playback.
Parameters:
- state - `track`, `context`, or `off`
- device_id - device target for playback
'''
if state not in ['track', 'context', 'off']:
self._warn('invalid state')
return
self._put(self._append_device_id("me/player/repeat?state=%s" % state, device_id))
def volume(self, volume_percent, device_id = None):
''' Set playback volume.
Parameters:
- volume_percent - volume between 0 and 100
- device_id - device target for playback
'''
if not isinstance(volume_percent, int):
self._warn('volume must be an integer')
return
if volume_percent < 0 or volume_percent > 100:
self._warn('volume must be between 0 and 100, inclusive')
return
self._put(self._append_device_id("me/player/volume?volume_percent=%s" % volume_percent, device_id))
def shuffle(self, state, device_id = None):
''' Toggle playback shuffling.
Parameters:
- state - true or false
- device_id - device target for playback
'''
if not isinstance(state, bool):
self._warn('state must be a boolean')
return
state = str(state).lower()
self._put(self._append_device_id("me/player/shuffle?state=%s" % state, device_id))
def playlist(self, playlist_id):
#return self._get('playlists/%s' % playlist_id)
actual_id = self._get_id('playlist', playlist_id)
return self._get('playlists/' + actual_id)
def playlist_tracks(self, playlist_id):
actual_id = self._get_id('playlist', playlist_id)
return self._get('playlists/%s/tracks' % actual_id)
def _append_device_id(self, path, device_id):
''' Append device ID to API path.
Parameters:
- device_id - device id to append
'''
if device_id:
if '?' in path:
path += "&device_id=%s" % device_id
else:
path += "?device_id=%s" % device_id
return path
def _get_id(self, type, id):
fields = id.split(':')
if len(fields) >= 3:
if type != fields[-2]:
self._warn('expected id of type %s but found type %s %s',
type, fields[-2], id)
return fields[-1]
fields = id.split('/')
if len(fields) >= 3:
itype = fields[-2]
if type != itype:
self._warn('expected id of type %s but found type %s %s',
type, itype, id)
return fields[-1]
return id
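    # For reference, _get_id accepts a URI, URL, or bare id and returns the id,
    # e.g. (using the artist from the class docstring above):
    #   'spotify:artist:3jOstUTkEu2JkjvRdBA5Gu'                  -> '3jOstUTkEu2JkjvRdBA5Gu'
    #   'https://open.spotify.com/artist/3jOstUTkEu2JkjvRdBA5Gu' -> '3jOstUTkEu2JkjvRdBA5Gu'
    #   '3jOstUTkEu2JkjvRdBA5Gu'                                 -> '3jOstUTkEu2JkjvRdBA5Gu'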
def _get_uri(self, type, id):
return 'spotify:' + type + ":" + self._get_id(type, id)
| 38.163889
| 154
| 0.55858
|
9f8dd53dd58724bf726117d9b0381e6816c71073
| 4,039
|
py
|
Python
|
load.py
|
yqnhjy/Image-Recognition-Of-Birds
|
eaf0a125274e572cc5a12e8a2264af80e70a1a5d
|
[
"Apache-2.0"
] | null | null | null |
load.py
|
yqnhjy/Image-Recognition-Of-Birds
|
eaf0a125274e572cc5a12e8a2264af80e70a1a5d
|
[
"Apache-2.0"
] | null | null | null |
load.py
|
yqnhjy/Image-Recognition-Of-Birds
|
eaf0a125274e572cc5a12e8a2264af80e70a1a5d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Time : 2022/4/29 0029 11:58
# @Author : 小白
# @File : load.py
# @Software : PyCharm
import numpy as np
# Read the dataset
import scipy.misc
import os
from PIL import Image
from torchvision import transforms
import torch
class CUB():
def __init__(self, root, is_train=True, data_len=None,transform=None, target_transform=None):
self.root = root
self.is_train = is_train
self.transform = transform
self.target_transform = target_transform
img_txt_file = open(os.path.join(self.root, 'images.txt'))
label_txt_file = open(os.path.join(self.root, 'image_class_labels.txt'))
train_val_file = open(os.path.join(self.root, 'train_test_split.txt'))
        # Image name index
img_name_list = []
for line in img_txt_file:
            # The last character is a newline; strip it
img_name_list.append(line[:-1].split(' ')[-1])
        # Label index; subtract 1 from each label so labels start at 0
label_list = []
for line in label_txt_file:
label_list.append(int(line[:-1].split(' ')[-1]) - 1)
        # Build the training and test splits
train_test_list = []
for line in train_val_file:
train_test_list.append(int(line[:-1].split(' ')[-1]))
        # zip() pairs each image name / label with its train-test flag.
        # zip() takes iterables and packs corresponding elements into tuples,
        # returning them lazily, which saves memory; list() can materialize them.
        # A flag of 1 marks a training sample, 0 marks a test sample.
train_file_list = [x for i, x in zip(train_test_list, img_name_list) if i]
test_file_list = [x for i, x in zip(train_test_list, img_name_list) if not i]
train_label_list = [x for i, x in zip(train_test_list, label_list) if i][:data_len]
test_label_list = [x for i, x in zip(train_test_list, label_list) if not i][:data_len]
if self.is_train:
            # scipy.misc.imread reads images as arrays (numpy type); PIL + np.array is used here
self.train_img = [np.array(Image.open(os.path.join(self.root, 'images', train_file))) for train_file in
train_file_list[:data_len]]
            # Read the training labels
self.train_label = train_label_list
if not self.is_train:
self.test_img = [np.array(Image.open(os.path.join(self.root, 'images', test_file))) for test_file in
test_file_list[:data_len]]
self.test_label = test_label_list
    # Data augmentation
def __getitem__(self,index):
        # Training set
if self.is_train:
img, target = self.train_img[index], self.train_label[index]
        # Test set
else:
img, target = self.test_img[index], self.test_label[index]
if len(img.shape) == 2:
            # Convert grayscale images to three channels
img = np.stack([img]*3,2)
        # Convert to an RGB image
img = Image.fromarray(img,mode='RGB')
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
if self.is_train:
return len(self.train_label)
else:
return len(self.test_label)
if __name__ == '__main__':
'''
dataset = CUB(root='./CUB_200_2011')
for data in dataset:
print(data[0].size(),data[1])
'''
    # Read the dataset via PyTorch's DataLoader
transform_train = transforms.Compose([
transforms.Resize((224, 224)),
transforms.RandomCrop(224, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize([0.485,0.456,0.406], [0.229,0.224,0.225]),
])
dataset = CUB(root='./CUB_200_2011', is_train=False, transform=transform_train,)
print(len(dataset))
trainloader = torch.utils.data.DataLoader(dataset, batch_size=2, shuffle=True, num_workers=0,
drop_last=True)
print(len(trainloader))
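    # Minimal sanity check (illustrative, assuming the CUB_200_2011 layout
    # above): each batch should be a [2, 3, 224, 224] image tensor plus two labels.
    for imgs, labels in trainloader:
        print(imgs.shape, labels)
        break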
| 35.429825
| 116
| 0.584798
|
3f2fa0b0da663deb5fd1b35eaa22a6766754995a
| 3,909
|
py
|
Python
|
src/table_schema.py
|
NoSchoolViolence/Content_Logger
|
0b943f43fe13717ec4f835797afb73936a77a409
|
[
"MIT"
] | null | null | null |
src/table_schema.py
|
NoSchoolViolence/Content_Logger
|
0b943f43fe13717ec4f835797afb73936a77a409
|
[
"MIT"
] | null | null | null |
src/table_schema.py
|
NoSchoolViolence/Content_Logger
|
0b943f43fe13717ec4f835797afb73936a77a409
|
[
"MIT"
] | 1
|
2021-03-12T19:58:01.000Z
|
2021-03-12T19:58:01.000Z
|
"""
Purpose: This file sets the SQL table schema for the pubmed downloader. It inserts
the data into a sql database for easy querying, extraction and to prevent redundancies in data.
Author: Natalie Chun
Created: 12 January 2019
"""
def get_pubmed_table_schema():
"""Table schema for pubmed will supercede the above."""
tables = {}
# table contains articles pulled up based on various search terms
tables['searchterms'] = """CREATE TABLE IF NOT EXISTS searchterms (
pmid INTEGER,
searchterm VARCHAR(100),
PRIMARY KEY(pmid, searchterm),
FOREIGN KEY(pmid) REFERENCES pubmeddata(pmid)
);
"""
# contains all of pubmed meta data for articles
tables['pubmeddata'] = """CREATE TABLE IF NOT EXISTS pubmeddata (
pmid INTEGER,
pubmedarticleset VARCHAR(10),
pubmedarticle VARCHAR(10),
medlinecitation VARCHAR(100),
daterevised VARCHAR(10),
year INTEGER,
month INTEGER,
day INTEGER,
article VARCHAR(100),
journal VARCHAR(50),
issn VARCHAR(10),
journalissue VARCHAR(10),
pubdate VARCHAR(10),
title VARCHAR(100),
isoabbreviation VARCHAR(50),
articletitle VARCHAR(100),
elocationid VARCHAR(20),
abstract VARCHAR(1000),
abstracttext VARCHAR(1000),
authorlist VARCHAR(200),
author VARCHAR(100),
lastname VARCHAR(20),
forename VARCHAR(20),
initials VARCHAR(5),
affiliationinfo VARCHAR(30),
affiliation VARCHAR(50),
language VARCHAR(5),
publicationtypelist VARCHAR(10),
publicationtype VARCHAR(10),
articledate VARCHAR(10),
medlinejournalinfo VARCHAR(20),
country VARCHAR(25),
medlineta VARCHAR(25),
nlmuniqueid INTEGER,
issnlinking VARCHAR(9),
pubmeddata VARCHAR(20),
history VARCHAR(10),
pubmedpubdate VARCHAR(10),
hour INTEGER,
minute INTEGER,
publicationstatus VARCHAR(15),
articleidlist VARCHAR(10),
articleid VARCHAR(30),
PRIMARY KEY (pmid)
);
"""
return tables
def get_pubmed_table_schema2():
"""Updated table schema for pubmed will supercede the above."""
tables = {}
# table contains articles pulled up based on various search terms
tables['searchterms'] = """CREATE TABLE IF NOT EXISTS searchterms (
pmid INTEGER,
searchterm VARCHAR(100),
PRIMARY KEY(pmid, searchterm),
FOREIGN KEY(pmid) REFERENCES pubmeddata(pmid)
);
"""
# contains all of pubmed meta data for articles
tables['pubmeddata'] = """CREATE TABLE IF NOT EXISTS pubmeddata (
pmid INTEGER,
medlinecitationstat VARCHAR(20),
medlinecitationowner VARCHAR(10),
daterevyear INTEGER,
daterevmonth INTEGER,
daterevday INTEGER,
articlepubmodel VARCHAR(100),
journal VARCHAR(50),
issn VARCHAR(10),
journalissuevol INTEGER,
pubdateyear INTEGER,
pubdatemonth VARCHAR(3),
title VARCHAR(100),
isoabbreviation VARCHAR(50),
articletitle VARCHAR(100),
medlinepgn INTEGER,
elocationidpii VARCHAR(20),
elocationiddoi VARCHAR(30),
authorlist VARCHAR(200),
authoraffiliation VARCHAR(300),
language VARCHAR(5),
publicationtype VARCHAR(10),
articledateyear INTEGER,
articledatemonth INTEGER,
articledateday INTEGER,
medlinejrnlcountry VARCHAR(25),
medlinejrnlta VARCHAR(25),
nlmuniqueid INTEGER,
issnlinking VARCHAR(9),
publicationstatus VARCHAR(15),
artidpubmed INTEGER,
artidpii VARCHAR(30),
                                        artiddoi VARCHAR(30),
PRIMARY KEY (pmid)
);
"""
return tables
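if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module): execute the DDL
    # against a local SQLite file. The database name 'pubmed.db' is an
    # arbitrary assumption.
    import sqlite3
    conn = sqlite3.connect('pubmed.db')
    for name, ddl in get_pubmed_table_schema2().items():
        conn.execute(ddl)  # CREATE TABLE IF NOT EXISTS ...
    conn.commit()
    conn.close()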
| 30.539063
| 95
| 0.623177
|
e7a7ccf4b155e749ae7c3adb0351762992aad040
| 2,911
|
py
|
Python
|
timew_to_csv.py
|
mpoquet/taskwarrior-scripts
|
e1eb007c5fb9086eb15803c04518e66c7dfaba6c
|
[
"MIT"
] | 2
|
2019-04-22T20:27:43.000Z
|
2021-05-25T08:14:58.000Z
|
timew_to_csv.py
|
mpoquet/taskwarrior-scripts
|
e1eb007c5fb9086eb15803c04518e66c7dfaba6c
|
[
"MIT"
] | 1
|
2019-04-15T07:51:46.000Z
|
2019-04-15T15:49:05.000Z
|
timew_to_csv.py
|
mpoquet/taskwarrior-scripts
|
e1eb007c5fb9086eb15803c04518e66c7dfaba6c
|
[
"MIT"
] | 1
|
2019-04-22T20:27:48.000Z
|
2019-04-22T20:27:48.000Z
|
#!/usr/bin/env nix-shell
#!nix-shell -i python -A pyEnv
'''Converts a timew JSON export to a convenient CSV to analyze and visualize.
JSON entries with multiple tags are split into several CSV entries.'''
import math
import subprocess
import sys
import pandas as pd
def is_entry_hooked_with_semantic_tags(tags):
'''Returns whether the entry was manually entered by the user or got from our taskw hook with semantic tags.'''
for tag in tags:
if tag.startswith('uuid:'):
uuid = ''.join(tag.split('uuid:')[1:])
return True, uuid
return False, None
def describe_interval(row):
'''Returns a description of a row regardless of its type (hooked or not).'''
if isinstance(row['task_description'], str):
return row['task_description']
return row['timew_tag']
def obtain_rich_df(taskw_df, timew_df):
'''Merge taskwarrior and timewarrior dataframes into one dataframe.'''
def add_hooked_or_not_row(row, rows):
'''Convenience function to create the enriched dataframe.'''
tags = row['tag']
is_hooked, uuid = is_entry_hooked_with_semantic_tags(tags)
if is_hooked:
new_row = {
'timew_interval_start': row['start'],
'timew_interval_end': row['end'],
'task_uuid': uuid,
}
rows.append(new_row)
else:
tag_to_keep = tags[0]
if (len(tags) > 1):
print("A manual entry has several tags. Only the first ({}) will be kept. Tags were {}".format(tag_to_keep, split_row), file=sys.stderr)
new_row = {
'timew_interval_start': row['start'],
'timew_interval_end': row['end'],
'timew_tag': tag_to_keep
}
rows.append(new_row)
new_rows = []
timew_df.apply(add_hooked_or_not_row, axis=1, args=(new_rows,))
new_df = pd.DataFrame(new_rows)
merged_df = pd.merge(new_df, taskw_df, on='task_uuid', how='left')
merged_df['task_description'] = merged_df.apply(lambda row: describe_interval(row), axis=1)
if 'timew_tag' in merged_df:
merged_df.drop('timew_tag', axis=1, inplace=True)
return merged_df
# Read input JSON from stdin (should be a timewarrior export).
time_df = pd.read_json(sys.stdin)
time_df.rename(columns={'tags':'tag'}, inplace=True)
# Call task to retrieve its database.
task_db_process = subprocess.run(["task", "export"], capture_output=True)
task_db = pd.read_json(task_db_process.stdout.decode('utf-8'))
task_db.rename(columns={col:'task_'+col for col in task_db.columns}, inplace=True)
task_db.rename(columns={'task_entry':'task_creation_date', 'task_modified':'task_last_modification_date'}, inplace=True)
# Generate a special merge of the two dataframes then write it on stdout.
rich_df = obtain_rich_df(task_db, time_df)
rich_df.to_csv(sys.stdout, index=False)
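# Typical invocation (assumed workflow, not part of the original script):
#   timew export | ./timew_to_csv.py > intervals.csv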
| 39.876712
| 152
| 0.665407
|
47dece53e710104776656ee77dcb44705afa5c6d
| 1,189
|
py
|
Python
|
time_handler.py
|
KermityOwen/CovidTracker
|
ef5bc2329cb8c4ef23dcd25d4b94f1ee4e2deeb4
|
[
"MIT"
] | null | null | null |
time_handler.py
|
KermityOwen/CovidTracker
|
ef5bc2329cb8c4ef23dcd25d4b94f1ee4e2deeb4
|
[
"MIT"
] | null | null | null |
time_handler.py
|
KermityOwen/CovidTracker
|
ef5bc2329cb8c4ef23dcd25d4b94f1ee4e2deeb4
|
[
"MIT"
] | null | null | null |
"""
Module contains functions for handling time format conversions and fetching current time in GMT UTC+0.
"""
import logging
import time
def hhmmss_to_seconds(hhmmss):
"""
Translates HHMMSS or HHMM formatted time into seconds.
Args:
hhmmss (str): String in HHMMSS or HHMM format
Returns:
seconds (int): Converted seconds
"""
hhmmss_split = hhmmss.split(":")
try:
seconds = (int(hhmmss_split[0]) * 60 * 60) + (int(hhmmss_split[1]) * 60) + int(hhmmss_split[2])
except IndexError: # If IndexError then it would be HHMM instead of HHMMSS so run below code \/
seconds = (int(hhmmss_split[0]) * 60 * 60) + (int(hhmmss_split[1]) * 60)
return seconds
def current_time_hhmmss():
"""
Fetches current time in GMT UTC+0
Returns:
(str): Current time in GMT UTC+0
"""
return str(time.gmtime().tm_hour) + ":" + str(time.gmtime().tm_min) + ":" + str(time.gmtime().tm_sec)
def current_time_seconds():
"""
Fetches current time in seconds
Returns:
(int): Current time in seconds
"""
logging.info("Current time fetched")
return hhmmss_to_seconds(current_time_hhmmss())
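if __name__ == "__main__":
    # Quick illustrative check of the helpers above (not part of the original
    # module): "01:02:03" is 1*3600 + 2*60 + 3 = 3723 seconds, "01:02" is 3720.
    assert hhmmss_to_seconds("01:02:03") == 3723
    assert hhmmss_to_seconds("01:02") == 3720
    print(current_time_hhmmss(), current_time_seconds())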
| 26.422222
| 105
| 0.644239
|
0d5c70f36c5ef6ac5e2ebedeaa67d4a1786f17df
| 1,066
|
py
|
Python
|
pod042_bot/migrations/versions/c58fb194c34b_.py
|
saber-nyan/pod042-bot_new
|
b8ab509b85ba88cba61f5c3db8c0235f3c073802
|
[
"WTFPL"
] | 1
|
2019-06-14T11:07:25.000Z
|
2019-06-14T11:07:25.000Z
|
pod042_bot/migrations/versions/c58fb194c34b_.py
|
saber-nyan/pod042-bot_new
|
b8ab509b85ba88cba61f5c3db8c0235f3c073802
|
[
"WTFPL"
] | null | null | null |
pod042_bot/migrations/versions/c58fb194c34b_.py
|
saber-nyan/pod042-bot_new
|
b8ab509b85ba88cba61f5c3db8c0235f3c073802
|
[
"WTFPL"
] | null | null | null |
"""empty message
Revision ID: c58fb194c34b
Revises: 9c628daec881
Create Date: 2020-04-29 18:11:17.154517
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'c58fb194c34b'
down_revision = '9c628daec881'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('chats', 'chat_id',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
autoincrement=True,
existing_server_default=sa.text("nextval('chats_chat_id_seq'::regclass)"))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('chats', 'chat_id',
existing_type=sa.BigInteger(),
type_=sa.INTEGER(),
autoincrement=True,
existing_server_default=sa.text("nextval('chats_chat_id_seq'::regclass)"))
# ### end Alembic commands ###
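# To apply or roll back this revision with the standard Alembic CLI (assumed
# workflow, not part of the generated file):
#   alembic upgrade c58fb194c34b
#   alembic downgrade 9c628daec881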
| 29.611111
| 94
| 0.617261
|
f0f5e786fd3e6a3515256c1966307e476fa7fcb0
| 1,423
|
py
|
Python
|
test/option--lw.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 1,403
|
2017-11-23T14:24:01.000Z
|
2022-03-30T20:59:39.000Z
|
test/option--lw.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 3,708
|
2017-11-27T13:47:12.000Z
|
2022-03-29T17:21:17.000Z
|
test/option--lw.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 281
|
2017-12-01T23:48:38.000Z
|
2022-03-31T15:25:44.000Z
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', "")
test.option_not_yet_implemented('--list-where', '.')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 33.880952
| 73
| 0.767393
|
d8bd6e71928f962120717ff75902dfd40a059ac1
| 23,807
|
py
|
Python
|
pysnmp/ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:46:28 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
BridgeId, dot1dBasePort, Timeout = mibBuilder.importSymbols("BRIDGE-MIB", "BridgeId", "dot1dBasePort", "Timeout")
eltMesBridgeExtMIBObjects, = mibBuilder.importSymbols("ELTEX-MES-BRIDGE-EXT-MIB", "eltMesBridgeExtMIBObjects")
InterfaceIndex, ifIndex = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifIndex")
PortList, VlanIdOrNone = mibBuilder.importSymbols("Q-BRIDGE-MIB", "PortList", "VlanIdOrNone")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, ObjectIdentity, Bits, Counter32, Counter64, MibIdentifier, NotificationType, Gauge32, iso, IpAddress, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "ObjectIdentity", "Bits", "Counter32", "Counter64", "MibIdentifier", "NotificationType", "Gauge32", "iso", "IpAddress", "Unsigned32")
MacAddress, TruthValue, TextualConvention, DisplayString, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "TruthValue", "TextualConvention", "DisplayString", "RowStatus")
eltMesStpMultiProcessMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2))
if mibBuilder.loadTexts: eltMesStpMultiProcessMIB.setLastUpdated('1310250000Z')
if mibBuilder.loadTexts: eltMesStpMultiProcessMIB.setOrganization('Eltex Ltd.')
eltMesDot1dStpMultiProcess = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1))
eltMesDot1sMstpMultiProcess = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2))
eltdot1dStpMultiProcessTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1), )
if mibBuilder.loadTexts: eltdot1dStpMultiProcessTable.setStatus('current')
eltdot1dStpMultiProcessEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1), ).setIndexNames((0, "ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB", "eltdot1dStpMultiProcessId"))
if mibBuilder.loadTexts: eltdot1dStpMultiProcessEntry.setStatus('current')
eltdot1dStpMultiProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessId.setStatus('current')
eltdot1dStpMultiProcessPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPriority.setStatus('current')
eltdot1dStpMultiProcessBridgeMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 3), Timeout().subtype(subtypeSpec=ValueRangeConstraint(600, 4000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessBridgeMaxAge.setStatus('current')
eltdot1dStpMultiProcessBridgeHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 4), Timeout().subtype(subtypeSpec=ValueRangeConstraint(100, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessBridgeHelloTime.setStatus('current')
eltdot1dStpMultiProcessBridgeForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 5), Timeout().subtype(subtypeSpec=ValueRangeConstraint(400, 3000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessBridgeForwardDelay.setStatus('current')
eltdot1dStpMultiProcessVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("stpCompatible", 0), ("rstp", 2), ("mstp", 3))).clone('rstp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessVersion.setStatus('current')
eltdot1dStpMultiProcessTimeSinceTopologyChange = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 7), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessTimeSinceTopologyChange.setStatus('current')
eltdot1dStpMultiProcessTopChanges = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessTopChanges.setStatus('current')
eltdot1dStpMultiProcessDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 9), BridgeId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessDesignatedRoot.setStatus('current')
eltdot1dStpMultiProcessRootCost = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessRootCost.setStatus('current')
eltdot1dStpMultiProcessRootPort = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessRootPort.setStatus('current')
eltdot1dStpMultiProcessMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 12), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessMaxAge.setStatus('current')
eltdot1dStpMultiProcessHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 13), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessHelloTime.setStatus('current')
eltdot1dStpMultiProcessHoldTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessHoldTime.setStatus('current')
eltdot1dStpMultiProcessForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 15), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessForwardDelay.setStatus('current')
eltdot1dStpMultiProcessRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 16), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessRowStatus.setStatus('current')
eltdot1dStpMultiProcessLastTopologyChangePort = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 1, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessLastTopologyChangePort.setStatus('current')
eltdot1dStpMultiProcessPortTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2), )
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortTable.setStatus('current')
eltdot1dStpMultiProcessPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1), ).setIndexNames((0, "ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB", "eltdot1dStpMultiProcessPortPort"))
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortEntry.setStatus('current')
eltdot1dStpMultiProcessPortPort = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortPort.setStatus('current')
eltdot1dStpMultiProcessPortProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortProcessId.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId1 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId1.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId2 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId2.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId3 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId3.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId4 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId4.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId5 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId5.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId6 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId6.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId7 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId7.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId8 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId8.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId9 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId9.setStatus('current')
eltdot1dStpMultiProcessPortSharedProcessId10 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 2, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortSharedProcessId10.setStatus('current')
eltdot1dStpMultiProcessPortListTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 3), )
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortListTable.setStatus('current')
eltdot1dStpMultiProcessPortListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 3, 1), ).setIndexNames((0, "ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB", "eltdot1dStpMultiProcessPortListProcessId"))
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortListEntry.setStatus('current')
eltdot1dStpMultiProcessPortListProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortListProcessId.setStatus('current')
eltdot1dStpMultiProcessPortList = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 1, 3, 1, 2), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1dStpMultiProcessPortList.setStatus('current')
eltdot1sMstpMultiProcessInstanceTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1), )
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceTable.setStatus('current')
eltdot1sMstpMultiProcessInstanceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1), ).setIndexNames((0, "ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB", "eltdot1sMstpMultiProcessInstanceProcessId"), (0, "ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB", "eltdot1sMstpMultiProcessInstanceId"))
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceEntry.setStatus('current')
eltdot1sMstpMultiProcessInstanceProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceProcessId.setStatus('current')
eltdot1sMstpMultiProcessInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceId.setStatus('current')
eltdot1sMstpMultiProcessInstanceEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 3), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceEnable.setStatus('current')
eltdot1sMstpMultiProcessInstanceTimeSinceTopologyChange = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 4), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceTimeSinceTopologyChange.setStatus('current')
eltdot1sMstpMultiProcessInstanceTopChanges = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceTopChanges.setStatus('current')
eltdot1sMstpMultiProcessInstanceDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 6), BridgeId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceDesignatedRoot.setStatus('current')
eltdot1sMstpMultiProcessInstanceRootCost = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceRootCost.setStatus('current')
eltdot1sMstpMultiProcessInstanceRootPort = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceRootPort.setStatus('current')
eltdot1sMstpMultiProcessInstanceMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 9), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceMaxAge.setStatus('current')
eltdot1sMstpMultiProcessInstanceHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 10), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceHelloTime.setStatus('current')
eltdot1sMstpMultiProcessInstanceHoldTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceHoldTime.setStatus('current')
eltdot1sMstpMultiProcessInstanceForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 12), Timeout()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceForwardDelay.setStatus('current')
eltdot1sMstpMultiProcessInstancePriority = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 61440))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstancePriority.setStatus('current')
eltdot1sMstpMultiProcessInstanceRemainingHopes = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceRemainingHopes.setStatus('current')
eltdot1sMstpMultiProcessInstanceLastTopologyChangePort = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 1, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessInstanceLastTopologyChangePort.setStatus('current')
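# Per-process MSTP table: indexed by the process id, it carries the
# configurable max-hops value (default 20) plus read-only regional root
# identity, regional root cost and remaining hop count.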
eltdot1sMstpMultiProcessTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 2), )
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessTable.setStatus('current')
eltdot1sMstpMultiProcessEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 2, 1), ).setIndexNames((0, "ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB", "eltdot1sMstpMultiProcessId"))
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessEntry.setStatus('current')
eltdot1sMstpMultiProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessId.setStatus('current')
eltdot1sMstpMultiProcessMaxHopes = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 40)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessMaxHopes.setStatus('current')
eltdot1sMstpMultiProcessDesignatedMaxHopes = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessDesignatedMaxHopes.setStatus('current')
eltdot1sMstpMultiProcessRegionalRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 2, 1, 4), BridgeId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessRegionalRoot.setStatus('current')
eltdot1sMstpMultiProcessRegionalRootCost = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessRegionalRootCost.setStatus('current')
eltdot1sMstpMultiProcessRemainingHops = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 23, 1, 401, 0, 2, 2, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eltdot1sMstpMultiProcessRemainingHops.setStatus('current')
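# Register every managed object defined above with the MIB builder under the
# ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB module name so importers can resolve them.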
mibBuilder.exportSymbols("ELTEX-MES-BRIDGE-STP-MULTIPROCESS-MIB", eltdot1dStpMultiProcessBridgeForwardDelay=eltdot1dStpMultiProcessBridgeForwardDelay, eltdot1dStpMultiProcessForwardDelay=eltdot1dStpMultiProcessForwardDelay, eltdot1sMstpMultiProcessRegionalRootCost=eltdot1sMstpMultiProcessRegionalRootCost, eltdot1dStpMultiProcessTimeSinceTopologyChange=eltdot1dStpMultiProcessTimeSinceTopologyChange, eltdot1dStpMultiProcessPortPort=eltdot1dStpMultiProcessPortPort, eltdot1sMstpMultiProcessInstancePriority=eltdot1sMstpMultiProcessInstancePriority, eltdot1dStpMultiProcessRootPort=eltdot1dStpMultiProcessRootPort, eltdot1dStpMultiProcessTable=eltdot1dStpMultiProcessTable, eltdot1sMstpMultiProcessMaxHopes=eltdot1sMstpMultiProcessMaxHopes, eltdot1dStpMultiProcessMaxAge=eltdot1dStpMultiProcessMaxAge, eltdot1dStpMultiProcessId=eltdot1dStpMultiProcessId, eltdot1sMstpMultiProcessInstanceHoldTime=eltdot1sMstpMultiProcessInstanceHoldTime, eltdot1sMstpMultiProcessRemainingHops=eltdot1sMstpMultiProcessRemainingHops, eltdot1sMstpMultiProcessInstanceId=eltdot1sMstpMultiProcessInstanceId, eltdot1sMstpMultiProcessTable=eltdot1sMstpMultiProcessTable, eltdot1sMstpMultiProcessInstanceMaxAge=eltdot1sMstpMultiProcessInstanceMaxAge, eltdot1sMstpMultiProcessInstanceRootPort=eltdot1sMstpMultiProcessInstanceRootPort, eltdot1dStpMultiProcessPortSharedProcessId5=eltdot1dStpMultiProcessPortSharedProcessId5, eltMesStpMultiProcessMIB=eltMesStpMultiProcessMIB, eltdot1dStpMultiProcessPortProcessId=eltdot1dStpMultiProcessPortProcessId, eltdot1dStpMultiProcessPortList=eltdot1dStpMultiProcessPortList, eltdot1dStpMultiProcessTopChanges=eltdot1dStpMultiProcessTopChanges, eltdot1sMstpMultiProcessInstanceForwardDelay=eltdot1sMstpMultiProcessInstanceForwardDelay, eltdot1dStpMultiProcessPortSharedProcessId2=eltdot1dStpMultiProcessPortSharedProcessId2, eltdot1sMstpMultiProcessId=eltdot1sMstpMultiProcessId, eltdot1sMstpMultiProcessInstanceProcessId=eltdot1sMstpMultiProcessInstanceProcessId, eltdot1dStpMultiProcessPortSharedProcessId4=eltdot1dStpMultiProcessPortSharedProcessId4, eltdot1dStpMultiProcessPortSharedProcessId6=eltdot1dStpMultiProcessPortSharedProcessId6, eltdot1dStpMultiProcessPortListTable=eltdot1dStpMultiProcessPortListTable, eltdot1dStpMultiProcessPortListEntry=eltdot1dStpMultiProcessPortListEntry, eltdot1sMstpMultiProcessInstanceRootCost=eltdot1sMstpMultiProcessInstanceRootCost, eltdot1dStpMultiProcessBridgeHelloTime=eltdot1dStpMultiProcessBridgeHelloTime, eltdot1dStpMultiProcessPortSharedProcessId10=eltdot1dStpMultiProcessPortSharedProcessId10, eltdot1sMstpMultiProcessInstanceLastTopologyChangePort=eltdot1sMstpMultiProcessInstanceLastTopologyChangePort, eltdot1sMstpMultiProcessRegionalRoot=eltdot1sMstpMultiProcessRegionalRoot, eltdot1dStpMultiProcessVersion=eltdot1dStpMultiProcessVersion, eltdot1sMstpMultiProcessInstanceDesignatedRoot=eltdot1sMstpMultiProcessInstanceDesignatedRoot, eltdot1dStpMultiProcessRootCost=eltdot1dStpMultiProcessRootCost, eltdot1sMstpMultiProcessInstanceEntry=eltdot1sMstpMultiProcessInstanceEntry, PYSNMP_MODULE_ID=eltMesStpMultiProcessMIB, eltdot1dStpMultiProcessEntry=eltdot1dStpMultiProcessEntry, eltdot1dStpMultiProcessBridgeMaxAge=eltdot1dStpMultiProcessBridgeMaxAge, eltdot1dStpMultiProcessHelloTime=eltdot1dStpMultiProcessHelloTime, eltdot1dStpMultiProcessDesignatedRoot=eltdot1dStpMultiProcessDesignatedRoot, eltdot1dStpMultiProcessPriority=eltdot1dStpMultiProcessPriority, eltdot1dStpMultiProcessPortListProcessId=eltdot1dStpMultiProcessPortListProcessId, 
eltMesDot1dStpMultiProcess=eltMesDot1dStpMultiProcess, eltMesDot1sMstpMultiProcess=eltMesDot1sMstpMultiProcess, eltdot1sMstpMultiProcessInstanceTable=eltdot1sMstpMultiProcessInstanceTable, eltdot1dStpMultiProcessPortSharedProcessId1=eltdot1dStpMultiProcessPortSharedProcessId1, eltdot1sMstpMultiProcessInstanceHelloTime=eltdot1sMstpMultiProcessInstanceHelloTime, eltdot1dStpMultiProcessRowStatus=eltdot1dStpMultiProcessRowStatus, eltdot1dStpMultiProcessLastTopologyChangePort=eltdot1dStpMultiProcessLastTopologyChangePort, eltdot1dStpMultiProcessPortSharedProcessId9=eltdot1dStpMultiProcessPortSharedProcessId9, eltdot1sMstpMultiProcessInstanceTimeSinceTopologyChange=eltdot1sMstpMultiProcessInstanceTimeSinceTopologyChange, eltdot1sMstpMultiProcessInstanceEnable=eltdot1sMstpMultiProcessInstanceEnable, eltdot1sMstpMultiProcessDesignatedMaxHopes=eltdot1sMstpMultiProcessDesignatedMaxHopes, eltdot1sMstpMultiProcessInstanceTopChanges=eltdot1sMstpMultiProcessInstanceTopChanges, eltdot1dStpMultiProcessPortSharedProcessId8=eltdot1dStpMultiProcessPortSharedProcessId8, eltdot1sMstpMultiProcessInstanceRemainingHopes=eltdot1sMstpMultiProcessInstanceRemainingHopes, eltdot1dStpMultiProcessPortTable=eltdot1dStpMultiProcessPortTable, eltdot1sMstpMultiProcessEntry=eltdot1sMstpMultiProcessEntry, eltdot1dStpMultiProcessPortEntry=eltdot1dStpMultiProcessPortEntry, eltdot1dStpMultiProcessPortSharedProcessId7=eltdot1dStpMultiProcessPortSharedProcessId7, eltdot1dStpMultiProcessHoldTime=eltdot1dStpMultiProcessHoldTime, eltdot1dStpMultiProcessPortSharedProcessId3=eltdot1dStpMultiProcessPortSharedProcessId3)
| 160.858108
| 5,111
| 0.804427