Dataset schema (one record per source file):
- id: int64 (0 to 458k)
- file_name: string (length 4 to 119)
- file_path: string (length 14 to 227)
- content: string (length 24 to 9.96M)
- size: int64 (24 to 9.96M)
- language: string (1 distinct value)
- extension: string (14 distinct values)
- total_lines: int64 (1 to 219k)
- avg_line_length: float64 (2.52 to 4.63M)
- max_line_length: int64 (5 to 9.91M)
- alphanum_fraction: float64 (0 to 1)
- repo_name: string (length 7 to 101)
- repo_stars: int64 (100 to 139k)
- repo_forks: int64 (0 to 26.4k)
- repo_open_issues: int64 (0 to 2.27k)
- repo_license: string (12 distinct values)
- repo_extraction_date: string (433 distinct values)
id: 10,600 | file_name: olddict.py | file_path: zatosource_zato/code/zato-common/src/zato/common/py23_/past/types/olddict.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
A dict subclass for Python 3 that behaves like Python 2's dict
Example use:
>>> from zato.common.py23_.past.builtins import dict
>>> d1 = dict() # instead of {} for an empty dict
>>> d2 = dict(key1='value1', key2='value2')
The keys, values and items methods now return lists on Python 3.x and there are
methods for iterkeys, itervalues, iteritems, and viewkeys etc.
>>> for d in (d1, d2):
... assert isinstance(d.keys(), list)
... assert isinstance(d.values(), list)
... assert isinstance(d.items(), list)
"""
import sys
from zato.common.py23_.past.utils import with_metaclass
_builtin_dict = dict
ver = sys.version_info[:2]
class BaseOldDict(type):
def __instancecheck__(cls, instance):
return isinstance(instance, _builtin_dict)
class olddict(with_metaclass(BaseOldDict, _builtin_dict)):
"""
A forward port of the Python 2 dict object to Py3
"""
iterkeys = _builtin_dict.keys
viewkeys = _builtin_dict.keys
def keys(self):
return list(super(olddict, self).keys())
itervalues = _builtin_dict.values
viewvalues = _builtin_dict.values
def values(self):
return list(super(olddict, self).values())
iteritems = _builtin_dict.items
viewitems = _builtin_dict.items
def items(self):
return list(super(olddict, self).items())
def has_key(self, k):
"""
D.has_key(k) -> True if D has a key k, else False
"""
return k in self
# def __new__(cls, *args, **kwargs):
# """
# dict() -> new empty dictionary
# dict(mapping) -> new dictionary initialized from a mapping object's
# (key, value) pairs
# dict(iterable) -> new dictionary initialized as if via:
# d = {}
# for k, v in iterable:
# d[k] = v
# dict(**kwargs) -> new dictionary initialized with the name=value pairs
# in the keyword argument list. For example: dict(one=1, two=2)
# """
#
# if len(args) == 0:
# return super(olddict, cls).__new__(cls)
# # Was: elif isinstance(args[0], newbytes):
# # We use type() instead of the above because we're redefining
# # this to be True for all unicode string subclasses. Warning:
# # This may render newstr un-subclassable.
# elif type(args[0]) == olddict:
# return args[0]
# # elif isinstance(args[0], _builtin_dict):
# # value = args[0]
# else:
# value = args[0]
# return super(olddict, cls).__new__(cls, value)
    def __native__(self):
        """
        Hook for the past.utils.native() function
        """
        # Was: return super(oldbytes, self) - a NameError, since oldbytes is not
        # defined in this module; return a plain built-in dict instead.
        return _builtin_dict(self)
__all__ = ['olddict']
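
# Illustrative usage sketch (an editorial addition, not part of the original
# module): how olddict is expected to behave on Python 3.
if __name__ == '__main__':
    d = olddict(key1='value1', key2='value2')
    assert isinstance(d.keys(), list)    # lists, not views, as on Python 2
    assert isinstance(d.values(), list)
    assert isinstance(d.items(), list)
    assert d.has_key('key1')             # Python 2-style membership test
    assert isinstance({}, olddict)       # True via BaseOldDict.__instancecheck__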
size: 4,531 | language: Python | extension: .py | total_lines: 100 | avg_line_length: 41.38 | max_line_length: 130 | alphanum_fraction: 0.58754 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,601 | file_name: basestring.py | file_path: zatosource_zato/code/zato-common/src/zato/common/py23_/past/types/basestring.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
An implementation of the basestring type for Python 3
Example use:
>>> s = b'abc'
>>> assert isinstance(s, basestring)
>>> from zato.common.py23_.past.types import str as oldstr
>>> s2 = oldstr(b'abc')
>>> assert isinstance(s2, basestring)
"""
import sys
from zato.common.py23_.past.utils import with_metaclass, PY2
if PY2:
str = unicode
ver = sys.version_info[:2]
class BaseBaseString(type):
def __instancecheck__(cls, instance):
return isinstance(instance, (bytes, str))
    def __subclasshook__(cls, thing):
        # NotImplemented is not an exception and must not be raised; returning
        # it defers to the default subclass-check machinery instead.
        return NotImplemented
class basestring(with_metaclass(BaseBaseString)):
"""
A minimal forward port of the Python 2 basestring type to Py3
"""
__all__ = ['basestring']
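
# Illustrative usage sketch (an editorial addition, not part of the original
# module): the metaclass makes isinstance() accept both bytes and str.
if __name__ == '__main__':
    assert isinstance(b'abc', basestring)
    assert isinstance('abc', basestring)
    assert not isinstance(123, basestring)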
size: 2,538 | language: Python | extension: .py | total_lines: 52 | avg_line_length: 46.423077 | max_line_length: 130 | alphanum_fraction: 0.60195 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,602 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/py23_/past/types/__init__.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Forward-ports of types from Python 2 for use with Python 3:
- ``basestring``: equivalent to ``(str, bytes)`` in ``isinstance`` checks
- ``dict``: with list-producing .keys() etc. methods
- ``str``: bytes-like, but iterating over them doesn't produce integers
- ``long``: alias of Py3 int with ``L`` suffix in the ``repr``
- ``unicode``: alias of Py3 str with ``u`` prefix in the ``repr``
"""
from zato.common.py23_.past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict
from .oldstr import oldstr
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode']
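
# Illustrative usage sketch (an editorial addition, not part of the original
# module): on Python 3, these names enable Python 2-style checks.
if not utils.PY2 and __name__ == '__main__':
    assert isinstance(b'abc', basestring)
    assert long is int
    assert unicode is str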
size: 2,671 | language: Python | extension: .py | total_lines: 52 | avg_line_length: 49.019231 | max_line_length: 130 | alphanum_fraction: 0.591939 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,603 | file_name: oldstr.py | file_path: zatosource_zato/code/zato-common/src/zato/common/py23_/past/types/oldstr.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Pure-Python implementation of a Python 2-like str object for Python 3.
"""
from numbers import Integral
from zato.common.py23_.past.utils import PY2, with_metaclass
if PY2:
from collections import Iterable
else:
from collections.abc import Iterable
_builtin_bytes = bytes
class BaseOldStr(type):
def __instancecheck__(cls, instance):
return isinstance(instance, _builtin_bytes)
def unescape(s):
"""
Interprets strings with escape sequences
Example:
    >>> s = unescape(r'abc\\def') # i.e. 'abc\\\\def'
    >>> print(s)
    abc\def
    >>> s2 = unescape('abc\\ndef')
    >>> len(s2)
    7
>>> print(s2)
abc
def
"""
return s.encode().decode('unicode_escape')
class oldstr(with_metaclass(BaseOldStr, _builtin_bytes)):
"""
A forward port of the Python 2 8-bit string object to Py3
"""
# Python 2 strings have no __iter__ method:
@property
def __iter__(self):
raise AttributeError
def __dir__(self):
return [thing for thing in dir(_builtin_bytes) if thing != '__iter__']
# def __new__(cls, *args, **kwargs):
# """
# From the Py3 bytes docstring:
# bytes(iterable_of_ints) -> bytes
# bytes(string, encoding[, errors]) -> bytes
# bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer
# bytes(int) -> bytes object of size given by the parameter initialized with null bytes
# bytes() -> empty bytes object
#
# Construct an immutable array of bytes from:
# - an iterable yielding integers in range(256)
# - a text string encoded using the specified encoding
# - any object implementing the buffer API.
# - an integer
# """
#
# if len(args) == 0:
# return super(newbytes, cls).__new__(cls)
# # Was: elif isinstance(args[0], newbytes):
# # We use type() instead of the above because we're redefining
# # this to be True for all unicode string subclasses. Warning:
# # This may render newstr un-subclassable.
# elif type(args[0]) == newbytes:
# return args[0]
# elif isinstance(args[0], _builtin_bytes):
# value = args[0]
# elif isinstance(args[0], unicode):
# if 'encoding' not in kwargs:
# raise TypeError('unicode string argument without an encoding')
# ###
# # Was: value = args[0].encode(**kwargs)
# # Python 2.6 string encode() method doesn't take kwargs:
# # Use this instead:
# newargs = [kwargs['encoding']]
# if 'errors' in kwargs:
# newargs.append(kwargs['errors'])
# value = args[0].encode(*newargs)
# ###
# elif isinstance(args[0], Iterable):
# if len(args[0]) == 0:
# # What is this?
# raise ValueError('unknown argument type')
# elif len(args[0]) > 0 and isinstance(args[0][0], Integral):
# # It's a list of integers
# value = b''.join([chr(x) for x in args[0]])
# else:
# raise ValueError('item cannot be interpreted as an integer')
# elif isinstance(args[0], Integral):
# if args[0] < 0:
# raise ValueError('negative count')
# value = b'\x00' * args[0]
# else:
# value = args[0]
# return super(newbytes, cls).__new__(cls, value)
def __repr__(self):
        s = super(oldstr, self).__repr__() # e.g. b'abc' on Py3; the leading b is stripped below
return s[1:]
def __str__(self):
        s = super(oldstr, self).__str__() # e.g. "b'abc'" or "b'abc\\ndef'"
# TODO: fix this:
assert s[:2] == "b'" and s[-1] == "'"
return unescape(s[2:-1]) # e.g. 'abc' or 'abc\ndef'
def __getitem__(self, y):
if isinstance(y, Integral):
return super(oldstr, self).__getitem__(slice(y, y+1))
else:
return super(oldstr, self).__getitem__(y)
def __getslice__(self, *args):
return self.__getitem__(slice(*args))
    def __contains__(self, key):
        if isinstance(key, int):
            return False
        # Fall back to the built-in bytes behaviour for non-int keys,
        # so that e.g. b'bc' in oldstr(b'abc') still works.
        return super(oldstr, self).__contains__(key)
def __native__(self):
return bytes(self)
__all__ = ['oldstr']
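
# Illustrative usage sketch (an editorial addition, not part of the original
# module): oldstr behaves like a Python 2 8-bit string.
if __name__ == '__main__':
    s = oldstr(b'abc')
    assert s[0] == b'a'                # indexing yields a 1-byte string, not an int
    assert str(s) == 'abc'             # __str__ strips the b'...' wrapper
    assert isinstance(b'xyz', oldstr)  # True via BaseOldStr.__instancecheck__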
size: 6,124 | language: Python | extension: .py | total_lines: 140 | avg_line_length: 39.057143 | max_line_length: 130 | alphanum_fraction: 0.55578 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,604 | file_name: noniterators.py | file_path: zatosource_zato/code/zato-common/src/zato/common/py23_/past/builtins/noniterators.py
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
This module is designed to be used as follows::
from zato.common.py23_.past.builtins.noniterators import filter, map, range, reduce, zip
And then, for example::
assert isinstance(range(5), list)
The list-producing functions this brings in are::
- ``filter``
- ``map``
- ``range``
- ``reduce``
- ``zip``
"""
from itertools import chain, starmap
import itertools # since zip_longest doesn't exist on Py2
from zato.common.py23_.past.types import basestring
from zato.common.py23_.past.utils import PY3
def flatmap(f, items):
return chain.from_iterable(map(f, items))
if PY3:
import builtins
# list-producing versions of the major Python iterating functions
def oldfilter(*args):
"""
filter(function or None, sequence) -> list, tuple, or string
Return those items of sequence for which function(item) is true.
If function is None, return the items that are true. If sequence
is a tuple or string, return the same type, else return a list.
"""
mytype = type(args[1])
if isinstance(args[1], basestring):
return mytype().join(builtins.filter(*args))
elif isinstance(args[1], (tuple, list)):
return mytype(builtins.filter(*args))
else:
# Fall back to list. Is this the right thing to do?
return list(builtins.filter(*args))
# This is surprisingly difficult to get right. For example, the
# solutions here fail with the test cases in the docstring below:
# http://stackoverflow.com/questions/8072755/
def oldmap(func, *iterables):
"""
map(function, sequence[, sequence, ...]) -> list
Return a list of the results of applying the function to the
items of the argument sequence(s). If more than one sequence is
given, the function is called with an argument list consisting of
the corresponding item of each sequence, substituting None for
missing values when not all sequences have the same length. If
the function is None, return a list of the items of the sequence
(or a list of tuples if more than one sequence).
Test cases:
>>> oldmap(None, 'hello world')
['h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd']
>>> oldmap(None, range(4))
[0, 1, 2, 3]
More test cases are in test_past.test_builtins.
"""
zipped = itertools.zip_longest(*iterables)
l = list(zipped)
if len(l) == 0:
return []
if func is None:
result = l
else:
result = list(starmap(func, l))
# Inspect to see whether it's a simple sequence of tuples
try:
if max([len(item) for item in result]) == 1:
return list(chain.from_iterable(result))
# return list(flatmap(func, result))
except TypeError as e:
# Simple objects like ints have no len()
pass
return result
############################
### For reference, the source code for Py2.7 map function:
# static PyObject *
# builtin_map(PyObject *self, PyObject *args)
# {
# typedef struct {
# PyObject *it; /* the iterator object */
# int saw_StopIteration; /* bool: did the iterator end? */
# } sequence;
#
# PyObject *func, *result;
# sequence *seqs = NULL, *sqp;
# Py_ssize_t n, len;
# register int i, j;
#
# n = PyTuple_Size(args);
# if (n < 2) {
# PyErr_SetString(PyExc_TypeError,
# "map() requires at least two args");
# return NULL;
# }
#
# func = PyTuple_GetItem(args, 0);
# n--;
#
# if (func == Py_None) {
# if (PyErr_WarnPy3k("map(None, ...) not supported in 3.x; "
# "use list(...)", 1) < 0)
# return NULL;
# if (n == 1) {
# /* map(None, S) is the same as list(S). */
# return PySequence_List(PyTuple_GetItem(args, 1));
# }
# }
#
# /* Get space for sequence descriptors. Must NULL out the iterator
# * pointers so that jumping to Fail_2 later doesn't see trash.
# */
# if ((seqs = PyMem_NEW(sequence, n)) == NULL) {
# PyErr_NoMemory();
# return NULL;
# }
# for (i = 0; i < n; ++i) {
# seqs[i].it = (PyObject*)NULL;
# seqs[i].saw_StopIteration = 0;
# }
#
# /* Do a first pass to obtain iterators for the arguments, and set len
# * to the largest of their lengths.
# */
# len = 0;
# for (i = 0, sqp = seqs; i < n; ++i, ++sqp) {
# PyObject *curseq;
# Py_ssize_t curlen;
#
# /* Get iterator. */
# curseq = PyTuple_GetItem(args, i+1);
# sqp->it = PyObject_GetIter(curseq);
# if (sqp->it == NULL) {
# static char errmsg[] =
# "argument %d to map() must support iteration";
# char errbuf[sizeof(errmsg) + 25];
# PyOS_snprintf(errbuf, sizeof(errbuf), errmsg, i+2);
# PyErr_SetString(PyExc_TypeError, errbuf);
# goto Fail_2;
# }
#
# /* Update len. */
# curlen = _PyObject_LengthHint(curseq, 8);
# if (curlen > len)
# len = curlen;
# }
#
# /* Get space for the result list. */
# if ((result = (PyObject *) PyList_New(len)) == NULL)
# goto Fail_2;
#
# /* Iterate over the sequences until all have stopped. */
# for (i = 0; ; ++i) {
# PyObject *alist, *item=NULL, *value;
# int numactive = 0;
#
# if (func == Py_None && n == 1)
# alist = NULL;
# else if ((alist = PyTuple_New(n)) == NULL)
# goto Fail_1;
#
# for (j = 0, sqp = seqs; j < n; ++j, ++sqp) {
# if (sqp->saw_StopIteration) {
# Py_INCREF(Py_None);
# item = Py_None;
# }
# else {
# item = PyIter_Next(sqp->it);
# if (item)
# ++numactive;
# else {
# if (PyErr_Occurred()) {
# Py_XDECREF(alist);
# goto Fail_1;
# }
# Py_INCREF(Py_None);
# item = Py_None;
# sqp->saw_StopIteration = 1;
# }
# }
# if (alist)
# PyTuple_SET_ITEM(alist, j, item);
# else
# break;
# }
#
# if (!alist)
# alist = item;
#
# if (numactive == 0) {
# Py_DECREF(alist);
# break;
# }
#
# if (func == Py_None)
# value = alist;
# else {
# value = PyEval_CallObject(func, alist);
# Py_DECREF(alist);
# if (value == NULL)
# goto Fail_1;
# }
# if (i >= len) {
# int status = PyList_Append(result, value);
# Py_DECREF(value);
# if (status < 0)
# goto Fail_1;
# }
# else if (PyList_SetItem(result, i, value) < 0)
# goto Fail_1;
# }
#
# if (i < len && PyList_SetSlice(result, i, len, NULL) < 0)
# goto Fail_1;
#
# goto Succeed;
#
# Fail_1:
# Py_DECREF(result);
# Fail_2:
# result = NULL;
# Succeed:
# assert(seqs);
# for (i = 0; i < n; ++i)
# Py_XDECREF(seqs[i].it);
# PyMem_DEL(seqs);
# return result;
# }
def oldrange(*args, **kwargs):
return list(builtins.range(*args, **kwargs))
def oldzip(*args, **kwargs):
return list(builtins.zip(*args, **kwargs))
filter = oldfilter
map = oldmap
range = oldrange
from functools import reduce
zip = oldzip
__all__ = ['filter', 'map', 'range', 'reduce', 'zip']
else:
import __builtin__
# Python 2-builtin ranges produce lists
filter = __builtin__.filter
map = __builtin__.map
range = __builtin__.range
reduce = __builtin__.reduce
zip = __builtin__.zip
__all__ = []
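
# Illustrative usage sketch (an editorial addition, not part of the original
# module): on Python 3 the shadowed names return lists, as on Python 2, e.g.:
#
#     assert isinstance(range(5), list)
#     assert map(None, 'ab') == ['a', 'b']
#     assert filter(None, (0, 1, 2)) == (1, 2)   # tuples stay tuples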
size: 11,198 | language: Python | extension: .py | total_lines: 274 | avg_line_length: 33.244526 | max_line_length: 130 | alphanum_fraction: 0.47176 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,605 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/py23_/past/builtins/__init__.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
A resurrection of some old functions from Python 2 for use in Python 3. These
should be used sparingly, to help with porting efforts, since code using them
is no longer standard Python 3 code.
This module provides the following:
1. Implementations of these builtin functions which have no equivalent on Py3:
- apply
- chr
- cmp
- execfile
2. Aliases:
- intern <- sys.intern
- raw_input <- input
- reduce <- functools.reduce
- reload <- imp.reload
- unichr <- chr
- unicode <- str
- xrange <- range
3. List-producing versions of the corresponding Python 3 iterator-producing functions:
- filter
- map
- range
- zip
4. Forward-ported Py2 types:
- basestring
- dict
- str
- long
- unicode
"""
from zato.common.ext.future.utils import PY3
from zato.common.py23_.past.builtins.noniterators import (filter, map, range, reduce, zip)
# from zato.common.py23_.past.builtins.misc import (ascii, hex, input, oct, open)
if PY3:
from zato.common.py23_.past.types import (basestring,
olddict as dict,
oldstr as str,
long,
unicode)
else:
from __builtin__ import (basestring, dict, str, long, unicode)
from zato.common.py23_.past.builtins.misc import (apply, chr, cmp, execfile, intern, oct,
raw_input, unichr, unicode, xrange)
from zato.common.py23_.past import utils
if utils.PY3:
    # We only export names that shadow the builtins on Py3 - no other
    # namespace pollution.
    __all__ = ['filter', 'map', 'range', 'reduce', 'zip',
               'basestring', 'dict', 'str', 'long', 'unicode',
               'apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input',
               'unichr', 'xrange'
              ]
else:
# No namespace pollution on Py2
__all__ = []
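
# Illustrative usage sketch (an editorial addition, not part of the original
# module): a typical porting-aid import on Python 3, e.g.:
#
#     from zato.common.py23_.past.builtins import cmp, xrange, basestring
#     assert cmp(1, 2) == -1
#     assert isinstance(xrange(3), range)
#     assert isinstance(b'abc', basestring)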
size: 3,667 | language: Python | extension: .py | total_lines: 83 | avg_line_length: 40.036145 | max_line_length: 130 | alphanum_fraction: 0.606802 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,606 | file_name: misc.py | file_path: zatosource_zato/code/zato-common/src/zato/common/py23_/past/builtins/misc.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# ################################################################################################################################
# ################################################################################################################################
import inspect
from zato.common.ext.future.utils import PY2, PY3, exec_
if PY2:
from collections import Mapping
else:
from collections.abc import Mapping
if PY3:
import builtins
from collections.abc import Mapping
def apply(f, *args, **kw):
return f(*args, **kw)
from zato.common.py23_.past.builtins import str as oldstr
def chr(i):
"""
        Return a byte-string of one character with ordinal i; 0 <= i <= 255
"""
return oldstr(bytes((i,)))
def cmp(x, y):
"""
cmp(x, y) -> integer
Return negative if x<y, zero if x==y, positive if x>y.
"""
return (x > y) - (x < y)
from sys import intern
def oct(number):
"""oct(number) -> string
Return the octal representation of an integer
"""
return '0' + builtins.oct(number)[2:]
raw_input = input
unicode = str
unichr = chr
xrange = range
else:
import __builtin__
from collections import Mapping
apply = __builtin__.apply
chr = __builtin__.chr
cmp = __builtin__.cmp
execfile = __builtin__.execfile
intern = __builtin__.intern
oct = __builtin__.oct
raw_input = __builtin__.raw_input
unicode = __builtin__.unicode
unichr = __builtin__.unichr
xrange = __builtin__.xrange
if PY3:
def execfile(filename, myglobals=None, mylocals=None):
"""
Read and execute a Python script from a file in the given namespaces.
The globals and locals are dictionaries, defaulting to the current
globals and locals. If only globals is given, locals defaults to it.
"""
if myglobals is None:
# There seems to be no alternative to frame hacking here.
caller_frame = inspect.stack()[1]
myglobals = caller_frame[0].f_globals
mylocals = caller_frame[0].f_locals
elif mylocals is None:
# Only if myglobals is given do we set mylocals to it.
mylocals = myglobals
if not isinstance(myglobals, Mapping):
raise TypeError('globals must be a mapping')
if not isinstance(mylocals, Mapping):
raise TypeError('locals must be a mapping')
with open(filename, "rb") as fin:
source = fin.read()
code = compile(source, filename, "exec")
exec_(code, myglobals, mylocals)
if PY3:
    # Note: 'reload' is not defined in this module, so it is not exported here.
    __all__ = ['apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input',
               'unichr', 'unicode', 'xrange']
else:
__all__ = []
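
# Illustrative usage sketch (an editorial addition, not part of the original
# module): Python 2 semantics of the re-implemented builtins on Python 3.
if PY3 and __name__ == '__main__':
    assert cmp(1, 2) == -1 and cmp(2, 2) == 0 and cmp(3, 2) == 1
    assert chr(97) == b'a'     # a byte-string, as on Python 2
    assert oct(8) == '010'     # Python 2-style octal prefix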
size: 4,376 | language: Python | extension: .py | total_lines: 102 | avg_line_length: 37.245098 | max_line_length: 130 | alphanum_fraction: 0.588651 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,607 | file_name: api.py | file_path: zatosource_zato/code/zato-common/src/zato/common/marshal_/api.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from dataclasses import asdict, _FIELDS, make_dataclass, MISSING, _PARAMS # type: ignore
from http.client import BAD_REQUEST
from inspect import isclass
from typing import Any
try:
from typing import _GenericAlias as _ListBaseClass # type: ignore
except ImportError:
class _Sentinel:
pass
_ListBaseClass = _Sentinel
# BSON (MongoDB)
from bson import ObjectId
# dateutil
from dateutil.parser import parse as dt_parse
# orjson
from orjson import dumps
# SQLAlchemy
from sqlalchemy.sql.schema import Table
# typing-utils
from typing_utils import issubtype
# Zato
from zato.common.api import ZatoNotGiven
from zato.common.marshal_.model import BaseModel
from zato.common.typing_ import cast_, date_, datetime_, datetimez, extract_from_union, isotimestamp, is_union, type_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from dataclasses import Field
from zato.common.typing_ import any_, anydict, anylist, boolnone, dictnone, intnone, optional, tuplist
from zato.server.base.parallel import ParallelServer
from zato.server.service import Service
Field = Field
Service = Service
# ################################################################################################################################
# ################################################################################################################################
_None_Type = type(None)
# ################################################################################################################################
# ################################################################################################################################
def is_list(field_type:'Field', is_class:'bool') -> 'bool':
# Using str is the only reliable method
if 'typing.Union' in str(field_type):
type_to_check = field_type.__args__[0] # type: ignore
else:
type_to_check = field_type
    # This will exist if the input is, for instance, abc = type_[Service]
# Again, this is the only reliable way to check it.
is_type = 'typing.Type' in str(type_to_check)
# If it is a type object, we know it cannot be a list so we can return here.
if is_type:
return False
is_list_base_class_instance = isinstance(type_to_check, _ListBaseClass)
is_list_sub_type = issubtype(type_to_check, list) # type: ignore
if is_list_base_class_instance:
result = True
elif (is_class and is_list_sub_type):
result = True
else:
result = False
return result
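
# For example (an editorial illustration): is_list(typing.List[str], False)
# returns True, whereas is_list(int, True) returns False.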
# ################################################################################################################################
# ################################################################################################################################
def extract_model_class(field_type:'Field') -> 'Model | None':
# The attribute is defined by typing.List but not by list elements,
# hence the getattr call ..
type_args = getattr(field_type, '__args__', None)
# .. if there are any arguments found ..
if type_args:
# .. the first one will be our model class.
return type_args[0]
# ################################################################################################################################
# ################################################################################################################################
class Model(BaseModel):
__name__: 'str'
after_created = None
def __getitem__(self, name, default=None):
if not isinstance(name, str):
name = str(name)
return getattr(self, name, default)
def __contains__(self, name):
return hasattr(self, name)
def get(self, name):
return self.__getitem__(name)
@classmethod
def zato_get_fields(class_:'any_') -> 'anydict':
fields = getattr(class_, _FIELDS) # type: anydict
return fields
@classmethod
def _zato_from_dict(class_, data, extra=None):
api = MarshalAPI()
return api.from_dict(cast_('Service', None), data, class_, extra=extra)
from_dict = _zato_from_dict
def to_dict(self):
return asdict(self)
def _json_default_serializer(self, value):
# Serialize BSON / MongoDB objects
if isinstance(value, ObjectId):
return str(value)
elif isinstance(value, bytes):
return value.decode('utf8')
# We do not know how to serialize it
return value
def to_json(self, default=None, impl_extra=0):
return dumps(self, default=default or self._json_default_serializer, option=impl_extra)
def clone(self) -> 'any_':
data = self.to_dict()
out = self.__class__._zato_from_dict(None, data)
return out
@staticmethod
def build_model_from_flat_input(
server, # type: ParallelServer
sio_server_config, # type: ignore
_CySimpleIO, # type: ignore
name, # type: str
input, # type: str | tuplist
) -> 'type_[BaseModel]':
# Local imports
from zato.simpleio import is_sio_bool, is_sio_int
# Local aliases
model_fields = []
# Make sure this is a list-like container ..
if isinstance(input, str):
input = [input]
# .. build an actual SIO handler ..
_cy_simple_io = _CySimpleIO(server, sio_server_config, input) # type: ignore
# .. now, go through everything we have on input ..
for item in input:
# .. find out if this is a required element or not ..
is_optional = item.startswith('-')
is_required = not is_optional
# .. turn each element input into a Cython-based one ..
sio_elem = _cy_simple_io.convert_to_elem_instance(item, is_required) # type: ignore
# .. check if it is not a string ..
is_int:'bool' = is_sio_int(sio_elem)
is_bool:'bool' = is_sio_bool(sio_elem)
# .. turn the type into a model-compatible name ..
if is_int:
_model_type = int
elif is_bool:
_model_type = bool
else:
_model_type = str
# .. append a model-compatible definition of this field for later use ..
model_fields.append((sio_elem.name, _model_type))
model = make_dataclass(name, model_fields, bases=(Model,))
return model # type: ignore
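
# Illustrative sketch (an editorial addition; MyUser is a hypothetical model):
# how a Model subclass is typically declared and populated from a dict, e.g.:
#
#     from dataclasses import dataclass
#
#     @dataclass(init=False)
#     class MyUser(Model):
#         name: str
#         age: int
#
#     user = MyUser.from_dict({'name': 'abc', 'age': '42'})
#     assert user.age == 42          # '42' was parsed into an int
#     assert user.to_dict() == {'name': 'abc', 'age': 42}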
# ################################################################################################################################
# ################################################################################################################################
class ModelCtx:
service: 'Service'
data: 'anydict | Model'
DataClass: 'any_'
# ################################################################################################################################
# ################################################################################################################################
class ModelValidationError(Exception):
""" Base class for model validation errors.
"""
def __init__(self, elem_path:'str'):
self.elem_path = elem_path
self.reason = self.msg = self.get_reason()
self.status = BAD_REQUEST
self.needs_msg = True # Added for compatibility with BadRequest as used in channel.py:dispatch
# ################################################################################################################################
def get_reason(self):
raise NotImplementedError()
# ################################################################################################################################
# ################################################################################################################################
class ElementMissing(ModelValidationError):
def __repr__(self):
return '<{} at {} -> {}>'.format(self.__class__.__name__, hex(id(self)), self.reason)
__str__ = __repr__
def get_reason(self):
return 'Element missing: {}'.format(self.elem_path)
class ElementIsNotAList(ElementMissing):
def get_reason(self):
return 'Element is not a list: {}'.format(self.elem_path)
# ################################################################################################################################
# ################################################################################################################################
class DictCtx:
def __init__(
self,
service: 'Service',
current_dict: 'anydict | Model',
DataClass: 'any_',
extra: 'dictnone',
list_idx: 'intnone',
parent: 'optional[FieldCtx]' = None
) -> 'None':
# We get these on input ..
self.service = service
self.current_dict = current_dict
self.extra = extra
self.DataClass = DataClass
self.list_idx = list_idx
self.parent = parent
# .. while these we need to build ourselves in self.init.
self.has_init = None # type: boolnone
        # These are the Field objects that we expect this dict will contain,
        # i.e. it will be possible to map ourselves to these Field objects.
self.fields = None # type: dictnone
# This will be populated with parameters to the dataclass's __init__ method, assuming that the class has one.
self.init_attrs = {}
# This will be populated with parameters to be set via setattr, in case the class does not have __init__.
self.setattr_attrs = {}
# We can check it once upfront and make it point to either init_attrs or setattr_attrs
self.attrs_container = cast_('dict', None) # type: dictnone
# ################################################################################################################################
def init(self):
        # Whether the dataclass defines the __init__ method
dataclass_params = getattr(self.DataClass, _PARAMS, None)
self.has_init = dataclass_params.init if dataclass_params else False
self.attrs_container = self.init_attrs if self.has_init else self.setattr_attrs
self.fields = getattr(self.DataClass, _FIELDS) # type: dictnone
# ################################################################################################################################
# ################################################################################################################################
class FieldCtx:
def __init__(self, dict_ctx, field, parent):
# type: (DictCtx, Field, optional[FieldCtx]) -> None
# We get these on input ..
self.dict_ctx = dict_ctx
self.field = field
self.parent = parent
self.name = self.field.name # type: str
# This will be the same as self.field.type unless self.field.type is a union (e.g. optional[str]).
# In this case, self.field_type will be str whereas self.field.type will be the original type.
self.field_type = None # type: object
# .. by default, assume we have no type information (we do not know what model class it is)
self.model_class = None # type: object
# .. while these we need to build ourselves in self.init ..
self.value = None # type: any_
self.is_class = None # type: boolnone
self.is_list = None # type: boolnone
self.is_required = None # type: boolnone
        # This indicates whether we ourselves are a Model instance
self.is_model = None # type: boolnone
        # This indicates whether we are a list that contains a Model instance.
        # The value is based on whether self.model_class exists
        # and whether it points to a Model rather than, for instance, the str class,
        # as the latter is possible in strlist definitions.
self.contains_model = False
# We set this flag to True only if there is some extra data that we have
# and if we are a top-level element, as indicated by the lack of parent.
self.has_extra = self.dict_ctx.extra and (not self.dict_ctx.parent)
# ################################################################################################################################
def init(self):
# Assume that we do not have any value
value = ZatoNotGiven
# If we have extra data, that will take priority over our regular dict, which is why we check it first here.
if self.has_extra:
if self.dict_ctx.extra:
value = self.dict_ctx.extra.get(self.name, ZatoNotGiven)
# If we do not have a value here, it means that we have no extra,
# or that it did not contain the expected value so we look it up in the current dictionary.
if value == ZatoNotGiven:
if isinstance(self.dict_ctx.current_dict, dict):
value = self.dict_ctx.current_dict.get(self.name, ZatoNotGiven)
elif isinstance(self.dict_ctx.current_dict, Model): # type: ignore
value = getattr(self.dict_ctx.current_dict, self.name, ZatoNotGiven)
# We do not handle SQLAlchemy Table objects
is_table = isinstance(value, Table)
# If this field has a value, we can try to parse it into a specific type ..
if (not is_table) and value and (value != ZatoNotGiven):
try:
# .. as an integer ..
if self.field_type is int:
if not isinstance(value, int):
value = int(value)
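            # .. as a date object ..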
if self.field_type is date_:
if not isinstance(value, date_):
value = dt_parse(value).date() # type: ignore
# .. as a datetime object ..
elif self.field_type in (datetime_, datetimez):
if not isinstance(value, (date_, datetime_, datetimez)):
_is_datetimez = self.field_type is datetimez
value = dt_parse(value) # type: ignore
if _is_datetimez:
value = datetimez(
year=value.year,
month=value.month,
day=value.day,
hour=value.hour,
minute=value.minute,
second=value.second,
microsecond=value.microsecond,
tzinfo=value.tzinfo,
fold=value.fold,
)
# .. as a datetime object formatted as string ..
elif self.field_type is isotimestamp:
if isinstance(value, str):
value = dt_parse(value) # type: ignore
value = value.isoformat()
except Exception as e:
msg = f'Value `{repr(value)}` of field {self.name} could not be parsed -> {e} -> {self.dict_ctx.current_dict}'
raise Exception(msg)
# At this point, we know there will be something to assign although it still may be ZatoNotGiven.
self.value = value
self.is_class = isclass(self.field.type)
self.is_model = self.is_class and issubclass(self.field.type, Model)
self.is_list = is_list(self.field.type, self.is_class) # type: ignore
#
# This is a list and we need to check if its definition
# contains information about the actual type of elements inside.
#
# If it does, in runtime, we will be extracting that particular type.
# Otherwise, we will just pass this list on as it is.
#
if self.is_list:
self.model_class = extract_model_class(self.field.type) # type: ignore
self.contains_model = bool(self.model_class and hasattr(self.model_class, _FIELDS))
# ################################################################################################################################
def get_name(self):
if self.dict_ctx.list_idx is not None:
return '{}[{}]'.format(self.name, self.dict_ctx.list_idx)
else:
return self.name
# ################################################################################################################################
# ################################################################################################################################
class MarshalAPI:
def __init__(self):
self._field_cache = {}
# ################################################################################################################################
def get_validation_error(
self,
field_ctx, # type: FieldCtx
error_class=ElementMissing # type: any_
) -> 'ModelValidationError':
# This will always exist
elem_path = [field_ctx.name]
# Keep checking parent fields as long as they exist
while field_ctx.parent:
elem_path.append(field_ctx.parent.get_name())
field_ctx = field_ctx.parent
# We need to reverse it now to present a top-down view
elem_path = reversed(elem_path)
# Now, join it with a elem_path separator
elem_path = '/' + '/'.join(elem_path)
return error_class(elem_path)
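
        # For example (an editorial illustration; the field names are hypothetical),
        # a missing element nested in a list produces a path such as
        # '/items[2]/name', built from the reversed chain of parent contexts.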
# ################################################################################################################################
def _self_require_dict_or_model(self, field_ctx:'FieldCtx') -> 'None':
if not isinstance(field_ctx.value, (dict, BaseModel)):
raise self.get_validation_error(field_ctx)
# ################################################################################################################################
def _visit_list(self, field_ctx:'FieldCtx') -> 'anylist':
# Local aliases
service = field_ctx.dict_ctx.service
model_class = field_ctx.model_class
        # Response to produce
out = []
# Visit each element in the list ..
for idx, elem in enumerate(field_ctx.value):
if field_ctx.is_list:
field_ctx.dict_ctx.list_idx = idx
# .. convert it to a model instance ..
instance = self.from_dict(service, elem, model_class, list_idx=idx, parent=field_ctx)
# .. and append it for our caller ..
out.append(instance)
# .. finally, return the response.
return out # type: ignore
# ################################################################################################################################
def from_field_ctx(self, field_ctx:'FieldCtx') -> 'any_':
return self.from_dict(field_ctx.dict_ctx.service, field_ctx.value, field_ctx.field.type,
extra=None, list_idx=field_ctx.dict_ctx.list_idx, parent=field_ctx)
# ################################################################################################################################
def _ensure_value_is_a_list(self, field_ctx:'FieldCtx', value:'any_') -> 'None':
if not isinstance(value, list):
raise self.get_validation_error(field_ctx, error_class=ElementIsNotAList)
# ################################################################################################################################
def from_dict(
self,
service: 'Service',
current_dict: 'anydict | BaseModel',
DataClass: 'any_',
extra: 'dictnone' = None,
list_idx: 'intnone' = None,
parent: 'optional[FieldCtx]' = None
) -> 'any_':
dict_ctx = DictCtx(service, current_dict, DataClass, extra, list_idx, parent)
dict_ctx.init()
# All fields that we will visit
field_items = sorted(dict_ctx.fields.items()) # type: any_
for _ignored_name, _field in field_items:
# Assume we are required ..
is_required = True
# Use this by default ..
field_type = _field.type
# .. unless it is a union with None = this field is really optional[type_]
if is_union(_field.type):
result = extract_from_union(_field.type)
_, field_type, union_with = result
# .. check if this was an optional field.
is_required = not (union_with is _None_Type)
# Represents a current field in the model in the context of the input dict ..
field_ctx = FieldCtx(dict_ctx, _field, parent)
field_ctx.is_required = is_required
field_ctx.field_type = field_type
            # .. this call will populate the initial value of the field as well (field_ctx.value) ..
field_ctx.init()
# If this field points to a model ..
if field_ctx.is_model:
# .. first, we need a dict as value as it is the only container that we can extract model fields from ..
self._self_require_dict_or_model(field_ctx)
# .. if we are here, it means that we can check the dict and extract its fields,
# but note that we do not pass extra data on to nested models
# because we can only ever overwrite top-level elements with what extra contains.
field_ctx.value = self.from_field_ctx(field_ctx)
# .. if this field points to a list ..
elif field_ctx.is_list:
# If we have a model class the elements of the list are of,
# we need to visit each of them now.
if field_ctx.model_class:
# Enter further only if we have any value at all to check ..
if field_ctx.value and field_ctx.value != ZatoNotGiven: # type: ignore
# .. if the field is required, make sure that what we have on input really is a list object ..
if field_ctx.is_required:
self._ensure_value_is_a_list(field_ctx, field_ctx.value)
# However, that model class may actually point to <type 'str'> types
# in case of fields like strlist, and we need to take that into account
# before entering the _visit_list method below.
if field_ctx.is_model or field_ctx.contains_model:
field_ctx.value = self._visit_list(field_ctx)
# .. if we are here, it may be because the value is a dictlist instance
# .. for which there will be no underlying model and we can just assign it as is ..
else:
#
                    # The current field's value may be produced by a default factory
                    # in declarations such as the one below, which is why we need to
                    # ensure that this name exists in current_dict before we extract its value.
#
#
# @dataclass(init=False, repr=False)
# class MyModel(Model):
# my_list: anylistnone = list_field()
# my_dict: anydictnone = dict_field()
#
if field_ctx.name in current_dict:
# .. extract the value first ..
value = current_dict[field_ctx.name]
# .. if the field is required, make sure that what we have on input really is a list object ..
if field_ctx.is_required:
self._ensure_value_is_a_list(field_ctx, value)
# .. assign the list now.
field_ctx.value = value
# If we do not have a value yet, perhaps we will find a default one
if field_ctx.value == ZatoNotGiven:
default = field_ctx.field.default
default_factory = field_ctx.field.default_factory
if default is not MISSING:
field_ctx.value = default
elif default_factory and default_factory is not MISSING:
field_ctx.value = default_factory()
# Let's check if we found any value
if field_ctx.value != ZatoNotGiven:
value = field_ctx.value
else:
if field_ctx.is_required:
raise self.get_validation_error(field_ctx)
else:
# This is the most reliable way
if 'typing.List' in str(field_ctx.field_type):
value = []
elif field_ctx.field_type is Any:
value = None
elif issubclass(field_ctx.field_type, str):
value = ''
elif issubclass(field_ctx.field_type, int):
value = 0
elif issubclass(field_ctx.field_type, list):
value = []
elif issubclass(field_ctx.field_type, dict):
value = {}
elif issubclass(field_ctx.field_type, float):
value = 0.0
else:
value = None
# Assign the value now
dict_ctx.attrs_container[field_ctx.name] = value # type: ignore
# Create a new instance, potentially with attributes ..
instance = DataClass(**dict_ctx.init_attrs) # type: Model
# .. and add extra ones in case __init__ was not defined ..
for k, v in dict_ctx.setattr_attrs.items():
setattr(instance, k, v)
# .. run the post-creation hook ..
if instance.after_created:
ctx = ModelCtx()
ctx.service = service
ctx.data = dict_ctx.current_dict
ctx.DataClass = DataClass
instance.after_created(ctx)
# .. and return the new dataclass to our caller.
return instance
# ################################################################################################################################
def unmarshall(self, data:'dict', class_:'any_') -> 'any_':
""" A publicly available convenience method to unmarshall arbitrary dicts into model classes.
"""
return self.from_dict(cast_('Service', None), data, class_)
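
    # Illustrative usage sketch (an editorial addition; MyModel is a
    # hypothetical Model subclass), e.g.:
    #
    #     api = MarshalAPI()
    #     instance = api.unmarshall({'name': 'abc'}, MyModel)
    #     assert isinstance(instance, MyModel)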
# ################################################################################################################################
# ################################################################################################################################
size: 27,591 | language: Python | extension: .py | total_lines: 503 | avg_line_length: 43.709742 | max_line_length: 130 | alphanum_fraction: 0.483394 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,608 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/marshal_/__init__.py
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
size: 148 | language: Python | extension: .py | total_lines: 5 | avg_line_length: 28.2 | max_line_length: 64 | alphanum_fraction: 0.687943 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,609 | file_name: model.py | file_path: zatosource_zato/code/zato-common/src/zato/common/marshal_/model.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
class BaseModel:
""" This is a base class for actual models.
"""
size: 228 | language: Python | extension: .py | total_lines: 8 | avg_line_length: 26.125 | max_line_length: 64 | alphanum_fraction: 0.672811 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,610 | file_name: simpleio.py | file_path: zatosource_zato/code/zato-common/src/zato/common/marshal_/simpleio.py
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# orjson
from orjson import loads
# Zato
from zato.common import DATA_FORMAT
from zato.common.marshal_.api import Model
from zato.common.pubsub import PubSubMessage
# ################################################################################################################################
# ################################################################################################################################
if 0:
from dataclasses import Field
from zato.common.typing_ import any_
from zato.cy.simpleio import SIOServerConfig
from zato.server.base.parallel import ParallelServer
from zato.server.service import Service
Field = Field
Service = Service
SIOServerConfig = SIOServerConfig
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
_dict_like = {DATA_FORMAT.DICT, DATA_FORMAT.JSON}
# ################################################################################################################################
# ################################################################################################################################
class DataClassSimpleIO:
service_class: 'Service'
# We are based on dataclasses, unlike CySimpleIO
is_dataclass = True
def __init__(
self,
server, # type: ParallelServer
server_config, # type: SIOServerConfig
user_declaration # type: any_
) -> 'None':
self.server = server
self.server_config = server_config
self.user_declaration = user_declaration
@staticmethod
def attach_sio(server, server_config, class_):
""" Given a service class, the method extracts its user-defined SimpleIO definition
and attaches the Cython-based one to the class's _sio attribute.
"""
try:
# pylint: disable=attribute-defined-outside-init
# Get the user-defined SimpleIO definition
user_sio = getattr(class_, 'SimpleIO', None)
# This class does not use SIO so we can just return immediately
if not user_sio:
return
            # Attach the DataClassSimpleIO object representing the parsed user definition
sio = DataClassSimpleIO(server, server_config, user_sio)
sio.service_class = class_
class_._sio = sio
except Exception:
logger.warning('Could not attach DataClassSimpleIO to class `%s`, e:`%s`', class_, format_exc())
raise
# ################################################################################################################################
def parse_input(
self,
data, # type: any_
data_format, # type: any_
service, # type: Service
extra # type: any_
) -> 'any_':
# If we have a SimpleIO input declared ..
if getattr(self.user_declaration, 'input', None):
# .. if it already is a model, we give it to the service as-is ..
if isinstance(data, Model):
return data
elif isinstance(data, PubSubMessage):
data = data.data
            # .. otherwise, if the format is dict-like but the data is neither
            # .. a dict nor a list, parse it from its serialized JSON form.
            if data_format in _dict_like and not isinstance(data, dict):
                if not isinstance(data, list):
                    data = loads(data)
return self.server.marshal_api.from_dict(service, data, self.user_declaration.input, extra)
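    # A minimal sketch, not part of the original module. Assuming sio is a
    # DataClassSimpleIO attached to a service whose SimpleIO declares
    # input = MyModel (a hypothetical Model subclass), a JSON request body
    # is first parsed by loads() above and then marshalled into the model:
    #
    # >>> model = sio.parse_input(b'{"user_id": 123}', DATA_FORMAT.JSON, service, None)
    # >>> model.user_id
    # 123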
# ################################################################################################################################
# ################################################################################################################################
| 4,475 | Python | .py | 87 | 44.252874 | 130 | 0.428408 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,611 | _dataclasses.py | zatosource_zato/code/zato-common/src/zato/common/ext/_dataclasses.py |
"""
This module is a vendor copy of the dataclasses package from https://pypi.org/project/dataclasses/
The original license is:
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# flake8: noqa
import re
import sys
import copy
import types
import inspect
import keyword
__all__ = ['dataclass',
'field',
'Field',
'FrozenInstanceError',
'InitVar',
'MISSING',
# Helper functions.
'fields',
'asdict',
'astuple',
'make_dataclass',
'replace',
'is_dataclass',
]
# Conditions for adding methods. The boxes indicate what action the
# dataclass decorator takes. For all of these tables, when I talk
# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm
# referring to the arguments to the @dataclass decorator. When
# checking if a dunder method already exists, I mean check for an
# entry in the class's __dict__. I never check to see if an attribute
# is defined in a base class.
# Key:
# +=========+=========================================+
# + Value | Meaning |
# +=========+=========================================+
# | <blank> | No action: no method is added. |
# +---------+-----------------------------------------+
# | add | Generated method is added. |
# +---------+-----------------------------------------+
# | raise | TypeError is raised. |
# +---------+-----------------------------------------+
# | None | Attribute is set to None. |
# +=========+=========================================+
# __init__
#
# +--- init= parameter
# |
# v | | |
# | no | yes | <--- class has __init__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __repr__
#
# +--- repr= parameter
# |
# v | | |
# | no | yes | <--- class has __repr__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __setattr__
# __delattr__
#
# +--- frozen= parameter
# |
# v | | |
# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__?
# +=======+=======+=======+
# | False | | | <- the default
# +-------+-------+-------+
# | True | add | raise |
# +=======+=======+=======+
# Raise because not adding these methods would break the "frozen-ness"
# of the class.
# __eq__
#
# +--- eq= parameter
# |
# v | | |
# | no | yes | <--- class has __eq__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __lt__
# __le__
# __gt__
# __ge__
#
# +--- order= parameter
# |
# v | | |
# | no | yes | <--- class has any comparison method in __dict__?
# +=======+=======+=======+
# | False | | | <- the default
# +-------+-------+-------+
# | True | add | raise |
# +=======+=======+=======+
# Raise because to allow this case would interfere with using
# functools.total_ordering.
# __hash__
# +------------------- unsafe_hash= parameter
# | +----------- eq= parameter
# | | +--- frozen= parameter
# | | |
# v v v | | |
# | no | yes | <--- class has explicitly defined __hash__
# +=======+=======+=======+========+========+
# | False | False | False | | | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | False | True | | | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | True | False | None | | <-- the default, not hashable
# +-------+-------+-------+--------+--------+
# | False | True | True | add | | Frozen, so hashable, allows override
# +-------+-------+-------+--------+--------+
# | True | False | False | add | raise | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True | False | True | add | raise | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True | True | False | add | raise | Not frozen, but hashable
# +-------+-------+-------+--------+--------+
# | True | True | True | add | raise | Frozen, so hashable
# +=======+=======+=======+========+========+
# For boxes that are blank, __hash__ is untouched and therefore
# inherited from the base class. If the base is object, then
# id-based hashing is used.
#
# Note that a class may already have __hash__=None if it specified an
# __eq__ method in the class body (not one that was created by
# @dataclass).
#
# See _hash_action (below) for a coded version of this table.
# Raised when an attempt is made to modify a frozen class.
class FrozenInstanceError(AttributeError): pass
# A sentinel object for default values to signal that a default
# factory will be used. This is given a nice repr() which will appear
# in the function signature of dataclasses' constructors.
class _HAS_DEFAULT_FACTORY_CLASS:
def __repr__(self):
return '<factory>'
_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS()
# A sentinel object to detect if a parameter is supplied or not. Use
# a class to give it a better repr.
class _MISSING_TYPE:
pass
MISSING = _MISSING_TYPE()
# Since most per-field metadata will be unused, create an empty
# read-only proxy that can be shared among all fields.
_EMPTY_METADATA = types.MappingProxyType({})
# Markers for the various kinds of fields and pseudo-fields.
class _FIELD_BASE:
def __init__(self, name):
self.name = name
def __repr__(self):
return self.name
_FIELD = _FIELD_BASE('_FIELD')
_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR')
_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR')
# The name of an attribute on the class where we store the Field
# objects. Also used to check if a class is a Data Class.
_FIELDS = '__dataclass_fields__'
# The name of an attribute on the class that stores the parameters to
# @dataclass.
_PARAMS = '__dataclass_params__'
# The name of the function that, if it exists, is called at the end of
# __init__.
_POST_INIT_NAME = '__post_init__'
# String regex that string annotations for ClassVar or InitVar must match.
# Allows "identifier.identifier[" or "identifier[".
# https://bugs.python.org/issue33453 for details.
_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')
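# For illustration (not part of the original source), the regex captures the
# optional module prefix and the base identifier, e.g.:
# _MODULE_IDENTIFIER_RE.match('typing.ClassVar[int]').groups() == ('typing', 'ClassVar')
# _MODULE_IDENTIFIER_RE.match('ClassVar[int]').groups() == (None, 'ClassVar')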
class _InitVarMeta(type):
def __getitem__(self, params):
return self
class InitVar(metaclass=_InitVarMeta):
pass
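# An illustrative sketch, not part of the original source: InitVar marks a
# pseudo-field that becomes an __init__ parameter and is passed on to
# __post_init__, but is never stored on the instance:
#
# >>> @dataclass
# ... class C:
# ...     x: int
# ...     scale: InitVar[int] = 1
# ...     def __post_init__(self, scale):
# ...         self.x *= scale
# >>> C(2, scale=10).x
# 20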
# Instances of Field are only ever created from within this module,
# and only from the field() function, although Field instances are
# exposed externally as (conceptually) read-only objects.
#
# name and type are filled in after the fact, not in __init__.
# They're not known at the time this class is instantiated, but it's
# convenient if they're available later.
#
# When cls._FIELDS is filled in with a list of Field objects, the name
# and type fields will have been populated.
class Field:
__slots__ = ('name',
'type',
'default',
'default_factory',
'repr',
'hash',
'init',
'compare',
'metadata',
'_field_type', # Private: not to be used by user code.
)
def __init__(self, default, default_factory, init, repr, hash, compare,
metadata):
self.name = None
self.type = None
self.default = default
self.default_factory = default_factory
self.init = init
self.repr = repr
self.hash = hash
self.compare = compare
self.metadata = (_EMPTY_METADATA
if metadata is None or len(metadata) == 0 else
types.MappingProxyType(metadata))
self._field_type = None
def __repr__(self):
return ('Field('
f'name={self.name!r},'
f'type={self.type!r},'
f'default={self.default!r},'
f'default_factory={self.default_factory!r},'
f'init={self.init!r},'
f'repr={self.repr!r},'
f'hash={self.hash!r},'
f'compare={self.compare!r},'
f'metadata={self.metadata!r},'
f'_field_type={self._field_type}'
')')
# This is used to support the PEP 487 __set_name__ protocol in the
# case where we're using a field that contains a descriptor as a
    # default value. For details on __set_name__, see
# https://www.python.org/dev/peps/pep-0487/#implementation-details.
#
# Note that in _process_class, this Field object is overwritten
# with the default value, so the end result is a descriptor that
# had __set_name__ called on it at the right time.
def __set_name__(self, owner, name):
func = getattr(type(self.default), '__set_name__', None)
if func:
# There is a __set_name__ method on the descriptor, call
# it.
func(self.default, owner, name)
class _DataclassParams:
__slots__ = ('init',
'repr',
'eq',
'order',
'unsafe_hash',
'frozen',
)
def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
self.init = init
self.repr = repr
self.eq = eq
self.order = order
self.unsafe_hash = unsafe_hash
self.frozen = frozen
def __repr__(self):
return ('_DataclassParams('
f'init={self.init!r},'
f'repr={self.repr!r},'
f'eq={self.eq!r},'
f'order={self.order!r},'
f'unsafe_hash={self.unsafe_hash!r},'
f'frozen={self.frozen!r}'
')')
# This function is used instead of exposing Field creation directly,
# so that a type checker can be told (via overloads) that this is a
# function whose type depends on its parameters.
def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True,
hash=None, compare=True, metadata=None):
"""Return an object to identify dataclass fields.
default is the default value of the field. default_factory is a
0-argument function called to initialize a field's value. If init
is True, the field will be a parameter to the class's __init__()
function. If repr is True, the field will be included in the
object's repr(). If hash is True, the field will be included in
the object's hash(). If compare is True, the field will be used
in comparison functions. metadata, if specified, must be a
mapping which is stored but not otherwise examined by dataclass.
It is an error to specify both default and default_factory.
"""
if default is not MISSING and default_factory is not MISSING:
raise ValueError('cannot specify both default and default_factory')
return Field(default, default_factory, init, repr, hash, compare,
metadata)
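# An illustrative sketch, not part of the original source: default_factory is
# how mutable defaults are expressed, since _get_field() below rejects plain
# list/dict/set defaults:
#
# >>> @dataclass
# ... class C:
# ...     items: list = field(default_factory=list)
# >>> C().items
# []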
def _tuple_str(obj_name, fields):
# Return a string representing each field of obj_name as a tuple
# member. So, if fields is ['x', 'y'] and obj_name is "self",
# return "(self.x,self.y)".
# Special case for the 0-tuple.
if not fields:
return '()'
# Note the trailing comma, needed if this turns out to be a 1-tuple.
return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'
def _create_fn(name, args, body, *, globals=None, locals=None,
return_type=MISSING):
# Note that we mutate locals when exec() is called. Caller
# beware! The only callers are internal to this module, so no
# worries about external callers.
if locals is None:
locals = {}
return_annotation = ''
if return_type is not MISSING:
locals['_return_type'] = return_type
return_annotation = '->_return_type'
args = ','.join(args)
body = '\n'.join(f' {b}' for b in body)
# Compute the text of the entire function.
txt = f'def {name}({args}){return_annotation}:\n{body}'
exec(txt, globals, locals)
return locals[name]
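# For illustration (not part of the original source), for a single field 'x'
# with a default value, the generated source exec-ed above looks roughly like:
#
#   def __init__(self,x:_type_x=_dflt_x):
#       self.x=x
#
# with _type_x and _dflt_x resolved through the locals/globals mappings.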
def _field_assign(frozen, name, value, self_name):
# If we're a frozen class, then assign to our fields in __init__
# via object.__setattr__. Otherwise, just use a simple
# assignment.
#
# self_name is what "self" is called in this function: don't
# hard-code "self", since that might be a field name.
if frozen:
return f'object.__setattr__({self_name},{name!r},{value})'
return f'{self_name}.{name}={value}'
def _field_init(f, frozen, globals, self_name):
# Return the text of the line in the body of __init__ that will
# initialize this field.
default_name = f'_dflt_{f.name}'
if f.default_factory is not MISSING:
if f.init:
# This field has a default factory. If a parameter is
# given, use it. If not, call the factory.
globals[default_name] = f.default_factory
value = (f'{default_name}() '
f'if {f.name} is _HAS_DEFAULT_FACTORY '
f'else {f.name}')
else:
# This is a field that's not in the __init__ params, but
# has a default factory function. It needs to be
# initialized here by calling the factory function,
# because there's no other way to initialize it.
# For a field initialized with a default=defaultvalue, the
# class dict just has the default value
# (cls.fieldname=defaultvalue). But that won't work for a
# default factory, the factory must be called in __init__
# and we must assign that to self.fieldname. We can't
# fall back to the class dict's value, both because it's
# not set, and because it might be different per-class
# (which, after all, is why we have a factory function!).
globals[default_name] = f.default_factory
value = f'{default_name}()'
else:
# No default factory.
if f.init:
if f.default is MISSING:
# There's no default, just do an assignment.
value = f.name
elif f.default is not MISSING:
globals[default_name] = f.default
value = f.name
else:
# This field does not need initialization. Signify that
# to the caller by returning None.
return None
# Only test this now, so that we can create variables for the
# default. However, return None to signify that we're not going
# to actually do the assignment statement for InitVars.
if f._field_type is _FIELD_INITVAR:
return None
# Now, actually generate the field assignment.
return _field_assign(frozen, f.name, value, self_name)
def _init_param(f):
# Return the __init__ parameter string for this field. For
# example, the equivalent of 'x:int=3' (except instead of 'int',
# reference a variable set to int, and instead of '3', reference a
# variable set to 3).
if f.default is MISSING and f.default_factory is MISSING:
# There's no default, and no default_factory, just output the
# variable name and type.
default = ''
elif f.default is not MISSING:
# There's a default, this will be the name that's used to look
# it up.
default = f'=_dflt_{f.name}'
elif f.default_factory is not MISSING:
# There's a factory function. Set a marker.
default = '=_HAS_DEFAULT_FACTORY'
return f'{f.name}:_type_{f.name}{default}'
def _init_fn(fields, frozen, has_post_init, self_name):
# fields contains both real fields and InitVar pseudo-fields.
# Make sure we don't have fields without defaults following fields
# with defaults. This actually would be caught when exec-ing the
# function source code, but catching it here gives a better error
# message, and future-proofs us in case we build up the function
# using ast.
seen_default = False
for f in fields:
# Only consider fields in the __init__ call.
if f.init:
if not (f.default is MISSING and f.default_factory is MISSING):
seen_default = True
elif seen_default:
raise TypeError(f'non-default argument {f.name!r} '
'follows default argument')
globals = {'MISSING': MISSING,
'_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY}
body_lines = []
for f in fields:
line = _field_init(f, frozen, globals, self_name)
# line is None means that this field doesn't require
# initialization (it's a pseudo-field). Just skip it.
if line:
body_lines.append(line)
# Does this class have a post-init function?
if has_post_init:
params_str = ','.join(f.name for f in fields
if f._field_type is _FIELD_INITVAR)
body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})')
# If no body lines, use 'pass'.
if not body_lines:
body_lines = ['pass']
locals = {f'_type_{f.name}': f.type for f in fields}
return _create_fn('__init__',
[self_name] + [_init_param(f) for f in fields if f.init],
body_lines,
locals=locals,
globals=globals,
return_type=None)
def _repr_fn(fields):
return _create_fn('__repr__',
('self',),
['return self.__class__.__qualname__ + f"(' +
', '.join([f"{f.name}={{self.{f.name}!r}}"
for f in fields]) +
')"'])
def _frozen_get_del_attr(cls, fields):
# XXX: globals is modified on the first call to _create_fn, then
# the modified version is used in the second call. Is this okay?
globals = {'cls': cls,
'FrozenInstanceError': FrozenInstanceError}
if fields:
fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)'
else:
# Special case for the zero-length tuple.
fields_str = '()'
return (_create_fn('__setattr__',
('self', 'name', 'value'),
(f'if type(self) is cls or name in {fields_str}:',
' raise FrozenInstanceError(f"cannot assign to field {name!r}")',
f'super(cls, self).__setattr__(name, value)'),
globals=globals),
_create_fn('__delattr__',
('self', 'name'),
(f'if type(self) is cls or name in {fields_str}:',
' raise FrozenInstanceError(f"cannot delete field {name!r}")',
f'super(cls, self).__delattr__(name)'),
globals=globals),
)
def _cmp_fn(name, op, self_tuple, other_tuple):
# Create a comparison function. If the fields in the object are
# named 'x' and 'y', then self_tuple is the string
# '(self.x,self.y)' and other_tuple is the string
# '(other.x,other.y)'.
return _create_fn(name,
('self', 'other'),
[ 'if other.__class__ is self.__class__:',
f' return {self_tuple}{op}{other_tuple}',
'return NotImplemented'])
def _hash_fn(fields):
self_tuple = _tuple_str('self', fields)
return _create_fn('__hash__',
('self',),
[f'return hash({self_tuple})'])
def _is_classvar(a_type, typing):
# This test uses a typing internal class, but it's the best way to
# test if this is a ClassVar.
return type(a_type) is typing._ClassVar
def _is_initvar(a_type, dataclasses):
# The module we're checking against is the module we're
# currently in (dataclasses.py).
return a_type is dataclasses.InitVar
def _is_type(annotation, cls, a_module, a_type, is_type_predicate):
# Given a type annotation string, does it refer to a_type in
# a_module? For example, when checking that annotation denotes a
# ClassVar, then a_module is typing, and a_type is
# typing.ClassVar.
# It's possible to look up a_module given a_type, but it involves
# looking in sys.modules (again!), and seems like a waste since
# the caller already knows a_module.
# - annotation is a string type annotation
# - cls is the class that this annotation was found in
# - a_module is the module we want to match
# - a_type is the type in that module we want to match
# - is_type_predicate is a function called with (obj, a_module)
# that determines if obj is of the desired type.
# Since this test does not do a local namespace lookup (and
# instead only a module (global) lookup), there are some things it
# gets wrong.
# With string annotations, cv0 will be detected as a ClassVar:
# CV = ClassVar
# @dataclass
# class C0:
# cv0: CV
# But in this example cv1 will not be detected as a ClassVar:
# @dataclass
# class C1:
# CV = ClassVar
# cv1: CV
# In C1, the code in this function (_is_type) will look up "CV" in
# the module and not find it, so it will not consider cv1 as a
# ClassVar. This is a fairly obscure corner case, and the best
# way to fix it would be to eval() the string "CV" with the
# correct global and local namespaces. However that would involve
# a eval() penalty for every single field of every dataclass
# that's defined. It was judged not worth it.
match = _MODULE_IDENTIFIER_RE.match(annotation)
if match:
ns = None
module_name = match.group(1)
if not module_name:
# No module name, assume the class's module did
# "from dataclasses import InitVar".
ns = sys.modules.get(cls.__module__).__dict__
else:
# Look up module_name in the class's module.
module = sys.modules.get(cls.__module__)
if module and module.__dict__.get(module_name) is a_module:
ns = sys.modules.get(a_type.__module__).__dict__
if ns and is_type_predicate(ns.get(match.group(2)), a_module):
return True
return False
def _get_field(cls, a_name, a_type):
# Return a Field object for this field name and type. ClassVars
# and InitVars are also returned, but marked as such (see
# f._field_type).
# If the default value isn't derived from Field, then it's only a
# normal default value. Convert it to a Field().
default = getattr(cls, a_name, MISSING)
if isinstance(default, Field):
f = default
else:
if isinstance(default, types.MemberDescriptorType):
# This is a field in __slots__, so it has no default value.
default = MISSING
f = field(default=default)
# Only at this point do we know the name and the type. Set them.
f.name = a_name
f.type = a_type
# Assume it's a normal field until proven otherwise. We're next
# going to decide if it's a ClassVar or InitVar, everything else
# is just a normal field.
f._field_type = _FIELD
# In addition to checking for actual types here, also check for
# string annotations. get_type_hints() won't always work for us
# (see https://github.com/python/typing/issues/508 for example),
    # plus it's expensive and would require an eval for every string
# annotation. So, make a best effort to see if this is a ClassVar
# or InitVar using regex's and checking that the thing referenced
# is actually of the correct type.
# For the complete discussion, see https://bugs.python.org/issue33453
# If typing has not been imported, then it's impossible for any
# annotation to be a ClassVar. So, only look for ClassVar if
# typing has been imported by any module (not necessarily cls's
# module).
typing = sys.modules.get('typing')
if typing:
if (_is_classvar(a_type, typing)
or (isinstance(f.type, str)
and _is_type(f.type, cls, typing, typing.ClassVar,
_is_classvar))):
f._field_type = _FIELD_CLASSVAR
# If the type is InitVar, or if it's a matching string annotation,
# then it's an InitVar.
if f._field_type is _FIELD:
# The module we're checking against is the module we're
# currently in (dataclasses.py).
dataclasses = sys.modules[__name__]
if (_is_initvar(a_type, dataclasses)
or (isinstance(f.type, str)
and _is_type(f.type, cls, dataclasses, dataclasses.InitVar,
_is_initvar))):
f._field_type = _FIELD_INITVAR
# Validations for individual fields. This is delayed until now,
# instead of in the Field() constructor, since only here do we
# know the field name, which allows for better error reporting.
# Special restrictions for ClassVar and InitVar.
if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR):
if f.default_factory is not MISSING:
raise TypeError(f'field {f.name} cannot have a '
'default factory')
# Should I check for other field settings? default_factory
# seems the most serious to check for. Maybe add others. For
# example, how about init=False (or really,
# init=<not-the-default-init-value>)? It makes no sense for
# ClassVar and InitVar to specify init=<anything>.
# For real fields, disallow mutable defaults for known types.
if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
raise ValueError(f'mutable default {type(f.default)} for field '
f'{f.name} is not allowed: use default_factory')
return f
def _set_new_attribute(cls, name, value):
# Never overwrites an existing attribute. Returns True if the
# attribute already exists.
if name in cls.__dict__:
return True
setattr(cls, name, value)
return False
# Decide if/how we're going to create a hash function. Key is
# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to
# take. The common case is to do nothing, so instead of providing a
# function that is a no-op, use None to signify that.
def _hash_set_none(cls, fields):
return None
def _hash_add(cls, fields):
flds = [f for f in fields if (f.compare if f.hash is None else f.hash)]
return _hash_fn(flds)
def _hash_exception(cls, fields):
# Raise an exception.
raise TypeError(f'Cannot overwrite attribute __hash__ '
f'in class {cls.__name__}')
#
# +-------------------------------------- unsafe_hash?
# | +------------------------------- eq?
# | | +------------------------ frozen?
# | | | +---------------- has-explicit-hash?
# | | | |
# | | | | +------- action
# | | | | |
# v v v v v
_hash_action = {(False, False, False, False): None,
(False, False, False, True ): None,
(False, False, True, False): None,
(False, False, True, True ): None,
(False, True, False, False): _hash_set_none,
(False, True, False, True ): None,
(False, True, True, False): _hash_add,
(False, True, True, True ): None,
(True, False, False, False): _hash_add,
(True, False, False, True ): _hash_exception,
(True, False, True, False): _hash_add,
(True, False, True, True ): _hash_exception,
(True, True, False, False): _hash_add,
(True, True, False, True ): _hash_exception,
(True, True, True, False): _hash_add,
(True, True, True, True ): _hash_exception,
}
# See https://bugs.python.org/issue32929#msg312829 for an if-statement
# version of this table.
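# An illustrative sketch, not part of the original source: a frozen dataclass
# with the default eq=True and no explicit __hash__ hits the
# (False, True, True, False) row above, so _hash_add generates a
# tuple-based __hash__:
#
# >>> @dataclass(frozen=True)
# ... class Point:
# ...     x: int
# ...     y: int
# >>> hash(Point(1, 2)) == hash(Point(1, 2))
# True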
def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# Now that dicts retain insertion order, there's no reason to use
# an ordered dict. I am leveraging that ordering here, because
# derived class fields overwrite base class fields, but the order
# is defined by the base class, which is found first.
fields = {}
setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order,
unsafe_hash, frozen))
# Find our base classes in reverse MRO order, and exclude
# ourselves. In reversed order so that more derived classes
# override earlier field definitions in base classes. As long as
# we're iterating over them, see if any are frozen.
any_frozen_base = False
has_dataclass_bases = False
for b in cls.__mro__[-1:0:-1]:
# Only process classes that have been processed by our
# decorator. That is, they have a _FIELDS attribute.
base_fields = getattr(b, _FIELDS, None)
if base_fields:
has_dataclass_bases = True
for f in base_fields.values():
fields[f.name] = f
if getattr(b, _PARAMS).frozen:
any_frozen_base = True
# Annotations that are defined in this class (not in base
# classes). If __annotations__ isn't present, then this class
# adds no new annotations. We use this to compute fields that are
# added by this class.
#
# Fields are found from cls_annotations, which is guaranteed to be
# ordered. Default values are from class attributes, if a field
# has a default. If the default value is a Field(), then it
# contains additional info beyond (and possibly including) the
# actual default value. Pseudo-fields ClassVars and InitVars are
# included, despite the fact that they're not real fields. That's
# dealt with later.
cls_annotations = cls.__dict__.get('__annotations__', {})
# Now find fields in our class. While doing so, validate some
# things, and set the default values (as class attributes) where
# we can.
cls_fields = [_get_field(cls, name, type)
for name, type in cls_annotations.items()]
for f in cls_fields:
fields[f.name] = f
# If the class attribute (which is the default value for this
# field) exists and is of type 'Field', replace it with the
# real default. This is so that normal class introspection
# sees a real default value, not a Field.
if isinstance(getattr(cls, f.name, None), Field):
if f.default is MISSING:
# If there's no default, delete the class attribute.
# This happens if we specify field(repr=False), for
# example (that is, we specified a field object, but
# no default value). Also if we're using a default
# factory. The class attribute should not be set at
# all in the post-processed class.
delattr(cls, f.name)
else:
setattr(cls, f.name, f.default)
# Do we have any Field members that don't also have annotations?
for name, value in cls.__dict__.items():
        if isinstance(value, Field) and name not in cls_annotations:
raise TypeError(f'{name!r} is a field but has no type annotation')
# Check rules that apply if we are derived from any dataclasses.
if has_dataclass_bases:
# Raise an exception if any of our bases are frozen, but we're not.
if any_frozen_base and not frozen:
raise TypeError('cannot inherit non-frozen dataclass from a '
'frozen one')
# Raise an exception if we're frozen, but none of our bases are.
if not any_frozen_base and frozen:
raise TypeError('cannot inherit frozen dataclass from a '
'non-frozen one')
# Remember all of the fields on our class (including bases). This
# also marks this class as being a dataclass.
setattr(cls, _FIELDS, fields)
# Was this class defined with an explicit __hash__? Note that if
# __eq__ is defined in this class, then python will automatically
# set __hash__ to None. This is a heuristic, as it's possible
    # that such a __hash__ == None was not auto-generated, but it's
    # close enough.
class_hash = cls.__dict__.get('__hash__', MISSING)
has_explicit_hash = not (class_hash is MISSING or
(class_hash is None and '__eq__' in cls.__dict__))
# If we're generating ordering methods, we must be generating the
# eq methods.
if order and not eq:
raise ValueError('eq must be true if order is true')
if init:
# Does this class have a post-init function?
has_post_init = hasattr(cls, _POST_INIT_NAME)
# Include InitVars and regular fields (so, not ClassVars).
flds = [f for f in fields.values()
if f._field_type in (_FIELD, _FIELD_INITVAR)]
_set_new_attribute(cls, '__init__',
_init_fn(flds,
frozen,
has_post_init,
# The name to use for the "self"
# param in __init__. Use "self"
# if possible.
'__dataclass_self__' if 'self' in fields
else 'self',
))
# Get the fields as a list, and include only real fields. This is
# used in all of the following methods.
field_list = [f for f in fields.values() if f._field_type is _FIELD]
if repr:
flds = [f for f in field_list if f.repr]
_set_new_attribute(cls, '__repr__', _repr_fn(flds))
if eq:
        # Create __eq__ method. There's no need for a __ne__ method,
# since python will call __eq__ and negate it.
flds = [f for f in field_list if f.compare]
self_tuple = _tuple_str('self', flds)
other_tuple = _tuple_str('other', flds)
_set_new_attribute(cls, '__eq__',
_cmp_fn('__eq__', '==',
self_tuple, other_tuple))
if order:
# Create and set the ordering methods.
flds = [f for f in field_list if f.compare]
self_tuple = _tuple_str('self', flds)
other_tuple = _tuple_str('other', flds)
for name, op in [('__lt__', '<'),
('__le__', '<='),
('__gt__', '>'),
('__ge__', '>='),
]:
if _set_new_attribute(cls, name,
_cmp_fn(name, op, self_tuple, other_tuple)):
raise TypeError(f'Cannot overwrite attribute {name} '
f'in class {cls.__name__}. Consider using '
'functools.total_ordering')
if frozen:
for fn in _frozen_get_del_attr(cls, field_list):
if _set_new_attribute(cls, fn.__name__, fn):
raise TypeError(f'Cannot overwrite attribute {fn.__name__} '
f'in class {cls.__name__}')
# Decide if/how we're going to create a hash function.
hash_action = _hash_action[bool(unsafe_hash),
bool(eq),
bool(frozen),
has_explicit_hash]
if hash_action:
# No need to call _set_new_attribute here, since by the time
# we're here the overwriting is unconditional.
cls.__hash__ = hash_action(cls, field_list)
if not getattr(cls, '__doc__'):
# Create a class doc-string.
cls.__doc__ = (cls.__name__ +
str(inspect.signature(cls)).replace(' -> None', ''))
return cls
# _cls should never be specified by keyword, so start it with an
# underscore. The presence of _cls is used to detect if this
# decorator is being called with parameters or not.
def dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False,
unsafe_hash=False, frozen=False):
"""Returns the same class as was passed in, with dunder methods
added based on the fields defined in the class.
Examines PEP 526 __annotations__ to determine fields.
If init is true, an __init__() method is added to the class. If
repr is true, a __repr__() method is added. If order is true, rich
comparison dunder methods are added. If unsafe_hash is true, a
__hash__() method function is added. If frozen is true, fields may
not be assigned to after instance creation.
"""
def wrap(cls):
return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen)
# See if we're being called as @dataclass or @dataclass().
if _cls is None:
# We're called with parens.
return wrap
# We're called as @dataclass without parens.
return wrap(_cls)
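# An illustrative sketch, not part of the original source: the _cls sentinel
# check above is what lets both call forms work:
#
# >>> @dataclass
# ... class A:
# ...     x: int
# >>> @dataclass(order=True)
# ... class B:
# ...     x: int
# >>> B(1) < B(2)
# True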
def fields(class_or_instance):
"""Return a tuple describing the fields of this dataclass.
Accepts a dataclass or an instance of one. Tuple elements are of
type Field.
"""
# Might it be worth caching this, per class?
try:
fields = getattr(class_or_instance, _FIELDS)
except AttributeError:
raise TypeError('must be called with a dataclass type or instance')
# Exclude pseudo-fields. Note that fields is sorted by insertion
# order, so the order of the tuple is as the fields were defined.
return tuple(f for f in fields.values() if f._field_type is _FIELD)
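# An illustrative sketch, not part of the original source:
#
# >>> @dataclass
# ... class C:
# ...     x: int = 0
# ...     y: str = ''
# >>> [f.name for f in fields(C)]
# ['x', 'y']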
def _is_dataclass_instance(obj):
"""Returns True if obj is an instance of a dataclass."""
return not isinstance(obj, type) and hasattr(obj, _FIELDS)
def is_dataclass(obj):
"""Returns True if obj is a dataclass or an instance of a
dataclass."""
return hasattr(obj, _FIELDS)
def asdict(obj, *, dict_factory=dict):
"""Return the fields of a dataclass instance as a new dictionary mapping
field names to field values.
Example usage:
@dataclass
class C:
x: int
y: int
c = C(1, 2)
assert asdict(c) == {'x': 1, 'y': 2}
If given, 'dict_factory' will be used instead of built-in dict.
The function applies recursively to field values that are
dataclass instances. This will also look into built-in containers:
tuples, lists, and dicts.
"""
if not _is_dataclass_instance(obj):
raise TypeError("asdict() should be called on dataclass instances")
return _asdict_inner(obj, dict_factory)
def _asdict_inner(obj, dict_factory):
if _is_dataclass_instance(obj):
result = []
for f in fields(obj):
value = _asdict_inner(getattr(obj, f.name), dict_factory)
result.append((f.name, value))
return dict_factory(result)
elif isinstance(obj, (list, tuple)):
return type(obj)(_asdict_inner(v, dict_factory) for v in obj)
elif isinstance(obj, dict):
return type(obj)((_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory))
for k, v in obj.items())
else:
return copy.deepcopy(obj)
def astuple(obj, *, tuple_factory=tuple):
"""Return the fields of a dataclass instance as a new tuple of field values.
Example usage::
@dataclass
class C:
x: int
y: int
c = C(1, 2)
assert astuple(c) == (1, 2)
If given, 'tuple_factory' will be used instead of built-in tuple.
The function applies recursively to field values that are
dataclass instances. This will also look into built-in containers:
tuples, lists, and dicts.
"""
if not _is_dataclass_instance(obj):
raise TypeError("astuple() should be called on dataclass instances")
return _astuple_inner(obj, tuple_factory)
def _astuple_inner(obj, tuple_factory):
if _is_dataclass_instance(obj):
result = []
for f in fields(obj):
value = _astuple_inner(getattr(obj, f.name), tuple_factory)
result.append(value)
return tuple_factory(result)
elif isinstance(obj, (list, tuple)):
return type(obj)(_astuple_inner(v, tuple_factory) for v in obj)
elif isinstance(obj, dict):
return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory))
for k, v in obj.items())
else:
return copy.deepcopy(obj)
def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
repr=True, eq=True, order=False, unsafe_hash=False,
frozen=False):
"""Return a new dynamically created dataclass.
The dataclass name will be 'cls_name'. 'fields' is an iterable
of either (name), (name, type) or (name, type, Field) objects. If type is
omitted, use the string 'typing.Any'. Field objects are created by
the equivalent of calling 'field(name, type [, Field-info])'.
C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,))
is equivalent to:
@dataclass
class C(Base):
x: 'typing.Any'
y: int
z: int = field(init=False)
For the bases and namespace parameters, see the builtin type() function.
The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to
dataclass().
"""
if namespace is None:
namespace = {}
else:
# Copy namespace since we're going to mutate it.
namespace = namespace.copy()
# While we're looking through the field names, validate that they
# are identifiers, are not keywords, and not duplicates.
seen = set()
anns = {}
for item in fields:
if isinstance(item, str):
name = item
tp = 'typing.Any'
elif len(item) == 2:
name, tp, = item
elif len(item) == 3:
name, tp, spec = item
namespace[name] = spec
else:
raise TypeError(f'Invalid field: {item!r}')
if not isinstance(name, str) or not name.isidentifier():
        raise TypeError(f'Field names must be valid identifiers: {name!r}')
if keyword.iskeyword(name):
raise TypeError(f'Field names must not be keywords: {name!r}')
if name in seen:
raise TypeError(f'Field name duplicated: {name!r}')
seen.add(name)
anns[name] = tp
namespace['__annotations__'] = anns
# We use `types.new_class()` instead of simply `type()` to allow dynamic creation
    # of generic dataclasses.
cls = types.new_class(cls_name, bases, {}, lambda ns: ns.update(namespace))
return dataclass(cls, init=init, repr=repr, eq=eq, order=order,
unsafe_hash=unsafe_hash, frozen=frozen)
def replace(obj, **changes):
"""Return a new object replacing specified fields with new values.
This is especially useful for frozen classes. Example usage:
@dataclass(frozen=True)
class C:
x: int
y: int
c = C(1, 2)
c1 = replace(c, x=3)
assert c1.x == 3 and c1.y == 2
"""
# We're going to mutate 'changes', but that's okay because it's a
# new dict, even if called with 'replace(obj, **my_changes)'.
if not _is_dataclass_instance(obj):
raise TypeError("replace() should be called on dataclass instances")
# It's an error to have init=False fields in 'changes'.
# If a field is not in 'changes', read its value from the provided obj.
for f in getattr(obj, _FIELDS).values():
# Only consider normal fields or InitVars.
if f._field_type is _FIELD_CLASSVAR:
continue
if not f.init:
# Error if this field is specified in changes.
if f.name in changes:
raise ValueError(f'field {f.name} is declared with '
'init=False, it cannot be specified with '
'replace()')
continue
if f.name not in changes:
if f._field_type is _FIELD_INITVAR:
raise ValueError(f"InitVar {f.name!r} "
'must be specified with replace()')
changes[f.name] = getattr(obj, f.name)
# Create the new object, which calls __init__() and
# __post_init__() (if defined), using all of the init fields we've
# added and/or left in 'changes'. If there are values supplied in
# changes that aren't fields, this will correctly raise a
# TypeError.
return obj.__class__(**changes)
| 57,009 | Python | .py | 1,170 | 40.498291 | 98 | 0.609624 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,612 | configobj_.py | zatosource_zato/code/zato-common/src/zato/common/ext/configobj_.py |
"""
This module is a modified vendor copy of the configobj package from https://pypi.org/project/configobj/
Copyright (c):
2003-2010, Michael Foord
2014, Eli Courtwright, Rob Dennis
All rights reserved.
E-mails :
fuzzyman AT voidspace DOT org DOT uk
eli AT courtwright DOT org
rdennis AT gmail DOT com
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the names of Michael Foord, Eli Courtwright or Rob Dennis,
nor the name of Voidspace, may be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# flake8: noqa
import os
import re
import sys
from logging import getLogger
from ast import literal_eval
from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE
from collections import OrderedDict
import six
__version__ = '5.0.6'
logger = getLogger('zato')
# imported lazily to avoid startup performance hit if it isn't used
compiler = None
# A dictionary mapping BOM to
# the encoding to decode with, and what to set the
# encoding attribute to.
BOMS = {
BOM_UTF8: ('utf_8', None),
BOM_UTF16_BE: ('utf16_be', 'utf_16'),
BOM_UTF16_LE: ('utf16_le', 'utf_16'),
BOM_UTF16: ('utf_16', 'utf_16'),
}
# All legal variants of the BOM codecs.
# TODO: the list of aliases is not meant to be exhaustive, is there a
# better way?
BOM_LIST = {
'utf_16': 'utf_16',
'u16': 'utf_16',
'utf16': 'utf_16',
'utf-16': 'utf_16',
'utf16_be': 'utf16_be',
'utf_16_be': 'utf16_be',
'utf-16be': 'utf16_be',
'utf16_le': 'utf16_le',
'utf_16_le': 'utf16_le',
'utf-16le': 'utf16_le',
'utf_8': 'utf_8',
'u8': 'utf_8',
'utf': 'utf_8',
'utf8': 'utf_8',
'utf-8': 'utf_8',
}
# Map of encodings to the BOM to write.
BOM_SET = {
'utf_8': BOM_UTF8,
'utf_16': BOM_UTF16,
'utf16_be': BOM_UTF16_BE,
'utf16_le': BOM_UTF16_LE,
None: BOM_UTF8
}
def match_utf8(encoding):
return BOM_LIST.get(encoding.lower()) == 'utf_8'
# Quote strings used for writing values
squot = "'%s'"
dquot = '"%s"'
noquot = "%s"
wspace_plus = ' \r\n\v\t\'"'
tsquot = '"""%s"""'
tdquot = "'''%s'''"
# Sentinel for use in getattr calls to replace hasattr
MISSING = object()
__all__ = (
'DEFAULT_INDENT_TYPE',
'DEFAULT_INTERPOLATION',
'ConfigObjError',
'NestingError',
'ParseError',
'DuplicateError',
'ConfigspecError',
'ConfigObj',
'SimpleVal',
'InterpolationError',
'InterpolationLoopError',
'MissingInterpolationOption',
'RepeatSectionError',
'ReloadError',
'UnreprError',
'UnknownType',
'flatten_errors',
'get_extra_values'
)
DEFAULT_INTERPOLATION = 'configparser'
DEFAULT_INDENT_TYPE = ' '
MAX_INTERPOL_DEPTH = 10
OPTION_DEFAULTS = {
'interpolation': True,
'raise_errors': False,
'list_values': True,
'create_empty': False,
'file_error': False,
'configspec': None,
'stringify': True,
# option may be set to one of ('', ' ', '\t')
'indent_type': None,
'encoding': None,
'default_encoding': None,
'unrepr': False,
'write_empty_values': False,
}
# this could be replaced if six is used for compatibility, or there are no
# more assertions about items being a string
def getObj(s):
global compiler
if compiler is None:
import compiler
s = "a=" + s
p = compiler.parse(s)
return p.getChildren()[1].getChildren()[0].getChildren()[1]
class UnknownType(Exception):
pass
class Builder:
    def build(self, o):
        # Dispatch to the build_* method matching the node's class name
        m = getattr(self, 'build_' + o.__class__.__name__, None)
        if m is None:
            raise UnknownType(o.__class__.__name__)
        return m(o)
def build_List(self, o):
return list(map(self.build, o.getChildren()))
def build_Const(self, o):
return o.value
def build_Dict(self, o):
d = {}
i = iter(map(self.build, o.getChildren()))
for el in i:
d[el] = next(i)
return d
def build_Tuple(self, o):
return tuple(self.build_List(o))
def build_Name(self, o):
if o.name == 'None':
return None
if o.name == 'True':
return True
if o.name == 'False':
return False
# An undefined Name
raise UnknownType('Undefined Name')
def build_Add(self, o):
real, imag = list(map(self.build_Const, o.getChildren()))
try:
real = float(real)
except TypeError:
raise UnknownType('Add')
if not isinstance(imag, complex) or imag.real != 0.0:
raise UnknownType('Add')
return real+imag
def build_Getattr(self, o):
parent = self.build(o.expr)
return getattr(parent, o.attrname)
def build_UnarySub(self, o):
return -self.build_Const(o.getChildren()[0])
def build_UnaryAdd(self, o):
return self.build_Const(o.getChildren()[0])
_builder = Builder()
def unrepr(s):
    if not s:
        return s
    # literal_eval accepts only Python literals, so this is safe to call on
    # untrusted input; it is already imported at the top of the module.
    return literal_eval(s)
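# An illustrative sketch, not part of the original source: unrepr() turns the
# textual form of a Python literal back into a value:
#
# >>> unrepr("{'a': [1, 2]}")
# {'a': [1, 2]}
# >>> unrepr('')
# ''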
class ConfigObjError(SyntaxError):
"""
This is the base class for all errors that ConfigObj raises.
It is a subclass of SyntaxError.
"""
def __init__(self, message='', line_number=None, line=''):
self.line = line
self.line_number = line_number
SyntaxError.__init__(self, message)
class NestingError(ConfigObjError):
"""
This error indicates a level of nesting that doesn't match.
"""
class ParseError(ConfigObjError):
"""
This error indicates that a line is badly written.
It is neither a valid ``key = value`` line,
nor a valid section marker line.
"""
class ReloadError(IOError):
"""
A 'reload' operation failed.
This exception is a subclass of ``IOError``.
"""
def __init__(self):
IOError.__init__(self, 'reload failed, filename is not set.')
class DuplicateError(ConfigObjError):
"""
The keyword or section specified already exists.
"""
class ConfigspecError(ConfigObjError):
"""
An error occurred whilst parsing a configspec.
"""
class InterpolationError(ConfigObjError):
"""Base class for the two interpolation errors."""
class InterpolationLoopError(InterpolationError):
"""Maximum interpolation depth exceeded in string interpolation."""
def __init__(self, option):
InterpolationError.__init__(
self,
'interpolation loop detected in value "%s".' % option)
class RepeatSectionError(ConfigObjError):
"""
This error indicates additional sections in a section with a
``__many__`` (repeated) section.
"""
class MissingInterpolationOption(InterpolationError):
"""A value specified for interpolation was missing."""
def __init__(self, option):
msg = 'missing option "%s" in interpolation.' % option
InterpolationError.__init__(self, msg)
class UnreprError(ConfigObjError):
"""An error parsing in unrepr mode."""
class InterpolationEngine:
"""
A helper class to help perform string interpolation.
This class is an abstract base class; its descendants perform
the actual work.
"""
# compiled regexp to use in self.interpolate()
_KEYCRE = re.compile(r"%\(([^)]*)\)s")
_cookie = '%'
def __init__(self, section):
# the Section instance that "owns" this engine
self.section = section
def interpolate(self, key, value):
# short-cut
if self._cookie not in value:
return value
def recursive_interpolate(key, value, section, backtrail):
"""The function that does the actual work.
``value``: the string we're trying to interpolate.
``section``: the section in which that string was found
``backtrail``: a dict to keep track of where we've been,
to detect and prevent infinite recursion loops
This is similar to a depth-first-search algorithm.
"""
# Have we been here already?
if (key, section.name) in backtrail:
# Yes - infinite loop detected
raise InterpolationLoopError(key)
# Place a marker on our backtrail so we won't come back here again
backtrail[(key, section.name)] = 1
# Now start the actual work
match = self._KEYCRE.search(value)
while match:
# The actual parsing of the match is implementation-dependent,
# so delegate to our helper function
k, v, s = self._parse_match(match)
if k is None:
# That's the signal that no further interpolation is needed
replacement = v
else:
# Further interpolation may be needed to obtain final value
replacement = recursive_interpolate(k, v, s, backtrail)
# Replace the matched string with its final value
start, end = match.span()
value = ''.join((value[:start], replacement, value[end:]))
new_search_start = start + len(replacement)
# Pick up the next interpolation key, if any, for next time
# through the while loop
match = self._KEYCRE.search(value, new_search_start)
# Now safe to come back here again; remove marker from backtrail
del backtrail[(key, section.name)]
return value
# Back in interpolate(), all we have to do is kick off the recursive
# function with appropriate starting values
value = recursive_interpolate(key, value, self.section, {})
return value
def _fetch(self, key):
"""Helper function to fetch values from owning section.
Returns a 2-tuple: the value, and the section where it was found.
"""
# switch off interpolation before we try and fetch anything !
save_interp = self.section.main.interpolation
self.section.main.interpolation = False
# Start at section that "owns" this InterpolationEngine
current_section = self.section
while True:
# try the current section first
val = current_section.get(key)
if val is not None and not isinstance(val, Section):
break
# try "DEFAULT" next
val = current_section.get('DEFAULT', {}).get(key)
if val is not None and not isinstance(val, Section):
break
# move up to parent and try again
# top-level's parent is itself
if current_section.parent is current_section:
# reached top level, time to give up
break
current_section = current_section.parent
# restore interpolation to previous value before returning
self.section.main.interpolation = save_interp
if val is None:
raise MissingInterpolationOption(key)
return val, current_section
def _parse_match(self, match):
"""Implementation-dependent helper function.
Will be passed a match object corresponding to the interpolation
key we just found (e.g., "%(foo)s" or "$foo"). Should look up that
key in the appropriate config file section (using the ``_fetch()``
helper function) and return a 3-tuple: (key, value, section)
``key`` is the name of the key we're looking for
``value`` is the value found for that key
``section`` is a reference to the section where it was found
``key`` and ``section`` should be None if no further
interpolation should be performed on the resulting value
(e.g., if we interpolated "$$" and returned "$").
"""
raise NotImplementedError()
class ConfigParserInterpolation(InterpolationEngine):
"""Behaves like ConfigParser."""
_cookie = '%'
_KEYCRE = re.compile(r"%\(([^)]*)\)s")
def _parse_match(self, match):
key = match.group(1)
value, section = self._fetch(key)
return key, value, section
class TemplateInterpolation(InterpolationEngine):
"""Behaves like string.Template."""
_cookie = '$'
_delimiter = '$'
_KEYCRE = re.compile(r"""
\$(?:
(?P<escaped>\$) | # Two $ signs
(?P<named>[_a-z][_a-z0-9]*) | # $name format
{(?P<braced>[^}]*)} # ${name} format
)
""", re.IGNORECASE | re.VERBOSE)
def _parse_match(self, match):
# Valid name (in or out of braces): fetch value from section
key = match.group('named') or match.group('braced')
if key is not None:
value, section = self._fetch(key)
return key, value, section
# Escaped delimiter (e.g., $$): return single delimiter
if match.group('escaped') is not None:
# Return None for key and section to indicate it's time to stop
return None, self._delimiter, None
# Anything else: ignore completely, just return it unchanged
return None, match.group(), None
interpolation_engines = {
'configparser': ConfigParserInterpolation,
'template': TemplateInterpolation,
}
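# Illustrative sketch (added commentary): the two engines resolve different
# placeholder syntaxes. Assuming in-memory config lines:
#
#     cfg = ConfigObj(['user = admin', 'greeting = Hello %(user)s'])
#     cfg['greeting']   # -> 'Hello admin' (default 'configparser' engine)
#
#     cfg = ConfigObj(['user = admin', 'greeting = Hello $user'],
#                     interpolation='template')
#     cfg['greeting']   # -> 'Hello admin' (string.Template-style engine)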
def __newobj__(cls, *args):
# Hack for pickle
return cls.__new__(cls, *args)
class Section(OrderedDict):
"""
A dictionary-like object that represents a section in a config file.
It does string interpolation if the 'interpolation' attribute
of the 'main' object is set to True.
Interpolation is tried first from this object, then from the 'DEFAULT'
section of this object, next from the parent and its 'DEFAULT' section,
and so on until the main object is reached.
A Section will behave like an ordered dictionary - following the
order of the ``scalars`` and ``sections`` attributes.
You can use this to change the order of members.
Iteration follows the order: scalars, then sections.
"""
def __setstate__(self, state):
OrderedDict.update(self, state[0])
self.__dict__.update(state[1])
def __reduce__(self):
state = (OrderedDict(self), self.__dict__)
return (__newobj__, (self.__class__,), state)
def __init__(self, parent, depth, main, indict=None, name=None):
"""
* parent is the section above
* depth is the depth level of this section
* main is the main ConfigObj
* indict is a dictionary to initialise the section with
"""
if indict is None:
indict = {}
OrderedDict.__init__(self)
# used for nesting level *and* interpolation
self.parent = parent
# used for the interpolation attribute
self.main = main
# level of nesting depth of this Section
self.depth = depth
# purely for information
self.name = name
#
self._initialise()
# we do this explicitly so that __setitem__ is used properly
# (rather than just passing to ``OrderedDict.__init__``)
for entry, value in indict.items():
self[entry] = value
def _initialise(self):
# the sequence of scalar values in this Section
self.scalars = []
# the sequence of sections in this Section
self.sections = []
# for comments :-)
self.comments = {}
self.inline_comments = {}
# the configspec
self.configspec = None
# for defaults
self.defaults = []
self.default_values = {}
self.extra_values = []
self._created = False
def _interpolate(self, key, value):
try:
# do we already have an interpolation engine?
engine = self._interpolation_engine
except AttributeError:
# not yet: first time running _interpolate(), so pick the engine
name = self.main.interpolation
if name == True: # note that "if name:" would be incorrect here
# backwards-compatibility: interpolation=True means use default
name = DEFAULT_INTERPOLATION
name = name.lower() # so that "Template", "template", etc. all work
class_ = interpolation_engines.get(name, None)
if class_ is None:
# invalid value for self.main.interpolation
self.main.interpolation = False
return value
else:
# save reference to engine so we don't have to do this again
engine = self._interpolation_engine = class_(self)
# let the engine do the actual work
return engine.interpolate(key, value)
def __getitem__(self, key):
"""Fetch the item and do string interpolation."""
val = OrderedDict.__getitem__(self, key)
if self.main.interpolation:
if isinstance(val, six.string_types):
return self._interpolate(key, val)
if isinstance(val, list):
def _check(entry):
if isinstance(entry, six.string_types):
return self._interpolate(key, entry)
return entry
new = [_check(entry) for entry in val]
if new != val:
return new
return val
def __setitem__(self, key, value, unrepr=False):
"""
Correctly set a value.
Making dictionary values Section instances.
(We have to special case 'Section' instances - which are also dicts)
Keys must be strings.
Values need only be strings (or lists of strings) if
``main.stringify`` is set.
``unrepr`` must be set when setting a value to a dictionary, without
creating a new sub-section.
"""
if not isinstance(key, six.string_types):
raise ValueError('The key "%s" is not a string.' % key)
# add the comment
if key not in self.comments:
self.comments[key] = []
self.inline_comments[key] = ''
# remove the entry from defaults
if key in self.defaults:
self.defaults.remove(key)
#
if isinstance(value, Section):
if key not in self:
self.sections.append(key)
OrderedDict.__setitem__(self, key, value)
elif isinstance(value, dict) and not unrepr:
# First create the new depth level,
# then create the section
if key not in self:
self.sections.append(key)
new_depth = self.depth + 1
OrderedDict.__setitem__(
self,
key,
Section(
self,
new_depth,
self.main,
indict=value,
name=key))
else:
if key not in self:
self.scalars.append(key)
if not self.main.stringify:
if isinstance(value, six.string_types):
pass
elif isinstance(value, (list, tuple)):
for entry in value:
if not isinstance(entry, six.string_types):
raise TypeError('Value is not a string "%s".' % entry)
else:
raise TypeError('Value is not a string "%s".' % value)
OrderedDict.__setitem__(self, key, value)
def __delitem__(self, key):
"""Remove items from the sequence when deleting."""
OrderedDict.__delitem__(self, key)
if key in self.scalars:
self.scalars.remove(key)
else:
self.sections.remove(key)
del self.comments[key]
del self.inline_comments[key]
def get(self, key, default=None):
"""A version of ``get`` that doesn't bypass string interpolation."""
try:
return self[key]
except KeyError:
return default
def update(self, indict):
"""
A version of update that uses our ``__setitem__``.
"""
for entry in indict:
self[entry] = indict[entry]
def pop(self, key, default=MISSING):
"""
'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised'
"""
try:
val = self[key]
except KeyError:
if default is MISSING:
raise
val = default
else:
del self[key]
return val
def popitem(self):
"""Pops the first (key,val)"""
sequence = (self.scalars + self.sections)
if not sequence:
raise KeyError(": 'popitem(): dictionary is empty'")
key = sequence[0]
val = self[key]
del self[key]
return key, val
def clear(self):
"""
A version of clear that also affects scalars/sections
Also clears comments and configspec.
Leaves other attributes alone:
depth/main/parent are not affected.
"""
OrderedDict.clear(self)
self.scalars = []
self.sections = []
self.comments = {}
self.inline_comments = {}
self.configspec = None
self.defaults = []
self.extra_values = []
def setdefault(self, key, default=None):
"""A version of setdefault that sets sequence if appropriate."""
try:
return self[key]
except KeyError:
self[key] = default
return self[key]
def items(self):
"""D.items() -> list of D's (key, value) pairs, as 2-tuples"""
return list(zip((self.scalars + self.sections), list(self.values())))
def keys(self):
"""D.keys() -> list of D's keys"""
return (self.scalars + self.sections)
def values(self):
"""D.values() -> list of D's values"""
return [self[key] for key in (self.scalars + self.sections)]
def iteritems(self):
"""D.iteritems() -> an iterator over the (key, value) items of D"""
return iter(list(self.items()))
def iterkeys(self):
"""D.iterkeys() -> an iterator over the keys of D"""
return iter((self.scalars + self.sections))
__iter__ = iterkeys
def itervalues(self):
"""D.itervalues() -> an iterator over the values of D"""
return iter(list(self.values()))
def __repr__(self):
"""x.__repr__() <==> repr(x)"""
def _getval(key):
try:
return self[key]
except MissingInterpolationOption:
return OrderedDict.__getitem__(self, key)
return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
for key in (self.scalars + self.sections)])
__str__ = __repr__
__str__.__doc__ = "x.__str__() <==> str(x)"
# Extra methods - not in a normal dictionary
def dict(self):
"""
Return a deepcopy of self as a dictionary.
All members that are ``Section`` instances are recursively turned to
ordinary dictionaries - by calling their ``dict`` method.
>>> n = a.dict()
>>> n == a
1
>>> n is a
0
"""
newdict = {}
for entry in self:
this_entry = self[entry]
if isinstance(this_entry, Section):
this_entry = this_entry.dict()
elif isinstance(this_entry, list):
# create a copy rather than a reference
this_entry = list(this_entry)
elif isinstance(this_entry, tuple):
# create a copy rather than a reference
this_entry = tuple(this_entry)
newdict[entry] = this_entry
return newdict
def merge(self, indict):
"""
A recursive update - useful for merging config files.
>>> a = '''[section1]
... option1 = True
... [[subsection]]
... more_options = False
... # end of file'''.splitlines()
>>> b = '''# File is user.ini
... [section1]
... option1 = False
... # end of file'''.splitlines()
>>> c1 = ConfigObj(b)
>>> c2 = ConfigObj(a)
>>> c2.merge(c1)
>>> c2
ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
"""
for key, val in list(indict.items()):
if (key in self and isinstance(self[key], dict) and
isinstance(val, dict)):
self[key].merge(val)
else:
self[key] = val
def rename(self, oldkey, newkey):
"""
Change a keyname to another, without changing position in sequence.
Implemented so that transformations can be made on keys,
as well as on values. (used by encode and decode)
Also renames comments.
"""
if oldkey in self.scalars:
the_list = self.scalars
elif oldkey in self.sections:
the_list = self.sections
else:
raise KeyError('Key "%s" not found.' % oldkey)
pos = the_list.index(oldkey)
#
val = self[oldkey]
OrderedDict.__delitem__(self, oldkey)
OrderedDict.__setitem__(self, newkey, val)
the_list.remove(oldkey)
the_list.insert(pos, newkey)
comm = self.comments[oldkey]
inline_comment = self.inline_comments[oldkey]
del self.comments[oldkey]
del self.inline_comments[oldkey]
self.comments[newkey] = comm
self.inline_comments[newkey] = inline_comment
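# Illustrative example (added commentary):
#
#     cfg = ConfigObj(['old_key = value'])
#     cfg.rename('old_key', 'new_key')
#     cfg['new_key']    # -> 'value'; position and comments are preserved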
def walk(self, function, raise_errors=True,
call_on_sections=False, **keywargs):
"""
Walk every member and call a function on the keyword and value.
Return a dictionary of the return values
If the function raises an exception, raise the error
unless ``raise_errors=False``, in which case set the return value to
``False``.
Any unrecognised keyword arguments you pass to walk will be passed on
to the function you pass in.
Note: if ``call_on_sections`` is ``True`` then - on encountering a
subsection, *first* the function is called for the *whole* subsection,
and then it recurses into its members. This means your function must be
able to handle strings, dictionaries and lists. This allows you
to change the key of subsections as well as for ordinary members. The
return value when called on the whole subsection has to be discarded.
See the encode and decode methods for examples, including functions.
.. admonition:: caution
You can use ``walk`` to transform the names of members of a section
but you mustn't add or delete members.
>>> config = '''[XXXXsection]
... XXXXkey = XXXXvalue'''.splitlines()
>>> cfg = ConfigObj(config)
>>> cfg
ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
>>> def transform(section, key):
... val = section[key]
... newkey = key.replace('XXXX', 'CLIENT1')
... section.rename(key, newkey)
... if isinstance(val, (tuple, list, dict)):
... pass
... else:
... val = val.replace('XXXX', 'CLIENT1')
... section[newkey] = val
>>> cfg.walk(transform, call_on_sections=True)
{'CLIENT1section': {'CLIENT1key': None}}
>>> cfg
ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
"""
out = {}
# scalars first
for i in range(len(self.scalars)):
entry = self.scalars[i]
try:
val = function(self, entry, **keywargs)
# bound again in case name has changed
entry = self.scalars[i]
out[entry] = val
except Exception:
if raise_errors:
raise
else:
entry = self.scalars[i]
out[entry] = False
# then sections
for i in range(len(self.sections)):
entry = self.sections[i]
if call_on_sections:
try:
function(self, entry, **keywargs)
except Exception:
if raise_errors:
raise
else:
entry = self.sections[i]
out[entry] = False
# bound again in case name has changed
entry = self.sections[i]
# previous result is discarded
out[entry] = self[entry].walk(
function,
raise_errors=raise_errors,
call_on_sections=call_on_sections,
**keywargs)
return out
def as_bool(self, key):
"""
Accepts a key as input. The corresponding value must be a string or
the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
retain compatibility with Python 2.2.
If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns
``True``.
If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns
``False``.
``as_bool`` is not case sensitive.
Any other input will raise a ``ValueError``.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_bool('a')
Traceback (most recent call last):
ValueError: Value "fish" is neither True nor False
>>> a['b'] = 'True'
>>> a.as_bool('b')
1
>>> a['b'] = 'off'
>>> a.as_bool('b')
0
"""
val = self[key]
if val == True:
return True
elif val == False:
return False
else:
try:
if not isinstance(val, six.string_types):
# TODO: Why do we raise a KeyError here?
raise KeyError()
else:
return self.main._bools[val.lower()]
except KeyError:
raise ValueError('Value "%s" is neither True nor False' % val)
def as_int(self, key):
"""
A convenience method which coerces the specified value to an integer.
If the value is an invalid literal for ``int``, a ``ValueError`` will
be raised.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_int('a')
Traceback (most recent call last):
ValueError: invalid literal for int() with base 10: 'fish'
>>> a['b'] = '1'
>>> a.as_int('b')
1
>>> a['b'] = '3.2'
>>> a.as_int('b')
Traceback (most recent call last):
ValueError: invalid literal for int() with base 10: '3.2'
"""
return int(self[key])
def as_float(self, key):
"""
A convenience method which coerces the specified value to a float.
If the value is an invalid literal for ``float``, a ``ValueError`` will
be raised.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_float('a') #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
ValueError: invalid literal for float(): fish
>>> a['b'] = '1'
>>> a.as_float('b')
1.0
>>> a['b'] = '3.2'
>>> a.as_float('b') #doctest: +ELLIPSIS
3.2...
"""
return float(self[key])
def as_list(self, key):
"""
A convenience method which fetches the specified value, guaranteeing
that it is a list.
>>> a = ConfigObj()
>>> a['a'] = 1
>>> a.as_list('a')
[1]
>>> a['a'] = (1,)
>>> a.as_list('a')
[1]
>>> a['a'] = [1]
>>> a.as_list('a')
[1]
"""
result = self[key]
if isinstance(result, (tuple, list)):
return list(result)
return [result]
def restore_default(self, key):
"""
Restore (and return) default value for the specified key.
This method will only work for a ConfigObj that was created
with a configspec and has been validated.
If there is no default value for this key, ``KeyError`` is raised.
"""
default = self.default_values[key]
OrderedDict.__setitem__(self, key, default)
if key not in self.defaults:
self.defaults.append(key)
return default
def restore_defaults(self):
"""
Recursively restore default values to all members
that have them.
This method will only work for a ConfigObj that was created
with a configspec and has been validated.
It doesn't delete or modify entries without default values.
"""
for key in self.default_values:
self.restore_default(key)
for section in self.sections:
self[section].restore_defaults()
class ConfigObj(Section):
"""An object to read, create, and write config files."""
_keyword = re.compile(r'''^ # line start
(\s*) # indentation
( # keyword
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'"=].*?) # no quotes
)
\s*=\s* # divider
(.*) # value (including list values and comments)
$ # line end
''',
re.VERBOSE)
_sectionmarker = re.compile(r'''^
(\s*) # 1: indentation
((?:\[\s*)+) # 2: section marker open
( # 3: section name open
(?:"\s*\S.*?\s*")| # at least one non-space with double quotes
(?:'\s*\S.*?\s*')| # at least one non-space with single quotes
(?:[^'"\s].*?) # at least one non-space unquoted
) # section name close
((?:\s*\])+) # 4: section marker close
\s*(\#.*)? # 5: optional comment
$''',
re.VERBOSE)
# this regexp pulls list values out as a single string
# or single values and comments
# FIXME: this regex adds a '' to the end of comma terminated lists
# workaround in ``_handle_value``
_valueexp = re.compile(r'''^
(?:
(?:
(
(?:
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#][^,\#]*?) # unquoted
)
\s*,\s* # comma
)* # match all list items ending in a comma (if any)
)
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#\s][^,]*?)| # unquoted
(?:(?<!,)) # Empty value
)? # last item in a list - or string value
)|
(,) # alternatively a single comma - empty list
)
\s*(\#.*)? # optional comment
$''',
re.VERBOSE)
# use findall to get the members of a list value
_listvalueexp = re.compile(r'''
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#]?.*?) # unquoted
)
\s*,\s* # comma
''',
re.VERBOSE)
# this regexp is used for the value
# when lists are switched off
_nolistvalue = re.compile(r'''^
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'"\#].*?)| # unquoted
(?:) # Empty value
)
\s*(\#.*)? # optional comment
$''',
re.VERBOSE)
# regexes for finding triple quoted values on one line
_single_line_single = re.compile(r"^'''(.*?)'''\s*(#.*)?$")
_single_line_double = re.compile(r'^"""(.*?)"""\s*(#.*)?$')
_multi_line_single = re.compile(r"^(.*?)'''\s*(#.*)?$")
_multi_line_double = re.compile(r'^(.*?)"""\s*(#.*)?$')
_triple_quote = {
"'''": (_single_line_single, _multi_line_single),
'"""': (_single_line_double, _multi_line_double),
}
# Used by the ``istrue`` Section method
_bools = {
'yes': True, 'no': False,
'on': True, 'off': False,
'1': True, '0': False,
'true': True, 'false': False,
}
def __init__(self, infile=None, options=None, configspec=None, encoding=None,
interpolation=True, raise_errors=False, list_values=True,
create_empty=False, file_error=False, stringify=True,
indent_type=None, default_encoding=None, unrepr=False,
write_empty_values=False, _inspec=False, use_zato=True, zato_crypto_manager=None,
zato_secrets_conf=None, zato_secrets_url_prefix='zato+secret://'):
"""
Parse a config file or create a config file object.
``ConfigObj(infile=None, configspec=None, encoding=None,
interpolation=True, raise_errors=False, list_values=True,
create_empty=False, file_error=False, stringify=True,
indent_type=None, default_encoding=None, unrepr=False,
write_empty_values=False, _inspec=False)``
"""
# Zato
from zato.common.util.config import get_env_config_ctx, get_env_config_value
# Extract the details about this file
zato_env_config_ctx = get_env_config_ctx(infile)
# Save it for later use
self.zato_env_config_ctx = zato_env_config_ctx
self.zato_component = zato_env_config_ctx.component
self.zato_config_file_name = zato_env_config_ctx.file_name
self.zato_env_variable_missing_suffix = zato_env_config_ctx.missing_suffix
self.zato_get_env_config_value = get_env_config_value
self._inspec = _inspec
self.use_zato = use_zato
self.zato_crypto_manager = zato_crypto_manager
self.zato_secrets_conf = zato_secrets_conf
self.zato_secrets_url_prefix = zato_secrets_url_prefix
# init the superclass
Section.__init__(self, self, 0, self)
self.zato_file_name = infile
infile = infile or []
_options = {'configspec': configspec,
'encoding': encoding, 'interpolation': interpolation,
'raise_errors': raise_errors, 'list_values': list_values,
'create_empty': create_empty, 'file_error': file_error,
'stringify': stringify, 'indent_type': indent_type,
'default_encoding': default_encoding, 'unrepr': unrepr,
'write_empty_values': write_empty_values}
if options is None:
options = _options
else:
import warnings
warnings.warn('Passing in an options dictionary to ConfigObj() is '
'deprecated. Use **options instead.',
DeprecationWarning, stacklevel=2)
# TODO: check the values too.
for entry in options:
if entry not in OPTION_DEFAULTS:
raise TypeError('Unrecognised option "%s".' % entry)
for entry, value in list(OPTION_DEFAULTS.items()):
if entry not in options:
options[entry] = value
keyword_value = _options[entry]
if value != keyword_value:
options[entry] = keyword_value
# XXXX this ignores an explicit list_values = True in combination
# with _inspec. The user should *never* do that anyway, but still...
if _inspec:
options['list_values'] = False
self._initialise(options)
configspec = options['configspec']
self._original_configspec = configspec
self._load(infile, configspec)
def _load(self, infile, configspec):
if isinstance(infile, six.string_types):
self.filename = infile
if os.path.isfile(infile):
with open(infile, 'rb') as h:
content = h.readlines() or []
elif self.file_error:
# raise an error if the file doesn't exist
raise IOError('Config file not found: "%s".' % self.filename)
else:
# file doesn't already exist
if self.create_empty:
# this is a good test that the filename specified
# isn't impossible - like on a non-existent device
with open(infile, 'w') as h:
h.write('')
content = []
elif isinstance(infile, (list, tuple)):
content = list(infile)
elif isinstance(infile, dict):
# initialise self
# the Section class handles creating subsections
if isinstance(infile, ConfigObj):
# get a copy of our ConfigObj
def set_section(in_section, this_section):
for entry in in_section.scalars:
this_section[entry] = in_section[entry]
for section in in_section.sections:
this_section[section] = {}
set_section(in_section[section], this_section[section])
set_section(infile, self)
else:
for entry in infile:
self[entry] = infile[entry]
del self._errors
if configspec is not None:
self._handle_configspec(configspec)
else:
self.configspec = None
return
elif getattr(infile, 'read', MISSING) is not MISSING:
# This supports file like objects
content = infile.read() or []
# needs splitting into lines - but needs doing *after* decoding
# in case it's not an 8 bit encoding
else:
raise TypeError('infile must be a filename, file like object, or list of lines.')
if content:
# don't do it for the empty ConfigObj
content = self._handle_bom(content)
# infile is now *always* a list
#
# Set the newlines attribute (first line ending it finds)
# and strip trailing '\n' or '\r' from lines
for line in content:
if (not line) or (line[-1] not in ('\r', '\n')):
continue
for end in ('\r\n', '\n', '\r'):
if line.endswith(end):
self.newlines = end
break
break
assert all(isinstance(line, six.string_types) for line in content), repr(content)
content = [line.rstrip('\r\n') for line in content]
self._parse(content)
# if we had any errors, now is the time to raise them
if self._errors:
info = "at line %s." % self._errors[0].line_number
if len(self._errors) > 1:
msg = "Parsing failed with several errors.\nFirst error %s" % info
error = ConfigObjError(msg)
else:
error = self._errors[0]
# set the errors attribute; it's a list of tuples:
# (error_type, message, line_number)
error.errors = self._errors
# set the config attribute
error.config = self
raise error
# delete private attributes
del self._errors
if configspec is None:
self.configspec = None
else:
self._handle_configspec(configspec)
def _initialise(self, options=None):
if options is None:
options = OPTION_DEFAULTS
# initialise a few variables
self.filename = None
self._errors = []
self.raise_errors = options['raise_errors']
self.interpolation = options['interpolation']
self.list_values = options['list_values']
self.create_empty = options['create_empty']
self.file_error = options['file_error']
self.stringify = options['stringify']
self.indent_type = options['indent_type']
self.encoding = options['encoding']
self.default_encoding = options['default_encoding']
self.BOM = False
self.newlines = None
self.write_empty_values = options['write_empty_values']
self.unrepr = options['unrepr']
self.initial_comment = []
self.final_comment = []
self.configspec = None
if self._inspec:
self.list_values = False
# Clear section attributes as well
Section._initialise(self)
def __repr__(self):
def _getval(key):
try:
return self[key]
except MissingInterpolationOption:
return OrderedDict.__getitem__(self, key)
return ('ConfigObj({%s})' %
', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
for key in (self.scalars + self.sections)]))
def _handle_bom(self, infile):
"""
Handle any BOM, and decode if necessary.
If an encoding is specified, that *must* be used - but the BOM should
still be removed (and the BOM attribute set).
(If the encoding is wrongly specified, then a BOM for an alternative
encoding won't be discovered or removed.)
If an encoding is not specified, UTF8 or UTF16 BOM will be detected and
removed. The BOM attribute will be set. UTF16 will be decoded to
unicode.
NOTE: This method must not be called with an empty ``infile``.
Specifying the *wrong* encoding is likely to cause a
``UnicodeDecodeError``.
``infile`` must always be returned as a list of lines, but may be
passed in as a single string.
"""
if ((self.encoding is not None) and
(self.encoding.lower() not in BOM_LIST)):
# No need to check for a BOM
# the encoding specified doesn't have one
# just decode
return self._decode(infile, self.encoding)
if isinstance(infile, (list, tuple)):
line = infile[0]
else:
line = infile
if isinstance(line, six.text_type):
# it's already decoded and there's no need to do anything
# else, just use the _decode utility method to handle
# listifying appropriately
return self._decode(infile, self.encoding)
if self.encoding is not None:
# encoding explicitly supplied
# And it could have an associated BOM
# TODO: if encoding is just UTF16 - we ought to check for both
# TODO: big endian and little endian versions.
enc = BOM_LIST[self.encoding.lower()]
if enc == 'utf_16':
# For UTF16 we try big endian and little endian
for BOM, (encoding, final_encoding) in list(BOMS.items()):
if not final_encoding:
# skip UTF8
continue
if infile.startswith(BOM):
### BOM discovered
##self.BOM = True
# Don't need to remove BOM
return self._decode(infile, encoding)
# If we get this far, it will *probably* raise a UnicodeDecodeError
# as the input doesn't appear to start with a BOM
return self._decode(infile, self.encoding)
# Must be UTF8
BOM = BOM_SET[enc]
if not line.startswith(BOM):
return self._decode(infile, self.encoding)
newline = line[len(BOM):]
# BOM removed
if isinstance(infile, (list, tuple)):
infile[0] = newline
else:
infile = newline
self.BOM = True
return self._decode(infile, self.encoding)
# No encoding specified - so we need to check for UTF8/UTF16
for BOM, (encoding, final_encoding) in list(BOMS.items()):
if not isinstance(line, six.binary_type) or not line.startswith(BOM):
# didn't specify a BOM, or it's not a bytestring
continue
else:
# BOM discovered
self.encoding = final_encoding
if not final_encoding:
self.BOM = True
# UTF8
# remove BOM
newline = line[len(BOM):]
if isinstance(infile, (list, tuple)):
infile[0] = newline
else:
infile = newline
# UTF-8
if isinstance(infile, six.text_type):
return infile.splitlines(True)
elif isinstance(infile, six.binary_type):
return infile.decode('utf-8').splitlines(True)
else:
return self._decode(infile, 'utf-8')
# UTF16 - have to decode
return self._decode(infile, encoding)
if six.PY2 and isinstance(line, str):
# don't actually do any decoding, since we're on python 2 and
# returning a bytestring is fine
return self._decode(infile, None)
# No BOM discovered and no encoding specified, default to UTF-8
if isinstance(infile, six.binary_type):
return infile.decode('utf-8').splitlines(True)
else:
return self._decode(infile, 'utf-8')
def _a_to_u(self, aString):
"""Decode ASCII strings to unicode if a self.encoding is specified."""
if isinstance(aString, six.binary_type) and self.encoding:
return aString.decode(self.encoding)
else:
return aString
def _decode(self, infile, encoding):
"""
Decode infile to unicode, using the specified encoding.
If infile is a string, it also needs converting to a list.
"""
if isinstance(infile, six.string_types):
return infile.splitlines(True)
if isinstance(infile, six.binary_type):
# NOTE: Could raise a ``UnicodeDecodeError``
if encoding:
return infile.decode(encoding).splitlines(True)
else:
return infile.splitlines(True)
if encoding:
for i, line in enumerate(infile):
if isinstance(line, six.binary_type):
# NOTE: The isinstance test here handles mixed lists of unicode/string
# NOTE: But the decode will break on any non-string values
# NOTE: Or could raise a ``UnicodeDecodeError``
infile[i] = line.decode(encoding)
return infile
def _decode_element(self, line):
"""Decode element to unicode if necessary."""
if isinstance(line, six.binary_type) and self.default_encoding:
return line.decode(self.default_encoding)
else:
return line
# TODO: this may need to be modified
def _str(self, value):
"""
Used by ``stringify`` within validate, to turn non-string values
into strings.
"""
if not isinstance(value, six.string_types):
# intentionally 'str' because it's just whatever the "normal"
# string type is for the python version we're dealing with
return str(value)
else:
return value
def _parse(self, infile):
"""Actually parse the config file."""
temp_list_values = self.list_values
if self.unrepr:
self.list_values = False
comment_list = []
done_start = False
this_section = self
# defensive guard for the Zato env lookup below: top-level keys can appear before any section marker
sect_name = ''
maxline = len(infile) - 1
cur_index = -1
reset_comment = False
while cur_index < maxline:
if reset_comment:
comment_list = []
cur_index += 1
line = infile[cur_index]
sline = line.strip()
# do we have anything on the line ?
if not sline or sline.startswith('#'):
reset_comment = False
comment_list.append(line)
continue
if not done_start:
# preserve initial comment
self.initial_comment = comment_list
comment_list = []
done_start = True
reset_comment = True
# first we check if it's a section marker
mat = self._sectionmarker.match(line)
if mat is not None:
# is a section line
(indent, sect_open, sect_name, sect_close, comment) = mat.groups()
if indent and (self.indent_type is None):
self.indent_type = indent
cur_depth = sect_open.count('[')
if cur_depth != sect_close.count(']'):
self._handle_error("Cannot compute the section depth",
NestingError, infile, cur_index)
continue
if cur_depth < this_section.depth:
# the new section is dropping back to a previous level
try:
parent = self._match_depth(this_section,
cur_depth).parent
except SyntaxError:
self._handle_error("Cannot compute nesting level",
NestingError, infile, cur_index)
continue
elif cur_depth == this_section.depth:
# the new section is a sibling of the current section
parent = this_section.parent
elif cur_depth == this_section.depth + 1:
# the new section is a child of the current section
parent = this_section
else:
self._handle_error("Section too nested",
NestingError, infile, cur_index)
continue
sect_name = self._unquote(sect_name)
if sect_name in parent:
self._handle_error('Duplicate section name',
DuplicateError, infile, cur_index)
continue
# create the new section
this_section = Section(
parent,
cur_depth,
self,
name=sect_name)
parent[sect_name] = this_section
parent.inline_comments[sect_name] = comment
parent.comments[sect_name] = comment_list
continue
#
# it's not a section marker,
# so it should be a valid ``key = value`` line
mat = self._keyword.match(line)
if mat is None:
self._handle_error(
'Invalid line ({0!r}) (matched as neither section nor keyword)'.format(line),
ParseError, infile, cur_index)
else:
# is a keyword value
# value will include any inline comment
(indent, key, value) = mat.groups()
_env_value = self.zato_get_env_config_value(self.zato_component, self.zato_config_file_name, sect_name, key)
if not _env_value.endswith(self.zato_env_variable_missing_suffix):
value = _env_value
# Handle Zato-specific needs
if self.use_zato:
# This may be an environment variable ..
if value.startswith('$'):
# .. certain keys should be ignored because they will be processed ..
# .. by other layers, e.g. pickup configuration ..
to_ignore = {'pickup_from'}
if key not in to_ignore:
# .. do not process it if it is just a lone $ sign or starts with $$ (an escaped literal).
if not (len(value) == 1 or value.startswith('$$')):
env_key_name = value[1:]
try:
value = os.environ[env_key_name]
except KeyError:
logger.warning('Environment variable `%s` not found, config key `%s` (%s)',
env_key_name, key, self.zato_file_name)
# .. this may be a value to decrypt with a secret key (note that it is an if, not elif,
# to make it possible for environment variables to point to secrets.conf).
if value.startswith(self.zato_secrets_url_prefix):
entry = value.replace(self.zato_secrets_url_prefix, '', 1)
if not entry:
raise ValueError('Missing entry in address `{}`, config key `{}`'.format(value, key))
entry = entry.split('.')
group_name = entry[0]
group_key = '.'.join(entry[1:])
if self.zato_secrets_conf:
group = self.zato_secrets_conf.get(group_name)
if not group:
raise ValueError('Group not found `{}`, config key `{}`, value `{}`'.format(
group_name, key, value))
if group_key not in group:
raise ValueError('Group key not found `{}`, config key `{}`, value `{}`'.format(
group_key, key, value))
else:
encrypted = group[group_key]
if encrypted:
try:
value = self.zato_crypto_manager.decrypt(encrypted)
except Exception as e:
raise ValueError('Could not decrypt value `{}`, group:`{}`, group_key:`{}`, e:`{}`'.format(
encrypted, group, group_key, e))
else:
value = encrypted # This will happen if 'encrypted' is actually an empty string
if indent and (self.indent_type is None):
self.indent_type = indent
# check for a multiline value
if value[:3] in ['"""', "'''"]:
try:
value, comment, cur_index = self._multiline(
value, infile, cur_index, maxline)
except SyntaxError:
self._handle_error(
'Parse error in multiline value',
ParseError, infile, cur_index)
continue
else:
if self.unrepr:
comment = ''
try:
value = unrepr(value)
except Exception as e:
if isinstance(e, UnknownType):
msg = 'Unknown name or type in value'
else:
msg = 'Parse error from unrepr-ing multiline value'
self._handle_error(msg, UnreprError, infile,
cur_index)
continue
else:
if self.unrepr:
comment = ''
try:
value = unrepr(value)
except Exception as e:
if isinstance(e, UnknownType):
msg = 'Unknown name or type in value'
else:
msg = 'Parse error from unrepr-ing value'
self._handle_error(msg, UnreprError, infile,
cur_index)
continue
else:
# extract comment and lists
try:
(value, comment) = self._handle_value(value)
except SyntaxError:
self._handle_error(
'Parse error in value',
ParseError, infile, cur_index)
continue
#
key = self._unquote(key)
if key in this_section:
self._handle_error(
'Duplicate keyword name',
DuplicateError, infile, cur_index)
continue
# add the key.
# we set unrepr because if we have got this far we will never
# be creating a new section
# As a last resort, we can attempt to convert strings to their actual
# data types, e.g. integers.
if self.use_zato:
try:
value = literal_eval(value)
except Exception:
# That's OK, we just had to try
pass
this_section.__setitem__(key, value, unrepr=True)
this_section.inline_comments[key] = comment
this_section.comments[key] = comment_list
continue
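# Illustrative sketch of the Zato-specific resolution above (added commentary,
# hypothetical names). Given config lines such as:
#
#     [server]
#     password = $SERVER_PASSWORD
#     token = zato+secret://connections.api_token
#
# '$SERVER_PASSWORD' is replaced with os.environ['SERVER_PASSWORD'] if that
# variable exists (a lone '$' or a value starting with '$$' is left as-is),
# while the 'zato+secret://' value is looked up in zato_secrets_conf under
# group 'connections', key 'api_token', and decrypted with zato_crypto_manager.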
#
if self.indent_type is None:
# no indentation used, set the type accordingly
self.indent_type = ''
# preserve the final comment
if not self and not self.initial_comment:
self.initial_comment = comment_list
elif not reset_comment:
self.final_comment = comment_list
self.list_values = temp_list_values
def _match_depth(self, sect, depth):
"""
Given a section and a depth level, walk back through the sections
parents to see if the depth level matches a previous section.
Return a reference to the right section,
or raise a SyntaxError.
"""
while depth < sect.depth:
if sect is sect.parent:
# we've reached the top level already
raise SyntaxError()
sect = sect.parent
if sect.depth == depth:
return sect
# shouldn't get here
raise SyntaxError()
def _handle_error(self, text, ErrorClass, infile, cur_index):
"""
Handle an error according to the error settings.
Either raise the error or store it.
The error will have occurred at ``cur_index``.
"""
line = infile[cur_index]
cur_index += 1
message = '{0} at line {1}.'.format(text, cur_index)
error = ErrorClass(message, cur_index, line)
if self.raise_errors:
# raise the error - parsing stops here
raise error
# store the error
# reraise when parsing has finished
self._errors.append(error)
def _unquote(self, value):
"""Return an unquoted version of a value"""
if not value:
# should only happen during parsing of lists
raise SyntaxError
if (value[0] == value[-1]) and (value[0] in ('"', "'")):
value = value[1:-1]
return value
def _quote(self, value, multiline=True):
"""
Return a safely quoted version of a value.
Raise a ConfigObjError if the value cannot be safely quoted.
If multiline is ``True`` (default) then use triple quotes
if necessary.
* Don't quote values that don't need it.
* Recursively quote members of a list and return a comma joined list.
* Multiline is ``False`` for lists.
* Obey list syntax for empty and single member lists.
If ``list_values=False`` then the value is only quoted if it contains
a ``\\n`` (is multiline) or '#'.
If ``write_empty_values`` is set, and the value is an empty string, it
won't be quoted.
"""
if multiline and self.write_empty_values and value == '':
# Only if multiline is set, so that it is used for values not
# keys, and not values that are part of a list
return ''
if multiline and isinstance(value, (list, tuple)):
if not value:
return ','
elif len(value) == 1:
return self._quote(value[0], multiline=False) + ','
return ', '.join([self._quote(val, multiline=False)
for val in value])
if not isinstance(value, six.string_types):
if self.stringify:
# intentionally 'str' because it's just whatever the "normal"
# string type is for the python version we're dealing with
value = str(value)
else:
raise TypeError('Value "%s" is not a string.' % value)
if not value:
return '""'
no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value
need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value ))
hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value)
check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote
if check_for_single:
if not self.list_values:
# we don't quote if ``list_values=False``
quot = noquot
# for normal values either single or double quotes will do
elif '\n' in value:
# will only happen if multiline is off - e.g. '\n' in key
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
elif ((value[0] not in wspace_plus) and
(value[-1] not in wspace_plus) and
(',' not in value)):
quot = noquot
else:
quot = self._get_single_quote(value)
else:
# if value has '\n' or "'" *and* '"', it will need triple quotes
quot = self._get_triple_quote(value)
if quot == noquot and '#' in value and self.list_values:
quot = self._get_single_quote(value)
return quot % value
def _get_single_quote(self, value):
if ("'" in value) and ('"' in value):
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
elif '"' in value:
quot = squot
else:
quot = dquot
return quot
def _get_triple_quote(self, value):
if (value.find('"""') != -1) and (value.find("'''") != -1):
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
if value.find('"""') == -1:
quot = tdquot
else:
quot = tsquot
return quot
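# Illustrative examples of the quoting rules above (added commentary):
#
#     _quote('plain')        -> plain               (nothing to escape)
#     _quote('a, b')         -> "a, b"              (comma would parse as a list)
#     _quote("'hi'")         -> "'hi'"              (wrapped in double quotes)
#     _quote('multi\nline')  -> '''multi\nline'''   (triple quotes for newlines)
#     _quote(['a', 'b'])     -> a, b                (members quoted, comma-joined)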
def _handle_value(self, value):
"""
Given a value string, unquote, remove comment,
handle lists. (including empty and single member lists)
"""
if self._inspec:
# Parsing a configspec so don't handle comments
return (value, '')
# do we look for lists in values ?
if not self.list_values:
mat = self._nolistvalue.match(value)
if mat is None:
raise SyntaxError()
# NOTE: we don't unquote here
return mat.groups()
#
mat = self._valueexp.match(value)
if mat is None:
# the value is badly constructed, probably badly quoted,
# or an invalid list
raise SyntaxError()
(list_values, single, empty_list, comment) = mat.groups()
if (list_values == '') and (single is None):
# change this if you want to accept empty values
raise SyntaxError()
# NOTE: note there is no error handling from here if the regex
# is wrong: then incorrect values will slip through
if empty_list is not None:
# the single comma - meaning an empty list
return ([], comment)
if single is not None:
# handle empty values
if list_values and not single:
# FIXME: the '' is a workaround because our regex now matches
# '' at the end of a list if it has a trailing comma
single = None
else:
single = single or '""'
single = self._unquote(single)
if list_values == '':
# not a list value
return (single, comment)
the_list = self._listvalueexp.findall(list_values)
the_list = [self._unquote(val) for val in the_list]
if single is not None:
the_list += [single]
return (the_list, comment)
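# Illustrative examples of the value syntax handled above (added commentary):
#
#     'a, b, c'     -> (['a', 'b', 'c'], None)    list value
#     'a,'          -> (['a'], None)              single-member list
#     ','           -> ([], None)                 empty list
#     'a  # note'   -> ('a', '# note')            value plus inline comment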
def _multiline(self, value, infile, cur_index, maxline):
"""Extract the value, where we are in a multiline situation."""
quot = value[:3]
newvalue = value[3:]
single_line = self._triple_quote[quot][0]
multi_line = self._triple_quote[quot][1]
mat = single_line.match(value)
if mat is not None:
retval = list(mat.groups())
retval.append(cur_index)
return retval
elif newvalue.find(quot) != -1:
# somehow the triple quote is missing
raise SyntaxError()
#
while cur_index < maxline:
cur_index += 1
newvalue += '\n'
line = infile[cur_index]
if line.find(quot) == -1:
newvalue += line
else:
# end of multiline, process it
break
else:
# we've got to the end of the config, oops...
raise SyntaxError()
mat = multi_line.match(line)
if mat is None:
# a badly formed line
raise SyntaxError()
(value, comment) = mat.groups()
return (newvalue + value, comment, cur_index)
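# Illustrative example (added commentary): a triple-quoted value may span
# several physical lines, e.g.
#
#     key = '''first line
#     second line'''   # optional comment
#
# parses to the single value 'first line\nsecond line'.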
def _handle_configspec(self, configspec):
"""Parse the configspec."""
# FIXME: Should we check that the configspec was created with the
# correct settings ? (i.e. ``list_values=False``)
if not isinstance(configspec, ConfigObj):
try:
configspec = ConfigObj(configspec,
raise_errors=True,
file_error=True,
_inspec=True)
except ConfigObjError as e:
# FIXME: Should these errors have a reference
# to the already parsed ConfigObj ?
raise ConfigspecError('Parsing configspec failed: %s' % e)
except IOError as e:
raise IOError('Reading configspec failed: %s' % e)
self.configspec = configspec
def _set_configspec(self, section, copy):
"""
Called by validate. Handles setting the configspec on subsections
including sections to be validated by __many__
"""
configspec = section.configspec
many = configspec.get('__many__')
if isinstance(many, dict):
for entry in section.sections:
if entry not in configspec:
section[entry].configspec = many
for entry in configspec.sections:
if entry == '__many__':
continue
if entry not in section:
section[entry] = {}
section[entry]._created = True
if copy:
# copy comments
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
# Could be a scalar when we expect a section
if isinstance(section[entry], Section):
section[entry].configspec = configspec[entry]
def _write_line(self, indent_string, entry, this_entry, comment):
"""Write an individual line, for the write method"""
# NOTE: the calls to self._quote here handles non-StringType values.
if not self.unrepr:
val = self._decode_element(self._quote(this_entry))
else:
val = repr(this_entry)
out = '%s%s%s%s %s' % (indent_string,
self._decode_element(self._quote(entry, multiline=False)),
self._a_to_u(' = '),
val,
self._decode_element(comment))
out = out.strip()
return out
def _write_marker(self, indent_string, depth, entry, comment):
"""Write a section marker line"""
return '%s%s%s%s%s' % (indent_string,
self._a_to_u('[' * depth),
self._quote(self._decode_element(entry), multiline=False),
self._a_to_u(']' * depth),
self._decode_element(comment))
def _handle_comment(self, comment):
"""Deal with a comment."""
if not comment:
return ''
start = self.indent_type
if not comment.startswith('#'):
start += self._a_to_u(' # ')
return (start + comment)
# Public methods
def write(self, outfile=None, section=None):
"""
Write the current ConfigObj as a file
tekNico: FIXME: use StringIO instead of real files
>>> filename = a.filename
>>> a.filename = 'test.ini'
>>> a.write()
>>> a.filename = filename
>>> a == ConfigObj('test.ini', raise_errors=True)
1
>>> import os
>>> os.remove('test.ini')
"""
if self.indent_type is None:
# this can be true if initialised from a dictionary
self.indent_type = DEFAULT_INDENT_TYPE
out = []
cs = self._a_to_u('#')
csp = self._a_to_u('# ')
if section is None:
int_val = self.interpolation
self.interpolation = False
section = self
for line in self.initial_comment:
line = self._decode_element(line)
stripped_line = line.strip()
if stripped_line and not stripped_line.startswith(cs):
line = csp + line
out.append(line)
indent_string = self.indent_type * section.depth
for entry in (section.scalars + section.sections):
if entry in section.defaults:
# don't write out default values
continue
for comment_line in section.comments[entry]:
comment_line = self._decode_element(comment_line.lstrip())
if comment_line and not comment_line.startswith(cs):
comment_line = csp + comment_line
out.append(indent_string + comment_line)
this_entry = section[entry]
comment = self._handle_comment(section.inline_comments[entry])
if isinstance(this_entry, Section):
# a section
out.append(self._write_marker(
indent_string,
this_entry.depth,
entry,
comment))
out.extend(self.write(section=this_entry))
else:
out.append(self._write_line(
indent_string,
entry,
this_entry,
comment))
if section is self:
for line in self.final_comment:
line = self._decode_element(line)
stripped_line = line.strip()
if stripped_line and not stripped_line.startswith(cs):
line = csp + line
out.append(line)
self.interpolation = int_val
if section is not self:
return out
if (self.filename is None) and (outfile is None):
# output a list of lines
# might need to encode
# NOTE: This will *screw* UTF16, each line will start with the BOM
if self.encoding:
out = [l.encode(self.encoding) for l in out]
if (self.BOM and ((self.encoding is None) or
(BOM_LIST.get(self.encoding.lower()) == 'utf_8'))):
# Add the UTF8 BOM
if not out:
out.append('')
out[0] = BOM_UTF8 + out[0]
return out
# Turn the list to a string, joined with correct newlines
newline = self.newlines or os.linesep
if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w'
and sys.platform == 'win32' and newline == '\r\n'):
# Windows specific hack to avoid writing '\r\r\n'
newline = '\n'
output = self._a_to_u(newline).join(out)
if not output.endswith(newline):
output += newline
if isinstance(output, six.binary_type):
output_bytes = output
else:
output_bytes = output.encode(self.encoding or
self.default_encoding or
'ascii')
if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
# Add the UTF8 BOM
output_bytes = BOM_UTF8 + output_bytes
if outfile is not None:
outfile.write(output_bytes)
else:
with open(self.filename, 'wb') as h:
h.write(output_bytes)
def validate(self, validator, preserve_errors=False, copy=False,
section=None):
"""
Test the ConfigObj against a configspec.
It uses the ``validator`` object from *validate.py*.
To run ``validate`` on the current ConfigObj, call: ::
test = config.validate(validator)
(Normally having previously passed in the configspec when the ConfigObj
was created - you can dynamically assign a dictionary of checks to the
``configspec`` attribute of a section though).
It returns ``True`` if everything passes, or a dictionary of
pass/fails (True/False). If every member of a subsection passes, it
will just have the value ``True``. (It also returns ``False`` if all
members fail).
In addition, it converts the values from strings to their native
types if their checks pass (and ``stringify`` is set).
If ``preserve_errors`` is ``True`` (``False`` is default) then instead
of a marking a fail with a ``False``, it will preserve the actual
exception object. This can contain info about the reason for failure.
For example the ``VdtValueTooSmallError`` indicates that the value
supplied was too small. If a value (or section) is missing it will
still be marked as ``False``.
You must have the validate module to use ``preserve_errors=True``.
You can then use the ``flatten_errors`` function to turn your nested
results dictionary into a flattened list of failures - useful for
displaying meaningful error messages.
"""
if section is None:
if self.configspec is None:
raise ValueError('No configspec supplied.')
if preserve_errors:
# We do this once to remove a top level dependency on the validate module
# Which makes importing configobj faster
from zato.common.ext.validate_ import VdtMissingValue
self._vdtMissingValue = VdtMissingValue
section = self
if copy:
section.initial_comment = section.configspec.initial_comment
section.final_comment = section.configspec.final_comment
section.encoding = section.configspec.encoding
section.BOM = section.configspec.BOM
section.newlines = section.configspec.newlines
section.indent_type = section.configspec.indent_type
#
# section.default_values.clear() #??
configspec = section.configspec
self._set_configspec(section, copy)
def validate_entry(entry, spec, val, missing, ret_true, ret_false):
section.default_values.pop(entry, None)
try:
section.default_values[entry] = validator.get_default_value(configspec[entry])
except (KeyError, AttributeError, validator.baseErrorClass):
# No default, bad default or validator has no 'get_default_value'
# (e.g. SimpleVal)
pass
try:
check = validator.check(spec,
val,
missing=missing
)
except validator.baseErrorClass as e:
if not preserve_errors or isinstance(e, self._vdtMissingValue):
out[entry] = False
else:
# preserve the error
out[entry] = e
ret_false = False
ret_true = False
else:
ret_false = False
out[entry] = True
if self.stringify or missing:
# if we are doing type conversion
# or the value is a supplied default
if not self.stringify:
if isinstance(check, (list, tuple)):
# preserve lists
check = [self._str(item) for item in check]
elif missing and check is None:
# convert the None from a default to a ''
check = ''
else:
check = self._str(check)
if (check != val) or missing:
section[entry] = check
if not copy and missing and entry not in section.defaults:
section.defaults.append(entry)
return ret_true, ret_false
#
out = {}
ret_true = True
ret_false = True
unvalidated = [k for k in section.scalars if k not in configspec]
incorrect_sections = [k for k in configspec.sections if k in section.scalars]
incorrect_scalars = [k for k in configspec.scalars if k in section.sections]
for entry in configspec.scalars:
if entry in ('__many__', '___many___'):
# reserved names
continue
if (not entry in section.scalars) or (entry in section.defaults):
# missing entries
# or entries from defaults
missing = True
val = None
if copy and entry not in section.scalars:
# copy comments
section.comments[entry] = (
configspec.comments.get(entry, []))
section.inline_comments[entry] = (
configspec.inline_comments.get(entry, ''))
#
else:
missing = False
val = section[entry]
ret_true, ret_false = validate_entry(entry, configspec[entry], val,
missing, ret_true, ret_false)
many = None
if '__many__' in configspec.scalars:
many = configspec['__many__']
elif '___many___' in configspec.scalars:
many = configspec['___many___']
if many is not None:
for entry in unvalidated:
val = section[entry]
ret_true, ret_false = validate_entry(entry, many, val, False,
ret_true, ret_false)
unvalidated = []
for entry in incorrect_scalars:
ret_true = False
if not preserve_errors:
out[entry] = False
else:
ret_false = False
msg = 'Value %r was provided as a section' % entry
out[entry] = validator.baseErrorClass(msg)
for entry in incorrect_sections:
ret_true = False
if not preserve_errors:
out[entry] = False
else:
ret_false = False
msg = 'Section %r was provided as a single value' % entry
out[entry] = validator.baseErrorClass(msg)
# Missing sections will have been created as empty ones when the
# configspec was read.
for entry in section.sections:
# FIXME: this means DEFAULT is not copied in copy mode
if section is self and entry == 'DEFAULT':
continue
if section[entry].configspec is None:
unvalidated.append(entry)
continue
if copy:
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry])
out[entry] = check
if check == False:
ret_true = False
elif check == True:
ret_false = False
else:
ret_true = False
section.extra_values = unvalidated
if preserve_errors and not section._created:
# If the section wasn't created (i.e. it wasn't missing)
# then we can't return False, we need to preserve errors
ret_false = False
#
if ret_false and preserve_errors and out:
# If we are preserving errors, but all
# the failures are from missing sections / values
# then we can return False. Otherwise there is a
# real failure that we need to preserve.
ret_false = not any(out.values())
if ret_true:
return True
elif ret_false:
return False
return out
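# Added usage sketch (hedged, not part of the original module): 'app.ini'
# and 'app.spec' are hypothetical files, and Validator is assumed to be
# importable from the vendored validate_ module.
#
# >>> from zato.common.ext.validate_ import Validator
# >>> config = ConfigObj('app.ini', configspec='app.spec')
# >>> results = config.validate(Validator(), preserve_errors=True)
# >>> # True means full success; False or a dict of per-entry results otherwise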
def reset(self):
"""Clear ConfigObj instance and restore to 'freshly created' state."""
self.clear()
self._initialise()
# FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload)
# requires an empty dictionary
self.configspec = None
# Just to be sure ;-)
self._original_configspec = None
def reload(self):
"""
Reload a ConfigObj from file.
This method raises a ``ReloadError`` if the ConfigObj doesn't have
a filename attribute pointing to a file.
"""
if not isinstance(self.filename, six.string_types):
raise ReloadError()
filename = self.filename
current_options = {}
for entry in OPTION_DEFAULTS:
if entry == 'configspec':
continue
current_options[entry] = getattr(self, entry)
configspec = self._original_configspec
current_options['configspec'] = configspec
self.clear()
self._initialise(current_options)
self._load(filename, configspec)
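# Added usage sketch (hedged; 'app.ini' is a hypothetical file): reload()
# re-reads the file the instance was created from, discarding in-memory
# edits; instances not created from a filename raise ReloadError instead.
#
# >>> config = ConfigObj('app.ini')
# >>> config['key'] = 'changed in memory only'
# >>> config.reload()  # 'key' now holds the value from disk again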
class SimpleVal:
"""
A simple validator.
Can be used to check that all expected members are present.
To use it, provide a configspec with all your members in (the value given
will be ignored). Pass an instance of ``SimpleVal`` to the ``validate``
method of your ``ConfigObj``. ``validate`` will return ``True`` if all
members are present, or a dictionary with True/False meaning
present/missing. (Whole missing sections will be replaced with ``False``)
"""
def __init__(self):
self.baseErrorClass = ConfigObjError
def check(self, check, member, missing=False):
"""A dummy check method, always returns the value unchanged."""
if missing:
raise self.baseErrorClass()
return member
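# Added illustration (hedged): SimpleVal only verifies that the expected
# members are present; the values given in the configspec are ignored.
#
# >>> val = SimpleVal()
# >>> test = ConfigObj(['a = 1', 'b = 2'], configspec=['a = x', 'b = x'])
# >>> test.validate(val)
# True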
def flatten_errors(cfg, res, levels=None, results=None):
"""
An example function that will turn a nested dictionary of results
(as returned by ``ConfigObj.validate``) into a flat list.
``cfg`` is the ConfigObj instance being checked, ``res`` is the results
dictionary returned by ``validate``.
(This is a recursive function, so you shouldn't use the ``levels`` or
``results`` arguments - they are used by the function.)
Returns a list of keys that failed. Each member of the list is a tuple::
([list of sections...], key, result)
If ``validate`` was called with ``preserve_errors=False`` (the default)
then ``result`` will always be ``False``.
*list of sections* is a flattened list of sections that the key was found
in.
If the section was missing (or a section was expected and a scalar provided
- or vice-versa) then key will be ``None``.
If the value (or section) was missing then ``result`` will be ``False``.
If ``validate`` was called with ``preserve_errors=True`` and a value
was present, but failed the check, then ``result`` will be the exception
object returned. You can use this as a string that describes the failure.
For example *The value "3" is of the wrong type*.
"""
if levels is None:
# first time called
levels = []
results = []
if res == True:
return sorted(results)
if res == False or isinstance(res, Exception):
results.append((levels[:], None, res))
if levels:
levels.pop()
return sorted(results)
for (key, val) in list(res.items()):
if val == True:
continue
if isinstance(cfg.get(key), dict):
# Go down one level
levels.append(key)
flatten_errors(cfg[key], val, levels, results)
continue
results.append((levels[:], key, val))
#
# Go up one level
if levels:
levels.pop()
#
return sorted(results)
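# Added illustration (hedged; the section and key names are hypothetical):
# each entry pairs the section path with the failing key and its result.
#
# >>> for sections, key, result in flatten_errors(config, results):
# ...     print(sections, key, result)
# ['server'] port the value "abc" is of the wrong type.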
def get_extra_values(conf, _prepend=()):
"""
Find all the values and sections not in the configspec from a validated
ConfigObj.
``get_extra_values`` returns a list of tuples where each tuple represents
either an extra section, or an extra value.
The tuples contain two values, a tuple representing the section the value
is in and the name of the extra values. For extra values in the top level
section the first member will be an empty tuple. For values in the 'foo'
section the first member will be ``('foo',)``. For members in the 'bar'
subsection of the 'foo' section the first member will be ``('foo', 'bar')``.
NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't
been validated it will return an empty list.
"""
out = []
out.extend([(_prepend, name) for name in conf.extra_values])
for name in conf.sections:
if name not in conf.extra_values:
out.extend(get_extra_values(conf[name], _prepend + (name,)))
return out
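# Added illustration (hedged; names are hypothetical): after validating with
# a configspec, extra entries are reported with the path of the section that
# contains them.
#
# >>> get_extra_values(config)
# [((), 'unknown_key'), (('server',), 'debug')]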
"""*A programming language is a medium of expression.* - Paul Graham"""
# ################################################################################################################################
# File: zatosource_zato/code/zato-common/src/zato/common/ext/typing_extensions.py
# ################################################################################################################################
"""
This module is a vendor copy of the typing-extensions package from https://pypi.org/project/typing-extensions/
The original license is:
A. HISTORY OF THE SOFTWARE
==========================
Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.
In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.
In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations (now Zope
Corporation, see http://www.zope.com). In 2001, the Python Software
Foundation (PSF, see http://www.python.org/psf/) was formed, a
non-profit organization created specifically to own Python-related
Intellectual Property. Zope Corporation is a sponsoring member of
the PSF.
All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition). Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.
Release         Derived     Year        Owner       GPL-
                from                                compatible? (1)
0.9.0 thru 1.2              1991-1995   CWI         yes
1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
1.6             1.5.2       2000        CNRI        no
2.0             1.6         2000        BeOpen.com  no
1.6.1           1.6         2001        CNRI        yes (2)
2.1             2.0+1.6.1   2001        PSF         no
2.0.1           2.0+1.6.1   2001        PSF         yes
2.1.1           2.1+2.0.1   2001        PSF         yes
2.1.2           2.1.1       2002        PSF         yes
2.1.3           2.1.2       2002        PSF         yes
2.2 and above   2.1.1       2001-now    PSF         yes
Footnotes:
(1) GPL-compatible doesn't mean that we're distributing Python under
the GPL. All Python licenses, unlike the GPL, let you distribute
a modified version without making your changes open source. The
GPL-compatible licenses make it possible to combine Python with
other software that is released under the GPL; the others don't.
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
because its license has a choice of law clause. According to
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
is "not incompatible" with the GPL.
Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
retained in Python alone or in any derivative version prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.
7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------
1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.
8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.
ACCEPT
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
# flake8: noqa
import abc
import collections
import collections.abc
import operator
import sys
import typing
# After PEP 560, the internal typing API was substantially reworked.
# This is especially important for the Protocol class, which uses internal
# APIs quite extensively.
PEP_560 = sys.version_info[:3] >= (3, 7, 0)
if PEP_560:
GenericMeta = type
else:
# 3.6
from typing import GenericMeta, _type_vars # noqa
# The two functions below are copies of typing internal helpers.
# They are needed by _ProtocolMeta
def _no_slots_copy(dct):
dict_copy = dict(dct)
if '__slots__' in dict_copy:
for slot in dict_copy['__slots__']:
dict_copy.pop(slot, None)
return dict_copy
def _check_generic(cls, parameters):
if not cls.__parameters__:
raise TypeError(f"{cls} is not a generic class")
alen = len(parameters)
elen = len(cls.__parameters__)
if alen != elen:
raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments for {cls};"
f" actual {alen}, expected {elen}")
# Please keep __all__ alphabetized within each category.
__all__ = [
# Super-special typing primitives.
'ClassVar',
'Concatenate',
'Final',
'ParamSpec',
'Self',
'Type',
# ABCs (from collections.abc).
'Awaitable',
'AsyncIterator',
'AsyncIterable',
'Coroutine',
'AsyncGenerator',
'AsyncContextManager',
'ChainMap',
# Concrete collection types.
'ContextManager',
'Counter',
'Deque',
'DefaultDict',
'OrderedDict',
'TypedDict',
# Structural checks, a.k.a. protocols.
'SupportsIndex',
# One-off things.
'Annotated',
'final',
'IntVar',
'Literal',
'NewType',
'overload',
'Protocol',
'runtime',
'runtime_checkable',
'Text',
'TypeAlias',
'TypeGuard',
'TYPE_CHECKING',
]
if PEP_560:
__all__.extend(["get_args", "get_origin", "get_type_hints"])
# 3.6.2+
if hasattr(typing, 'NoReturn'):
NoReturn = typing.NoReturn
# 3.6.0-3.6.1
else:
class _NoReturn(typing._FinalTypingBase, _root=True):
"""Special type indicating functions that never return.
Example::
from typing import NoReturn
def stop() -> NoReturn:
raise Exception('no way')
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("NoReturn cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("NoReturn cannot be used with issubclass().")
NoReturn = _NoReturn(_root=True)
# Some unconstrained type variables. These are used by the container types.
# (These are not for export.)
T = typing.TypeVar('T') # Any type.
KT = typing.TypeVar('KT') # Key type.
VT = typing.TypeVar('VT') # Value type.
T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
ClassVar = typing.ClassVar
# On older versions of typing there is an internal class named "Final".
# 3.8+
if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
Final = typing.Final
# 3.7
elif sys.version_info[:2] >= (3, 7):
class _FinalForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
item = typing._type_check(parameters,
f'{self._name} accepts only single type')
return typing._GenericAlias(self, (item,))
Final = _FinalForm('Final',
doc="""A special typing construct to indicate that a name
cannot be re-assigned or overridden in a subclass.
For example:
MAX_SIZE: Final = 9000
MAX_SIZE += 1 # Error reported by type checker
class Connection:
TIMEOUT: Final[int] = 10
class FastConnector(Connection):
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.""")
# 3.6
else:
class _Final(typing._FinalTypingBase, _root=True):
"""A special typing construct to indicate that a name
cannot be re-assigned or overridden in a subclass.
For example:
MAX_SIZE: Final = 9000
MAX_SIZE += 1 # Error reported by type checker
class Connection:
TIMEOUT: Final[int] = 10
class FastConnector(Connection):
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.
"""
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
f'{cls.__name__[1:]} accepts only single type.'),
_root=True)
raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += f'[{typing._type_repr(self.__type__)}]'
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _Final):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
Final = _Final(_root=True)
# 3.8+
if hasattr(typing, 'final'):
final = typing.final
# 3.6-3.7
else:
def final(f):
"""This decorator can be used to indicate to type checkers that
the decorated method cannot be overridden, and the decorated class
cannot be subclassed. For example:
class Base:
@final
def done(self) -> None:
...
class Sub(Base):
def done(self) -> None: # Error reported by type checker
...
@final
class Leaf:
...
class Other(Leaf): # Error reported by type checker
...
There is no runtime checking of these properties.
"""
return f
def IntVar(name):
return typing.TypeVar(name)
# 3.8+:
if hasattr(typing, 'Literal'):
Literal = typing.Literal
# 3.7:
elif sys.version_info[:2] >= (3, 7):
class _LiteralForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
return typing._GenericAlias(self, parameters)
Literal = _LiteralForm('Literal',
doc="""A type that can be used to indicate to type checkers
that the corresponding value has a value literally equivalent
to the provided parameter. For example:
var: Literal[4] = 4
The type checker understands that 'var' is literally equal to
the value 4 and no other value.
Literal[...] cannot be subclassed. There is no runtime
checking verifying that the parameter is actually a value
instead of a type.""")
# 3.6:
else:
class _Literal(typing._FinalTypingBase, _root=True):
"""A type that can be used to indicate to type checkers that the
corresponding value has a value literally equivalent to the
provided parameter. For example:
var: Literal[4] = 4
The type checker understands that 'var' is literally equal to the
value 4 and no other value.
Literal[...] cannot be subclassed. There is no runtime checking
verifying that the parameter is actually a value instead of a type.
"""
__slots__ = ('__values__',)
def __init__(self, values=None, **kwds):
self.__values__ = values
def __getitem__(self, values):
cls = type(self)
if self.__values__ is None:
if not isinstance(values, tuple):
values = (values,)
return cls(values, _root=True)
raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
def _eval_type(self, globalns, localns):
return self
def __repr__(self):
r = super().__repr__()
if self.__values__ is not None:
r += f'[{", ".join(map(typing._type_repr, self.__values__))}]'
return r
def __hash__(self):
return hash((type(self).__name__, self.__values__))
def __eq__(self, other):
if not isinstance(other, _Literal):
return NotImplemented
if self.__values__ is not None:
return self.__values__ == other.__values__
return self is other
Literal = _Literal(_root=True)
_overload_dummy = typing._overload_dummy # noqa
overload = typing.overload
# This is not a real generic class. Don't use outside annotations.
Type = typing.Type
# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.
class _ExtensionsGenericMeta(GenericMeta):
def __subclasscheck__(self, subclass):
"""This mimics a more modern GenericMeta.__subclasscheck__() logic
(that does not have problems with recursion) to work around interactions
between collections, typing, and typing_extensions on older
versions of Python, see https://github.com/python/typing/issues/501.
"""
if self.__origin__ is not None:
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if not self.__extra__:
return super().__subclasscheck__(subclass)
res = self.__extra__.__subclasshook__(subclass)
if res is not NotImplemented:
return res
if self.__extra__ in subclass.__mro__:
return True
for scls in self.__extra__.__subclasses__():
if isinstance(scls, GenericMeta):
continue
if issubclass(subclass, scls):
return True
return False
Awaitable = typing.Awaitable
Coroutine = typing.Coroutine
AsyncIterable = typing.AsyncIterable
AsyncIterator = typing.AsyncIterator
# 3.6.1+
if hasattr(typing, 'Deque'):
Deque = typing.Deque
# 3.6.0
else:
class Deque(collections.deque, typing.MutableSequence[T],
metaclass=_ExtensionsGenericMeta,
extra=collections.deque):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Deque:
return collections.deque(*args, **kwds)
return typing._generic_new(collections.deque, cls, *args, **kwds)
ContextManager = typing.ContextManager
# 3.6.2+
if hasattr(typing, 'AsyncContextManager'):
AsyncContextManager = typing.AsyncContextManager
# 3.6.0-3.6.1
else:
from _collections_abc import _check_methods as _check_methods_in_mro # noqa
class AsyncContextManager(typing.Generic[T_co]):
__slots__ = ()
async def __aenter__(self):
return self
@abc.abstractmethod
async def __aexit__(self, exc_type, exc_value, traceback):
return None
@classmethod
def __subclasshook__(cls, C):
if cls is AsyncContextManager:
return _check_methods_in_mro(C, "__aenter__", "__aexit__")
return NotImplemented
DefaultDict = typing.DefaultDict
# 3.7.2+
if hasattr(typing, 'OrderedDict'):
OrderedDict = typing.OrderedDict
# 3.7.0-3.7.2
elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2):
OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))
# 3.6
else:
class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.OrderedDict):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is OrderedDict:
return collections.OrderedDict(*args, **kwds)
return typing._generic_new(collections.OrderedDict, cls, *args, **kwds)
# 3.6.2+
if hasattr(typing, 'Counter'):
Counter = typing.Counter
# 3.6.0-3.6.1
else:
class Counter(collections.Counter,
typing.Dict[T, int],
metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Counter:
return collections.Counter(*args, **kwds)
return typing._generic_new(collections.Counter, cls, *args, **kwds)
# 3.6.1+
if hasattr(typing, 'ChainMap'):
ChainMap = typing.ChainMap
elif hasattr(collections, 'ChainMap'):
class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.ChainMap):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is ChainMap:
return collections.ChainMap(*args, **kwds)
return typing._generic_new(collections.ChainMap, cls, *args, **kwds)
# 3.6.1+
if hasattr(typing, 'AsyncGenerator'):
AsyncGenerator = typing.AsyncGenerator
# 3.6.0
else:
class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra],
metaclass=_ExtensionsGenericMeta,
extra=collections.abc.AsyncGenerator):
__slots__ = ()
NewType = typing.NewType
Text = typing.Text
TYPE_CHECKING = typing.TYPE_CHECKING
def _gorg(cls):
"""This function exists for compatibility with old typing versions."""
assert isinstance(cls, GenericMeta)
if hasattr(cls, '_gorg'):
return cls._gorg
while cls.__origin__ is not None:
cls = cls.__origin__
return cls
_PROTO_WHITELIST = ['Callable', 'Awaitable',
'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
'ContextManager', 'AsyncContextManager']
def _get_protocol_attrs(cls):
attrs = set()
for base in cls.__mro__[:-1]: # without object
if base.__name__ in ('Protocol', 'Generic'):
continue
annotations = getattr(base, '__annotations__', {})
for attr in list(base.__dict__.keys()) + list(annotations.keys()):
if (not attr.startswith('_abc_') and attr not in (
'__abstractmethods__', '__annotations__', '__weakref__',
'_is_protocol', '_is_runtime_protocol', '__dict__',
'__args__', '__slots__',
'__next_in_mro__', '__parameters__', '__origin__',
'__orig_bases__', '__extra__', '__tree_hash__',
'__doc__', '__subclasshook__', '__init__', '__new__',
'__module__', '_MutableMapping__marker', '_gorg')):
attrs.add(attr)
return attrs
def _is_callable_members_only(cls):
return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
# 3.8+
if hasattr(typing, 'Protocol'):
Protocol = typing.Protocol
# 3.7
elif PEP_560:
from typing import _collect_type_vars # noqa
def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
class _ProtocolMeta(abc.ABCMeta):
# This metaclass is a bit unfortunate and exists only because of the lack
# of __instancehook__.
def __instancecheck__(cls, instance):
# We need this method for situations where attributes are
# assigned in __init__.
if ((not getattr(cls, '_is_protocol', False) or
_is_callable_members_only(cls)) and
issubclass(instance.__class__, cls)):
return True
if cls._is_protocol:
if all(hasattr(instance, attr) and
(not callable(getattr(cls, attr, None)) or
getattr(instance, attr) is not None)
for attr in _get_protocol_attrs(cls)):
return True
return super().__instancecheck__(instance)
class Protocol(metaclass=_ProtocolMeta):
# There is quite a lot of overlapping code with typing.Generic.
# Unfortunately it is hard to avoid this while these live in two different
# modules. The duplicated code will be removed when Protocol is moved to typing.
"""Base class for protocol classes. Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
@typing_extensions.runtime act as simple-minded runtime protocols that check
only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto(Protocol[T]):
def meth(self) -> T:
...
"""
__slots__ = ()
_is_protocol = True
def __new__(cls, *args, **kwds):
if cls is Protocol:
raise TypeError("Type Protocol cannot be instantiated; "
"it can only be used as a base class")
return super().__new__(cls)
@typing._tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple):
params = (params,)
if not params and cls is not typing.Tuple:
raise TypeError(
f"Parameter list to {cls.__qualname__}[...] cannot be empty")
msg = "Parameters to generic types must be types."
params = tuple(typing._type_check(p, msg) for p in params) # noqa
if cls is Protocol:
# Generic can only be subscripted with unique type variables.
if not all(isinstance(p, typing.TypeVar) for p in params):
i = 0
while isinstance(params[i], typing.TypeVar):
i += 1
raise TypeError(
"Parameters to Protocol[...] must all be type variables."
f" Parameter {i + 1} is {params[i]}")
if len(set(params)) != len(params):
raise TypeError(
"Parameters to Protocol[...] must all be unique")
else:
# Subscripting a regular Generic subclass.
_check_generic(cls, params)
return typing._GenericAlias(cls, params)
def __init_subclass__(cls, *args, **kwargs):
tvars = []
if '__orig_bases__' in cls.__dict__:
error = typing.Generic in cls.__orig_bases__
else:
error = typing.Generic in cls.__bases__
if error:
raise TypeError("Cannot inherit from plain Generic")
if '__orig_bases__' in cls.__dict__:
tvars = _collect_type_vars(cls.__orig_bases__)
# Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
# If found, tvars must be a subset of it.
# If not found, tvars is it.
# Also check for and reject plain Generic,
# and reject multiple Generic[...] and/or Protocol[...].
gvars = None
for base in cls.__orig_bases__:
if (isinstance(base, typing._GenericAlias) and
base.__origin__ in (typing.Generic, Protocol)):
# for error messages
the_base = base.__origin__.__name__
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...]"
" and/or Protocol[...] multiple types.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
s_args = ', '.join(str(g) for g in gvars)
raise TypeError(f"Some type variables ({s_vars}) are"
f" not listed in {the_base}[{s_args}]")
tvars = gvars
cls.__parameters__ = tuple(tvars)
# Determine if this is a protocol or a concrete subclass.
if not cls.__dict__.get('_is_protocol', None):
cls._is_protocol = any(b is Protocol for b in cls.__bases__)
# Set (or override) the protocol subclass hook.
def _proto_hook(other):
if not cls.__dict__.get('_is_protocol', None):
return NotImplemented
if not getattr(cls, '_is_runtime_protocol', False):
if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
return NotImplemented
raise TypeError("Instance and class checks can only be used with"
" @runtime protocols")
if not _is_callable_members_only(cls):
if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
return NotImplemented
raise TypeError("Protocols with non-method members"
" don't support issubclass()")
if not isinstance(other, type):
# Same error as for issubclass(1, int)
raise TypeError('issubclass() arg 1 must be a class')
for attr in _get_protocol_attrs(cls):
for base in other.__mro__:
if attr in base.__dict__:
if base.__dict__[attr] is None:
return NotImplemented
break
annotations = getattr(base, '__annotations__', {})
if (isinstance(annotations, typing.Mapping) and
attr in annotations and
isinstance(other, _ProtocolMeta) and
other._is_protocol):
break
else:
return NotImplemented
return True
if '__subclasshook__' not in cls.__dict__:
cls.__subclasshook__ = _proto_hook
# We have nothing more to do for non-protocols.
if not cls._is_protocol:
return
# Check consistency of bases.
for base in cls.__bases__:
if not (base in (object, typing.Generic) or
base.__module__ == 'collections.abc' and
base.__name__ in _PROTO_WHITELIST or
isinstance(base, _ProtocolMeta) and base._is_protocol):
raise TypeError('Protocols can only inherit from other'
f' protocols, got {repr(base)}')
cls.__init__ = _no_init
# 3.6
else:
from typing import _next_in_mro, _type_check # noqa
def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
class _ProtocolMeta(GenericMeta):
"""Internal metaclass for Protocol.
This exists so Protocol classes can be generic without deriving
from Generic.
"""
def __new__(cls, name, bases, namespace,
tvars=None, args=None, origin=None, extra=None, orig_bases=None):
# This is just a version copied from GenericMeta.__new__ that
# includes "Protocol" special treatment. (Comments removed for brevity.)
assert extra is None # Protocols should not have extra
if tvars is not None:
assert origin is not None
assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars
else:
tvars = _type_vars(bases)
gvars = None
for base in bases:
if base is typing.Generic:
raise TypeError("Cannot inherit from plain Generic")
if (isinstance(base, GenericMeta) and
base.__origin__ in (typing.Generic, Protocol)):
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...] or"
" Protocol[...] multiple times.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
s_vars = ", ".join(str(t) for t in tvars if t not in gvarset)
s_args = ", ".join(str(g) for g in gvars)
cls_name = "Generic" if any(b.__origin__ is typing.Generic
for b in bases) else "Protocol"
raise TypeError(f"Some type variables ({s_vars}) are"
f" not listed in {cls_name}[{s_args}]")
tvars = gvars
initial_bases = bases
if (extra is not None and type(extra) is abc.ABCMeta and
extra not in bases):
bases = (extra,) + bases
bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b
for b in bases)
if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases):
bases = tuple(b for b in bases if b is not typing.Generic)
namespace.update({'__origin__': origin, '__extra__': extra})
self = super(GenericMeta, cls).__new__(cls, name, bases, namespace,
_root=True)
super(GenericMeta, self).__setattr__('_gorg',
self if not origin else
_gorg(origin))
self.__parameters__ = tvars
self.__args__ = tuple(... if a is typing._TypingEllipsis else
() if a is typing._TypingEmpty else
a for a in args) if args else None
self.__next_in_mro__ = _next_in_mro(self)
if orig_bases is None:
self.__orig_bases__ = initial_bases
elif origin is not None:
self._abc_registry = origin._abc_registry
self._abc_cache = origin._abc_cache
if hasattr(self, '_subs_tree'):
self.__tree_hash__ = (hash(self._subs_tree()) if origin else
super(GenericMeta, self).__hash__())
return self
def __init__(cls, *args, **kwargs):
super().__init__(*args, **kwargs)
if not cls.__dict__.get('_is_protocol', None):
cls._is_protocol = any(b is Protocol or
isinstance(b, _ProtocolMeta) and
b.__origin__ is Protocol
for b in cls.__bases__)
if cls._is_protocol:
for base in cls.__mro__[1:]:
if not (base in (object, typing.Generic) or
base.__module__ == 'collections.abc' and
base.__name__ in _PROTO_WHITELIST or
isinstance(base, typing.TypingMeta) and base._is_protocol or
isinstance(base, GenericMeta) and
base.__origin__ is typing.Generic):
raise TypeError(f'Protocols can only inherit from other'
f' protocols, got {repr(base)}')
cls.__init__ = _no_init
def _proto_hook(other):
if not cls.__dict__.get('_is_protocol', None):
return NotImplemented
if not isinstance(other, type):
# Same error as for issubclass(1, int)
raise TypeError('issubclass() arg 1 must be a class')
for attr in _get_protocol_attrs(cls):
for base in other.__mro__:
if attr in base.__dict__:
if base.__dict__[attr] is None:
return NotImplemented
break
annotations = getattr(base, '__annotations__', {})
if (isinstance(annotations, typing.Mapping) and
attr in annotations and
isinstance(other, _ProtocolMeta) and
other._is_protocol):
break
else:
return NotImplemented
return True
if '__subclasshook__' not in cls.__dict__:
cls.__subclasshook__ = _proto_hook
def __instancecheck__(self, instance):
# We need this method for situations where attributes are
# assigned in __init__.
if ((not getattr(self, '_is_protocol', False) or
_is_callable_members_only(self)) and
issubclass(instance.__class__, self)):
return True
if self._is_protocol:
if all(hasattr(instance, attr) and
(not callable(getattr(self, attr, None)) or
getattr(instance, attr) is not None)
for attr in _get_protocol_attrs(self)):
return True
return super(GenericMeta, self).__instancecheck__(instance)
def __subclasscheck__(self, cls):
if self.__origin__ is not None:
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if (self.__dict__.get('_is_protocol', None) and
not self.__dict__.get('_is_runtime_protocol', None)):
if sys._getframe(1).f_globals['__name__'] in ['abc',
'functools',
'typing']:
return False
raise TypeError("Instance and class checks can only be used with"
" @runtime protocols")
if (self.__dict__.get('_is_runtime_protocol', None) and
not _is_callable_members_only(self)):
if sys._getframe(1).f_globals['__name__'] in ['abc',
'functools',
'typing']:
return super(GenericMeta, self).__subclasscheck__(cls)
raise TypeError("Protocols with non-method members"
" don't support issubclass()")
return super(GenericMeta, self).__subclasscheck__(cls)
@typing._tp_cache
def __getitem__(self, params):
# We also need to copy this from GenericMeta.__getitem__ to get
# special treatment of "Protocol". (Comments removed for brevity.)
if not isinstance(params, tuple):
params = (params,)
if not params and _gorg(self) is not typing.Tuple:
raise TypeError(
f"Parameter list to {self.__qualname__}[...] cannot be empty")
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
if self in (typing.Generic, Protocol):
if not all(isinstance(p, typing.TypeVar) for p in params):
raise TypeError(
f"Parameters to {repr(self)}[...] must all be type variables")
if len(set(params)) != len(params):
raise TypeError(
f"Parameters to {repr(self)}[...] must all be unique")
tvars = params
args = params
elif self in (typing.Tuple, typing.Callable):
tvars = _type_vars(params)
args = params
elif self.__origin__ in (typing.Generic, Protocol):
raise TypeError(f"Cannot subscript already-subscripted {repr(self)}")
else:
_check_generic(self, params)
tvars = _type_vars(params)
args = params
prepend = (self,) if self.__origin__ is None else ()
return self.__class__(self.__name__,
prepend + self.__bases__,
_no_slots_copy(self.__dict__),
tvars=tvars,
args=args,
origin=self,
extra=self.__extra__,
orig_bases=self.__orig_bases__)
class Protocol(metaclass=_ProtocolMeta):
"""Base class for protocol classes. Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
@typing_extensions.runtime act as simple-minded runtime protocols that check
only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto(Protocol[T]):
def meth(self) -> T:
...
"""
__slots__ = ()
_is_protocol = True
def __new__(cls, *args, **kwds):
if _gorg(cls) is Protocol:
raise TypeError("Type Protocol cannot be instantiated; "
"it can be used only as a base class")
return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds)
# 3.8+
if hasattr(typing, 'runtime_checkable'):
runtime_checkable = typing.runtime_checkable
# 3.6-3.7
else:
def runtime_checkable(cls):
"""Mark a protocol class as a runtime protocol, so that it
can be used with isinstance() and issubclass(). Raise TypeError
if applied to a non-protocol class.
This allows a simple-minded structural check very similar to the
one-offs in collections.abc such as Hashable.
"""
if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
raise TypeError('@runtime_checkable can be only applied to protocol classes,'
f' got {cls!r}')
cls._is_runtime_protocol = True
return cls
# Exists for backwards compatibility.
runtime = runtime_checkable
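# Added usage sketch (hedged, not part of the vendored module): a
# runtime-checkable protocol supports isinstance() checks that test only
# for the presence of the protocol's attributes.
#
# >>> @runtime_checkable
# ... class Closable(Protocol):
# ...     def close(self) -> None: ...
# >>> import io
# >>> isinstance(io.BytesIO(), Closable)
# True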
# 3.8+
if hasattr(typing, 'SupportsIndex'):
SupportsIndex = typing.SupportsIndex
# 3.6-3.7
else:
@runtime_checkable
class SupportsIndex(Protocol):
__slots__ = ()
@abc.abstractmethod
def __index__(self) -> int:
pass
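# Added illustration (hedged): int defines __index__, so it passes the
# structural check whichever branch supplied SupportsIndex.
#
# >>> isinstance(3, SupportsIndex)
# True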
if sys.version_info >= (3, 9, 2):
# The standard library TypedDict in Python 3.8 does not store runtime information
# about which (if any) keys are optional. See https://bugs.python.org/issue38834
# The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
# keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
TypedDict = typing.TypedDict
else:
def _check_fails(cls, other):
try:
if sys._getframe(1).f_globals['__name__'] not in ['abc',
'functools',
'typing']:
# Typed dicts are only for static structural subtyping.
raise TypeError('TypedDict does not support instance and class checks')
except (AttributeError, ValueError):
pass
return False
def _dict_new(*args, **kwargs):
if not args:
raise TypeError('TypedDict.__new__(): not enough arguments')
_, args = args[0], args[1:] # allow the "cls" keyword to be passed
return dict(*args, **kwargs)
_dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'
def _typeddict_new(*args, total=True, **kwargs):
if not args:
raise TypeError('TypedDict.__new__(): not enough arguments')
_, args = args[0], args[1:] # allow the "cls" keyword to be passed
if args:
typename, args = args[0], args[1:] # allow the "_typename" keyword to be passed
elif '_typename' in kwargs:
typename = kwargs.pop('_typename')
import warnings
warnings.warn("Passing '_typename' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError("TypedDict.__new__() missing 1 required positional "
"argument: '_typename'")
if args:
try:
fields, = args # allow the "_fields" keyword to be passed
except ValueError:
raise TypeError('TypedDict.__new__() takes from 2 to 3 '
f'positional arguments but {len(args) + 2} '
'were given')
elif '_fields' in kwargs and len(kwargs) == 1:
fields = kwargs.pop('_fields')
import warnings
warnings.warn("Passing '_fields' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
fields = None
if fields is None:
fields = kwargs
elif kwargs:
raise TypeError("TypedDict takes either a dict or keyword arguments,"
" but not both")
ns = {'__annotations__': dict(fields)}
try:
# Setting correct module is necessary to make typed dict classes pickleable.
ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
return _TypedDictMeta(typename, (), ns, total=total)
_typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
' /, *, total=True, **kwargs)')
class _TypedDictMeta(type):
def __init__(cls, name, bases, ns, total=True):
super().__init__(name, bases, ns)
def __new__(cls, name, bases, ns, total=True):
# Create new typed dict class object.
# This method is called directly when TypedDict is subclassed,
# or via _typeddict_new when TypedDict is instantiated. This way
# TypedDict supports all three syntaxes described in its docstring.
# Subclasses and instances of TypedDict return actual dictionaries
# via _dict_new.
ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
tp_dict = super().__new__(cls, name, (dict,), ns)
annotations = {}
own_annotations = ns.get('__annotations__', {})
own_annotation_keys = set(own_annotations.keys())
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
own_annotations = {
n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
}
required_keys = set()
optional_keys = set()
for base in bases:
annotations.update(base.__dict__.get('__annotations__', {}))
required_keys.update(base.__dict__.get('__required_keys__', ()))
optional_keys.update(base.__dict__.get('__optional_keys__', ()))
annotations.update(own_annotations)
if total:
required_keys.update(own_annotation_keys)
else:
optional_keys.update(own_annotation_keys)
tp_dict.__annotations__ = annotations
tp_dict.__required_keys__ = frozenset(required_keys)
tp_dict.__optional_keys__ = frozenset(optional_keys)
if not hasattr(tp_dict, '__total__'):
tp_dict.__total__ = total
return tp_dict
__instancecheck__ = __subclasscheck__ = _check_fails
TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
TypedDict.__module__ = __name__
TypedDict.__doc__ = \
"""A simple typed name space. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type that expects all of its
instances to have a certain set of keys, with each key
associated with a value of a consistent type. This expectation
is not checked at runtime but is only enforced by type checkers.
Usage::
class Point2D(TypedDict):
x: int
y: int
label: str
a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
The type info can be accessed via the Point2D.__annotations__ dict, and
the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
TypedDict supports two additional equivalent forms::
Point2D = TypedDict('Point2D', x=int, y=int, label=str)
Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
The class syntax is only supported in Python 3.6+, while two other
syntax forms work for Python 2.7 and 3.2+
"""
# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints)
if hasattr(typing, 'Annotated'):
Annotated = typing.Annotated
get_type_hints = typing.get_type_hints
# Not exported and not a public API, but needed for get_origin() and get_args()
# to work.
_AnnotatedAlias = typing._AnnotatedAlias
# 3.7-3.8
elif PEP_560:
class _AnnotatedAlias(typing._GenericAlias, _root=True):
"""Runtime representation of an annotated type.
At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
with extra annotations. The alias behaves like a normal typing alias,
instantiating is the same as instantiating the underlying type, binding
it to types is also the same.
"""
def __init__(self, origin, metadata):
if isinstance(origin, _AnnotatedAlias):
metadata = origin.__metadata__ + metadata
origin = origin.__origin__
super().__init__(origin, origin)
self.__metadata__ = metadata
def copy_with(self, params):
assert len(params) == 1
new_type = params[0]
return _AnnotatedAlias(new_type, self.__metadata__)
def __repr__(self):
return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
f"{', '.join(repr(a) for a in self.__metadata__)}]")
def __reduce__(self):
return operator.getitem, (
Annotated, (self.__origin__,) + self.__metadata__
)
def __eq__(self, other):
if not isinstance(other, _AnnotatedAlias):
return NotImplemented
if self.__origin__ != other.__origin__:
return False
return self.__metadata__ == other.__metadata__
def __hash__(self):
return hash((self.__origin__, self.__metadata__))
class Annotated:
"""Add context specific metadata to a type.
Example: Annotated[int, runtime_check.Unsigned] indicates to the
hypothetical runtime_check module that this type is an unsigned int.
Every other consumer of this type can ignore this metadata and treat
this type as int.
The first argument to Annotated must be a valid type (and will be in
the __origin__ field), the remaining arguments are kept as a tuple in
the __metadata__ field.
Details:
- It's an error to call `Annotated` with less than two arguments.
- Nested Annotated are flattened::
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
- Instantiating an annotated type is equivalent to instantiating the
underlying type::
Annotated[C, Ann1](5) == C(5)
- Annotated can be used as a generic type alias::
Optimized = Annotated[T, runtime.Optimize()]
Optimized[int] == Annotated[int, runtime.Optimize()]
OptimizedList = Annotated[List[T], runtime.Optimize()]
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
"""
__slots__ = ()
def __new__(cls, *args, **kwargs):
raise TypeError("Type Annotated cannot be instantiated.")
@typing._tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple) or len(params) < 2:
raise TypeError("Annotated[...] should be used "
"with at least two arguments (a type and an "
"annotation).")
msg = "Annotated[t, ...]: t must be a type."
origin = typing._type_check(params[0], msg)
metadata = tuple(params[1:])
return _AnnotatedAlias(origin, metadata)
def __init_subclass__(cls, *args, **kwargs):
raise TypeError(
f"Cannot subclass {cls.__module__}.Annotated"
)
def _strip_annotations(t):
"""Strips the annotations from a given type.
"""
if isinstance(t, _AnnotatedAlias):
return _strip_annotations(t.__origin__)
if isinstance(t, typing._GenericAlias):
stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
if stripped_args == t.__args__:
return t
res = t.copy_with(stripped_args)
res._special = t._special
return res
return t
def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
"""Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
forward references encoded as string literals, adds Optional[t] if a
default value equal to None is set and recursively replaces all
'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
inherited members.
TypeError is raised if the argument is not of a type that can contain
annotations, and an empty dictionary is returned if no annotations are
present.
BEWARE -- the behavior of globalns and localns is counterintuitive
(unless you are familiar with how eval() and exec() work). The
search order is locals first, then globals.
- If no dict arguments are passed, an attempt is made to use the
globals from obj (or the respective module's globals for classes),
and these are also used as the locals. If the object does not appear
to have globals, an empty dictionary is used.
- If one dict argument is passed, it is used for both globals and
locals.
- If two dict arguments are passed, they specify globals and
locals, respectively.
"""
hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
if include_extras:
return hint
return {k: _strip_annotations(t) for k, t in hint.items()}
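# Added usage sketch (hedged; 'square' is a hypothetical function):
# include_extras=True keeps Annotated metadata, the default strips it.
#
# >>> def square(x: Annotated[int, 'metres']) -> int: ...
# >>> get_type_hints(square)['x']
# <class 'int'>
# >>> get_type_hints(square, include_extras=True)['x']
# typing_extensions.Annotated[int, 'metres']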
# 3.6
else:
def _is_dunder(name):
"""Returns True if name is a __dunder_variable_name__."""
return len(name) > 4 and name.startswith('__') and name.endswith('__')
# Prior to Python 3.7 types did not have `copy_with`. A lot of the equality
# checks, argument expansion etc. are done on the _subs_tree. As a result we
# can't provide a get_type_hints function that strips out annotations.
class AnnotatedMeta(typing.GenericMeta):
"""Metaclass for Annotated"""
def __new__(cls, name, bases, namespace, **kwargs):
if any(b is not object for b in bases):
raise TypeError("Cannot subclass " + str(Annotated))
return super().__new__(cls, name, bases, namespace, **kwargs)
@property
def __metadata__(self):
return self._subs_tree()[2]
def _tree_repr(self, tree):
cls, origin, metadata = tree
if not isinstance(origin, tuple):
tp_repr = typing._type_repr(origin)
else:
tp_repr = origin[0]._tree_repr(origin)
metadata_reprs = ", ".join(repr(arg) for arg in metadata)
return f'{cls}[{tp_repr}, {metadata_reprs}]'
def _subs_tree(self, tvars=None, args=None): # noqa
if self is Annotated:
return Annotated
res = super()._subs_tree(tvars=tvars, args=args)
# Flatten nested Annotated
if isinstance(res[1], tuple) and res[1][0] is Annotated:
sub_tp = res[1][1]
sub_annot = res[1][2]
return (Annotated, sub_tp, sub_annot + res[2])
return res
def _get_cons(self):
"""Return the class used to create instance of this type."""
if self.__origin__ is None:
raise TypeError("Cannot get the underlying type of a "
"non-specialized Annotated type.")
tree = self._subs_tree()
while isinstance(tree, tuple) and tree[0] is Annotated:
tree = tree[1]
if isinstance(tree, tuple):
return tree[0]
else:
return tree
@typing._tp_cache
def __getitem__(self, params):
if not isinstance(params, tuple):
params = (params,)
if self.__origin__ is not None: # specializing an instantiated type
return super().__getitem__(params)
elif not isinstance(params, tuple) or len(params) < 2:
raise TypeError("Annotated[...] should be instantiated "
"with at least two arguments (a type and an "
"annotation).")
else:
msg = "Annotated[t, ...]: t must be a type."
tp = typing._type_check(params[0], msg)
metadata = tuple(params[1:])
return self.__class__(
self.__name__,
self.__bases__,
_no_slots_copy(self.__dict__),
tvars=_type_vars((tp,)),
# Metadata is a tuple so it won't be touched by _replace_args et al.
args=(tp, metadata),
origin=self,
)
def __call__(self, *args, **kwargs):
cons = self._get_cons()
result = cons(*args, **kwargs)
try:
result.__orig_class__ = self
except AttributeError:
pass
return result
def __getattr__(self, attr):
# For simplicity we just don't relay all dunder names
if self.__origin__ is not None and not _is_dunder(attr):
return getattr(self._get_cons(), attr)
raise AttributeError(attr)
def __setattr__(self, attr, value):
if _is_dunder(attr) or attr.startswith('_abc_'):
super().__setattr__(attr, value)
elif self.__origin__ is None:
raise AttributeError(attr)
else:
setattr(self._get_cons(), attr, value)
def __instancecheck__(self, obj):
raise TypeError("Annotated cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Annotated cannot be used with issubclass().")
class Annotated(metaclass=AnnotatedMeta):
"""Add context specific metadata to a type.
Example: Annotated[int, runtime_check.Unsigned] indicates to the
hypothetical runtime_check module that this type is an unsigned int.
Every other consumer of this type can ignore this metadata and treat
this type as int.
The first argument to Annotated must be a valid type, the remaining
arguments are kept as a tuple in the __metadata__ field.
Details:
- It's an error to call `Annotated` with less than two arguments.
- Nested Annotated are flattened::
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
- Instantiating an annotated type is equivalent to instantiating the
underlying type::
Annotated[C, Ann1](5) == C(5)
- Annotated can be used as a generic type alias::
Optimized = Annotated[T, runtime.Optimize()]
Optimized[int] == Annotated[int, runtime.Optimize()]
OptimizedList = Annotated[List[T], runtime.Optimize()]
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
"""
# Python 3.8 has get_origin() and get_args() but those implementations aren't
# Annotated-aware, so we can't use those. Python 3.9's versions don't support
# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
if sys.version_info[:2] >= (3, 10):
get_origin = typing.get_origin
get_args = typing.get_args
# 3.7-3.9
elif PEP_560:
try:
# 3.9+
from typing import _BaseGenericAlias
except ImportError:
_BaseGenericAlias = typing._GenericAlias
try:
# 3.9+
from typing import GenericAlias
except ImportError:
GenericAlias = typing._GenericAlias
def get_origin(tp):
"""Get the unsubscripted version of a type.
This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
and Annotated. Return None for unsupported types. Examples::
get_origin(Literal[42]) is Literal
get_origin(int) is None
get_origin(ClassVar[int]) is ClassVar
get_origin(Generic) is Generic
get_origin(Generic[T]) is Generic
get_origin(Union[T, int]) is Union
get_origin(List[Tuple[T, T]][int]) == list
get_origin(P.args) is P
"""
if isinstance(tp, _AnnotatedAlias):
return Annotated
if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias,
ParamSpecArgs, ParamSpecKwargs)):
return tp.__origin__
if tp is typing.Generic:
return typing.Generic
return None
def get_args(tp):
"""Get type arguments with all substitutions performed.
For unions, basic simplifications used by Union constructor are performed.
Examples::
get_args(Dict[str, int]) == (str, int)
get_args(int) == ()
get_args(Union[int, Union[T, int], str][int]) == (int, str)
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
get_args(Callable[[], T][int]) == ([], int)
"""
if isinstance(tp, _AnnotatedAlias):
return (tp.__origin__,) + tp.__metadata__
if isinstance(tp, (typing._GenericAlias, GenericAlias)):
if getattr(tp, "_special", False):
return ()
res = tp.__args__
if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
res = (list(res[:-1]), res[-1])
return res
return ()
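# Editorial sketch (not part of the vendored file): unlike the stdlib versions on
# 3.7-3.9, the helpers above understand Annotated. Commented illustration:
#
#   tp = Annotated[int, 'meta']
#   get_origin(tp) is Annotated             # True
#   get_args(tp) == (int, 'meta')           # metadata follows the type
#   get_args(Callable[[int], str]) == ([int], str) # Callable args are re-listed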
# 3.10+
if hasattr(typing, 'TypeAlias'):
TypeAlias = typing.TypeAlias
# 3.9
elif sys.version_info[:2] >= (3, 9):
class _TypeAliasForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
@_TypeAliasForm
def TypeAlias(self, parameters):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
raise TypeError(f"{self} is not subscriptable")
# 3.7-3.8
elif sys.version_info[:2] >= (3, 7):
class _TypeAliasForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
TypeAlias = _TypeAliasForm('TypeAlias',
doc="""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example
above.""")
# 3.6
else:
class _TypeAliasMeta(typing.TypingMeta):
"""Metaclass for TypeAlias"""
def __repr__(self):
return 'typing_extensions.TypeAlias'
class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("TypeAlias cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("TypeAlias cannot be used with issubclass().")
def __repr__(self):
return 'typing_extensions.TypeAlias'
TypeAlias = _TypeAliasBase(_root=True)
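# Editorial sketch (not part of the vendored file): TypeAlias only marks an
# assignment for static checkers; at runtime it is inert. Commented illustration:
#
#   Predicate: TypeAlias = Callable[..., bool] # explicit alias, checker-visible
#   Predicate = Callable[..., bool]            # same runtime value, but ambiguous to a checker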
# Python 3.10+ has PEP 612
if hasattr(typing, 'ParamSpecArgs'):
ParamSpecArgs = typing.ParamSpecArgs
ParamSpecKwargs = typing.ParamSpecKwargs
# 3.6-3.9
else:
class _Immutable:
"""Mixin to indicate that object should not be copied."""
__slots__ = ()
def __copy__(self):
return self
def __deepcopy__(self, memo):
return self
class ParamSpecArgs(_Immutable):
"""The args for a ParamSpec object.
Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
ParamSpecArgs objects have a reference back to their ParamSpec:
P.args.__origin__ is P
This type is meant for runtime introspection and has no special meaning to
static type checkers.
"""
def __init__(self, origin):
self.__origin__ = origin
def __repr__(self):
return f"{self.__origin__.__name__}.args"
class ParamSpecKwargs(_Immutable):
"""The kwargs for a ParamSpec object.
Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
ParamSpecKwargs objects have a reference back to their ParamSpec:
P.kwargs.__origin__ is P
This type is meant for runtime introspection and has no special meaning to
static type checkers.
"""
def __init__(self, origin):
self.__origin__ = origin
def __repr__(self):
return f"{self.__origin__.__name__}.kwargs"
# 3.10+
if hasattr(typing, 'ParamSpec'):
ParamSpec = typing.ParamSpec
# 3.6-3.9
else:
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
class ParamSpec(list):
"""Parameter specification variable.
Usage::
P = ParamSpec('P')
Parameter specification variables exist primarily for the benefit of static
type checkers. They are used to forward the parameter types of one
callable to another callable, a pattern commonly found in higher order
functions and decorators. They are only valid when used in ``Concatenate``,
or as the first argument to ``Callable``. In Python 3.10 and higher,
they are also supported in user-defined Generics at runtime.
See class Generic for more information on generic types. An
example for annotating a decorator::
T = TypeVar('T')
P = ParamSpec('P')
def add_logging(f: Callable[P, T]) -> Callable[P, T]:
'''A type-safe decorator to add logging to a function.'''
def inner(*args: P.args, **kwargs: P.kwargs) -> T:
logging.info(f'{f.__name__} was called')
return f(*args, **kwargs)
return inner
@add_logging
def add_two(x: float, y: float) -> float:
'''Add two numbers together.'''
return x + y
Parameter specification variables defined with covariant=True or
contravariant=True can be used to declare covariant or contravariant
generic types. These keyword arguments are valid, but their actual semantics
are yet to be decided. See PEP 612 for details.
Parameter specification variables can be introspected. e.g.:
P.__name__ == 'P'
P.__bound__ == None
P.__covariant__ == False
P.__contravariant__ == False
Note that only parameter specification variables defined in global scope can
be pickled.
"""
# Trick Generic __parameters__.
__class__ = typing.TypeVar
@property
def args(self):
return ParamSpecArgs(self)
@property
def kwargs(self):
return ParamSpecKwargs(self)
def __init__(self, name, *, bound=None, covariant=False, contravariant=False):
super().__init__([self])
self.__name__ = name
self.__covariant__ = bool(covariant)
self.__contravariant__ = bool(contravariant)
if bound:
self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
else:
self.__bound__ = None
# for pickling:
try:
def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
def_mod = None
if def_mod != 'typing_extensions':
self.__module__ = def_mod
def __repr__(self):
if self.__covariant__:
prefix = '+'
elif self.__contravariant__:
prefix = '-'
else:
prefix = '~'
return prefix + self.__name__
def __hash__(self):
return object.__hash__(self)
def __eq__(self, other):
return self is other
def __reduce__(self):
return self.__name__
# Hack to get typing._type_check to pass.
def __call__(self, *args, **kwargs):
pass
if not PEP_560:
# Only needed in 3.6.
def _get_type_vars(self, tvars):
if self not in tvars:
tvars.append(self)
# 3.6-3.9
if not hasattr(typing, 'Concatenate'):
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
class _ConcatenateGenericAlias(list):
# Trick Generic into looking into this for __parameters__.
if PEP_560:
__class__ = typing._GenericAlias
else:
__class__ = typing._TypingBase
# Flag in 3.8.
_special = False
# Attribute in 3.6 and earlier.
_gorg = typing.Generic
def __init__(self, origin, args):
super().__init__(args)
self.__origin__ = origin
self.__args__ = args
def __repr__(self):
_type_repr = typing._type_repr
return (f'{_type_repr(self.__origin__)}'
f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
def __hash__(self):
return hash((self.__origin__, self.__args__))
# Hack to get typing._type_check to pass in Generic.
def __call__(self, *args, **kwargs):
pass
@property
def __parameters__(self):
return tuple(
tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
)
if not PEP_560:
# Only required in 3.6.
def _get_type_vars(self, tvars):
if self.__origin__ and self.__parameters__:
typing._get_type_vars(self.__parameters__, tvars)
# 3.6-3.9
@typing._tp_cache
def _concatenate_getitem(self, parameters):
if parameters == ():
raise TypeError("Cannot take a Concatenate of no types.")
if not isinstance(parameters, tuple):
parameters = (parameters,)
if not isinstance(parameters[-1], ParamSpec):
raise TypeError("The last parameter to Concatenate should be a "
"ParamSpec variable.")
msg = "Concatenate[arg, ...]: each arg must be a type."
parameters = tuple(typing._type_check(p, msg) for p in parameters)
return _ConcatenateGenericAlias(self, parameters)
# 3.10+
if hasattr(typing, 'Concatenate'):
Concatenate = typing.Concatenate
_ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa
# 3.9
elif sys.version_info[:2] >= (3, 9):
@_TypeAliasForm
def Concatenate(self, parameters):
"""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
higher order function which adds, removes or transforms parameters of a
callable.
For example::
Callable[Concatenate[int, P], int]
See PEP 612 for detailed information.
"""
return _concatenate_getitem(self, parameters)
# 3.7-3.8
elif sys.version_info[:2] >= (3, 7):
class _ConcatenateForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
return _concatenate_getitem(self, parameters)
Concatenate = _ConcatenateForm(
'Concatenate',
doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
higher order function which adds, removes or transforms parameters of a
callable.
For example::
Callable[Concatenate[int, P], int]
See PEP 612 for detailed information.
""")
# 3.6
else:
class _ConcatenateAliasMeta(typing.TypingMeta):
"""Metaclass for Concatenate."""
def __repr__(self):
return 'typing_extensions.Concatenate'
class _ConcatenateAliasBase(typing._FinalTypingBase,
metaclass=_ConcatenateAliasMeta,
_root=True):
"""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
higher order function which adds, removes or transforms parameters of a
callable.
For example::
Callable[Concatenate[int, P], int]
See PEP 612 for detailed information.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("Concatenate cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Concatenate cannot be used with issubclass().")
def __repr__(self):
return 'typing_extensions.Concatenate'
def __getitem__(self, parameters):
return _concatenate_getitem(self, parameters)
Concatenate = _ConcatenateAliasBase(_root=True)
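# Editorial sketch (not part of the vendored file): a decorator that prepends an
# argument is the canonical Concatenate use. Commented illustration; Lock, lock,
# T and P are assumed names, not defined here:
#
#   P = ParamSpec('P')
#   def with_lock(f: Callable[Concatenate[Lock, P], T]) -> Callable[P, T]:
#       def inner(*args: P.args, **kwargs: P.kwargs) -> T:
#           return f(lock, *args, **kwargs) # supplies the prepended Lock argument
#       return inner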
# 3.10+
if hasattr(typing, 'TypeGuard'):
TypeGuard = typing.TypeGuard
# 3.9
elif sys.version_info[:2] >= (3, 9):
class _TypeGuardForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
@_TypeGuardForm
def TypeGuard(self, parameters):
"""Special typing form used to annotate the return type of a user-defined
type guard function. ``TypeGuard`` only accepts a single type argument.
At runtime, functions marked this way should return a boolean.
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
conditional expression here is sometimes referred to as a "type guard".
Sometimes it would be convenient to use a user-defined boolean function
as a type guard. Such a function should use ``TypeGuard[...]`` as its
return type to alert static type checkers to this intention.
Using ``-> TypeGuard`` tells the static type checker that for a given
function:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
is the type inside ``TypeGuard``.
For example::
def is_str(val: Union[str, float]):
# "isinstance" type guard
if isinstance(val, str):
# Type of ``val`` is narrowed to ``str``
...
else:
# Else, type of ``val`` is narrowed to ``float``.
...
Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
form of ``TypeA`` (it can even be a wider form) and this may lead to
type-unsafe results. The main reason is to allow for things like
narrowing ``List[object]`` to ``List[str]`` even though the latter is not
a subtype of the former, since ``List`` is invariant. The responsibility of
writing type-safe type guards is left to the user.
``TypeGuard`` also works with type variables. For more information, see
PEP 647 (User-Defined Type Guards).
"""
item = typing._type_check(parameters, f'{self} accepts only single type.')
return typing._GenericAlias(self, (item,))
# 3.7-3.8
elif sys.version_info[:2] >= (3, 7):
class _TypeGuardForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
item = typing._type_check(parameters,
f'{self._name} accepts only a single type')
return typing._GenericAlias(self, (item,))
TypeGuard = _TypeGuardForm(
'TypeGuard',
doc="""Special typing form used to annotate the return type of a user-defined
type guard function. ``TypeGuard`` only accepts a single type argument.
At runtime, functions marked this way should return a boolean.
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
conditional expression here is sometimes referred to as a "type guard".
Sometimes it would be convenient to use a user-defined boolean function
as a type guard. Such a function should use ``TypeGuard[...]`` as its
return type to alert static type checkers to this intention.
Using ``-> TypeGuard`` tells the static type checker that for a given
function:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
is the type inside ``TypeGuard``.
For example::
def is_str(val: Union[str, float]):
# "isinstance" type guard
if isinstance(val, str):
# Type of ``val`` is narrowed to ``str``
...
else:
# Else, type of ``val`` is narrowed to ``float``.
...
Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
form of ``TypeA`` (it can even be a wider form) and this may lead to
type-unsafe results. The main reason is to allow for things like
narrowing ``List[object]`` to ``List[str]`` even though the latter is not
a subtype of the former, since ``List`` is invariant. The responsibility of
writing type-safe type guards is left to the user.
``TypeGuard`` also works with type variables. For more information, see
PEP 647 (User-Defined Type Guards).
""")
# 3.6
else:
class _TypeGuard(typing._FinalTypingBase, _root=True):
"""Special typing form used to annotate the return type of a user-defined
type guard function. ``TypeGuard`` only accepts a single type argument.
At runtime, functions marked this way should return a boolean.
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
conditional expression here is sometimes referred to as a "type guard".
Sometimes it would be convenient to use a user-defined boolean function
as a type guard. Such a function should use ``TypeGuard[...]`` as its
return type to alert static type checkers to this intention.
Using ``-> TypeGuard`` tells the static type checker that for a given
function:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
is the type inside ``TypeGuard``.
For example::
def is_str(val: Union[str, float]):
# "isinstance" type guard
if isinstance(val, str):
# Type of ``val`` is narrowed to ``str``
...
else:
# Else, type of ``val`` is narrowed to ``float``.
...
Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
form of ``TypeA`` (it can even be a wider form) and this may lead to
type-unsafe results. The main reason is to allow for things like
narrowing ``List[object]`` to ``List[str]`` even though the latter is not
a subtype of the former, since ``List`` is invariant. The responsibility of
writing type-safe type guards is left to the user.
``TypeGuard`` also works with type variables. For more information, see
PEP 647 (User-Defined Type Guards).
"""
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
f'{cls.__name__[1:]} accepts only a single type.'),
_root=True)
raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += f'[{typing._type_repr(self.__type__)}]'
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _TypeGuard):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
TypeGuard = _TypeGuard(_root=True)
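# Editorial sketch (not part of the vendored file): the return annotation is what
# makes a function a type guard. Commented illustration, assuming List is
# imported from typing:
#
#   def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
#       return all(isinstance(x, str) for x in val)
#
#   if is_str_list(items): # a checker narrows items to List[str] in this block
#       ...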
if hasattr(typing, "Self"):
Self = typing.Self
elif sys.version_info[:2] >= (3, 9):
class _SelfForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
@_SelfForm
def Self(self, params):
"""Used to spell the type of "self" in classes.
Example::
from typing import Self
class ReturnsSelf:
def parse(self, data: bytes) -> Self:
...
return self
"""
raise TypeError(f"{self} is not subscriptable")
elif sys.version_info[:2] >= (3, 7):
class _SelfForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
Self = _SelfForm(
"Self",
doc="""Used to spell the type of "self" in classes.
Example::
from typing import Self
class ReturnsSelf:
def parse(self, data: bytes) -> Self:
...
return self
"""
)
else:
class _Self(typing._FinalTypingBase, _root=True):
"""Used to spell the type of "self" in classes.
Example::
from typing import Self
class ReturnsSelf:
def parse(self, data: bytes) -> Self:
...
return self
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError(f"{self} cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError(f"{self} cannot be used with issubclass().")
Self = _Self(_root=True)
if hasattr(typing, 'Required'):
Required = typing.Required
NotRequired = typing.NotRequired
elif sys.version_info[:2] >= (3, 9):
class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
@_ExtensionsSpecialForm
def Required(self, parameters):
"""A special typing construct to mark a key of a total=False TypedDict
as required. For example:
class Movie(TypedDict, total=False):
title: Required[str]
year: int
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
There is no runtime checking that a required key is actually provided
when instantiating a related TypedDict.
"""
item = typing._type_check(parameters, f'{self._name} accepts only single type')
return typing._GenericAlias(self, (item,))
@_ExtensionsSpecialForm
def NotRequired(self, parameters):
"""A special typing construct to mark a key of a TypedDict as
potentially missing. For example:
class Movie(TypedDict):
title: str
year: NotRequired[int]
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
"""
item = typing._type_check(parameters, f'{self._name} accepts only single type')
return typing._GenericAlias(self, (item,))
elif sys.version_info[:2] >= (3, 7):
class _RequiredForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
item = typing._type_check(parameters,
'{} accepts only single type'.format(self._name))
return typing._GenericAlias(self, (item,))
Required = _RequiredForm(
'Required',
doc="""A special typing construct to mark a key of a total=False TypedDict
as required. For example:
class Movie(TypedDict, total=False):
title: Required[str]
year: int
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
There is no runtime checking that a required key is actually provided
when instantiating a related TypedDict.
""")
NotRequired = _RequiredForm(
'NotRequired',
doc="""A special typing construct to mark a key of a TypedDict as
potentially missing. For example:
class Movie(TypedDict):
title: str
year: NotRequired[int]
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
""")
else:
# NOTE: Modeled after _Final's implementation when _FinalTypingBase available
class _MaybeRequired(typing._FinalTypingBase, _root=True):
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
'{} accepts only single type.'.format(cls.__name__[1:])),
_root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
class _Required(_MaybeRequired, _root=True):
"""A special typing construct to mark a key of a total=False TypedDict
as required. For example:
class Movie(TypedDict, total=False):
title: Required[str]
year: int
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
There is no runtime checking that a required key is actually provided
when instantiating a related TypedDict.
"""
class _NotRequired(_MaybeRequired, _root=True):
"""A special typing construct to mark a key of a TypedDict as
potentially missing. For example:
class Movie(TypedDict):
title: str
year: NotRequired[int]
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
"""
Required = _Required(_root=True)
NotRequired = _NotRequired(_root=True)
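# Editorial sketch (not part of the vendored file): Required and NotRequired can
# be mixed in one TypedDict regardless of its totality. Commented illustration:
#
#   class Movie(TypedDict, total=False):
#       title: Required[str] # must be present despite total=False
#       year: int            # optional because total=False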
| 99,348 | Python | .py | 2,073 | 36.509407 | 110 | 0.584408 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,614 | dataclasses.py | zatosource_zato/code/zato-common/src/zato/common/ext/dataclasses.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# flake8: noqa
# Be explicit about which import error we want to catch
try:
import dataclasses
# Python 3.6
except ImportError:
from zato.common.ext._dataclasses import _FIELDS, _PARAMS
from zato.common.ext._dataclasses import * # noqa
# Python 3.7+, where dataclasses is in the standard library
else:
from dataclasses import _FIELDS, _PARAMS
from dataclasses import *
| 501 | Python | .py | 17 | 26.941176 | 64 | 0.732218 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,615 | validate_.py | zatosource_zato/code/zato-common/src/zato/common/ext/validate_.py |
"""
This module is a modified vendor copy of the configobj package from https://pypi.org/project/configobj/
# validate.py
# A Validator object
# Copyright (C) 2005-2014:
# (name) : (email)
# Michael Foord: fuzzyman AT voidspace DOT org DOT uk
# Mark Andrews: mark AT la-la DOT com
# Nicola Larosa: nico AT tekNico DOT net
# Rob Dennis: rdennis AT gmail DOT com
# Eli Courtwright: eli AT courtwright DOT org
# This software is licensed under the terms of the BSD license.
# http://opensource.org/licenses/BSD-3-Clause
# ConfigObj 5 - main repository for documentation and issue tracking:
# https://github.com/DiffSK/configobj
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the names of Michael Foord, Eli Courtwright or Rob Dennis,
nor the name of Voidspace, may be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# flake8: noqa
"""
The Validator object is used to check that supplied values
conform to a specification.
The value can be supplied as a string - e.g. from a config file.
In this case the check will also *convert* the value to
the required type. This allows you to add validation
as a transparent layer to access data stored as strings.
The validation checks that the data is correct *and*
converts it to the expected type.
Some standard checks are provided for basic data types.
Additional checks are easy to write. They can be
provided when the ``Validator`` is instantiated or
added afterwards.
The standard functions work with the following basic data types :
* integers
* floats
* booleans
* strings
* ip_addr
plus lists of these datatypes
Adding additional checks is done through coding simple functions.
The full set of standard checks are :
* 'integer': matches integer values (including negative)
Takes optional 'min' and 'max' arguments : ::
integer()
integer(3, 9) # any value from 3 to 9
integer(min=0) # any positive value
integer(max=9)
* 'float': matches float values
Has the same parameters as the integer check.
* 'boolean': matches boolean values - ``True`` or ``False``
Acceptable string values for True are :
true, on, yes, 1
Acceptable string values for False are :
false, off, no, 0
Any other value raises an error.
* 'ip_addr': matches an Internet Protocol address, v.4, represented
by a dotted-quad string, i.e. '1.2.3.4'.
* 'string': matches any string.
Takes optional keyword args 'min' and 'max'
to specify min and max lengths of the string.
* 'list': matches any list.
Takes optional keyword args 'min', and 'max' to specify min and
max sizes of the list. (Always returns a list.)
* 'tuple': matches any tuple.
Takes optional keyword args 'min', and 'max' to specify min and
max sizes of the tuple. (Always returns a tuple.)
* 'int_list': Matches a list of integers.
Takes the same arguments as list.
* 'float_list': Matches a list of floats.
Takes the same arguments as list.
* 'bool_list': Matches a list of boolean values.
Takes the same arguments as list.
* 'ip_addr_list': Matches a list of IP addresses.
Takes the same arguments as list.
* 'string_list': Matches a list of strings.
Takes the same arguments as list.
* 'mixed_list': Matches a list with different types in
specific positions. List size must match
the number of arguments.
Each position can be one of :
'integer', 'float', 'ip_addr', 'string', 'boolean'
So to specify a list with two strings followed
by two integers, you write the check as : ::
mixed_list('string', 'string', 'integer', 'integer')
* 'pass': This check matches everything! It never fails
and the value is unchanged.
It is also the default if no check is specified.
* 'option': This check matches any from a list of options.
You specify this check with : ::
option('option 1', 'option 2', 'option 3')
You can supply a default value (returned if no value is supplied)
using the default keyword argument.
You specify a list argument for default using a list constructor syntax in
the check : ::
checkname(arg1, arg2, default=list('val 1', 'val 2', 'val 3'))
A badly formatted set of arguments will raise a ``VdtParamError``.
"""
__version__ = '1.0.1'
__all__ = (
'__version__',
'dottedQuadToNum',
'numToDottedQuad',
'ValidateError',
'VdtUnknownCheckError',
'VdtParamError',
'VdtTypeError',
'VdtValueError',
'VdtValueTooSmallError',
'VdtValueTooBigError',
'VdtValueTooShortError',
'VdtValueTooLongError',
'VdtMissingValue',
'Validator',
'is_integer',
'is_float',
'is_boolean',
'is_list',
'is_tuple',
'is_ip_addr',
'is_string',
'is_int_list',
'is_bool_list',
'is_float_list',
'is_string_list',
'is_ip_addr_list',
'is_mixed_list',
'is_option',
'__docformat__',
)
import re
import sys
from pprint import pprint
#TODO - #21 - six is part of the repo now, but we didn't switch over to it here
# this could be replaced if six is used for compatibility, or there are no
# more assertions about items being a string
if sys.version_info < (3,):
string_type = basestring
else:
string_type = str
# so tests that care about unicode on 2.x can specify unicode, and the same
# tests when run on 3.x won't complain about an undefined name "unicode"
# since all strings are unicode on 3.x we just want to pass it through
# unchanged
unicode = lambda x: x
# in python 3, all ints are equivalent to python 2 longs, and they'll
# never show "L" in the repr
long = int
_list_arg = re.compile(r'''
(?:
([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\(
(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
)
\)
)
''', re.VERBOSE | re.DOTALL) # two groups
_list_members = re.compile(r'''
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
''', re.VERBOSE | re.DOTALL) # one group
_paramstring = r'''
(?:
(
(?:
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
\)
)|
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?)| # unquoted
(?: # keyword argument
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
)
)
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
)
'''
_matchstring = '^%s*' % _paramstring
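# Editorial note (not part of the vendored module): examples of argument strings
# the grammar above accepts, as later consumed by Validator._parse_check:
#
#   0, 9                     # positional args
#   min=0, max=9             # keyword args
#   'a', "b"                 # quoted positionals
#   default=list('x', 'y')   # keyword list() argument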
# Python pre 2.2.1 doesn't have bool
try:
bool
except NameError:
def bool(val):
"""Simple boolean equivalent function. """
if val:
return 1
else:
return 0
def dottedQuadToNum(ip):
"""
Convert decimal dotted quad string to long integer
>>> int(dottedQuadToNum('1 '))
1
>>> int(dottedQuadToNum(' 1.2'))
16777218
>>> int(dottedQuadToNum(' 1.2.3 '))
16908291
>>> int(dottedQuadToNum('1.2.3.4'))
16909060
>>> dottedQuadToNum('255.255.255.255')
4294967295
>>> dottedQuadToNum('255.255.255.256')
Traceback (most recent call last):
ValueError: Not a good dotted-quad IP: 255.255.255.256
"""
# import here to avoid it when ip_addr values are not used
import socket, struct
try:
return struct.unpack('!L',
socket.inet_aton(ip.strip()))[0]
except socket.error:
raise ValueError('Not a good dotted-quad IP: %s' % ip)
def numToDottedQuad(num):
"""
Convert int or long int to dotted quad string
>>> numToDottedQuad(long(-1))
Traceback (most recent call last):
ValueError: Not a good numeric IP: -1
>>> numToDottedQuad(long(1))
'0.0.0.1'
>>> numToDottedQuad(long(16777218))
'1.0.0.2'
>>> numToDottedQuad(long(16908291))
'1.2.0.3'
>>> numToDottedQuad(long(16909060))
'1.2.3.4'
>>> numToDottedQuad(long(4294967295))
'255.255.255.255'
>>> numToDottedQuad(long(4294967296))
Traceback (most recent call last):
ValueError: Not a good numeric IP: 4294967296
>>> numToDottedQuad(-1)
Traceback (most recent call last):
ValueError: Not a good numeric IP: -1
>>> numToDottedQuad(1)
'0.0.0.1'
>>> numToDottedQuad(16777218)
'1.0.0.2'
>>> numToDottedQuad(16908291)
'1.2.0.3'
>>> numToDottedQuad(16909060)
'1.2.3.4'
>>> numToDottedQuad(4294967295)
'255.255.255.255'
>>> numToDottedQuad(4294967296)
Traceback (most recent call last):
ValueError: Not a good numeric IP: 4294967296
"""
# import here to avoid it when ip_addr values are not used
import socket, struct
# no need to intercept here, 4294967295L is fine
if num > long(4294967295) or num < 0:
raise ValueError('Not a good numeric IP: %s' % num)
try:
return socket.inet_ntoa(
struct.pack('!L', long(num)))
except (socket.error, struct.error, OverflowError):
raise ValueError('Not a good numeric IP: %s' % num)
class ValidateError(Exception):
"""
This error indicates that the check failed.
It can be the base class for more specific errors.
Any check function that fails ought to raise this error.
(or a subclass)
>>> raise ValidateError
Traceback (most recent call last):
ValidateError
"""
class VdtMissingValue(ValidateError):
"""No value was supplied to a check that needed one."""
class VdtUnknownCheckError(ValidateError):
"""An unknown check function was requested"""
def __init__(self, value):
"""
>>> raise VdtUnknownCheckError('yoda')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
"""
ValidateError.__init__(self, 'the check "%s" is unknown.' % (value,))
class VdtParamError(SyntaxError):
"""An incorrect parameter was passed"""
def __init__(self, name, value):
"""
>>> raise VdtParamError('yoda', 'jedi')
Traceback (most recent call last):
VdtParamError: passed an incorrect value "jedi" for parameter "yoda".
"""
SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name))
class VdtTypeError(ValidateError):
"""The value supplied was of the wrong type"""
def __init__(self, value):
"""
>>> raise VdtTypeError('jedi')
Traceback (most recent call last):
VdtTypeError: the value "jedi" is of the wrong type.
"""
ValidateError.__init__(self, 'the value "%s" is of the wrong type.' % (value,))
class VdtValueError(ValidateError):
"""The value supplied was of the correct type, but was not an allowed value."""
def __init__(self, value):
"""
>>> raise VdtValueError('jedi')
Traceback (most recent call last):
VdtValueError: the value "jedi" is unacceptable.
"""
ValidateError.__init__(self, 'the value "%s" is unacceptable.' % (value,))
class VdtValueTooSmallError(VdtValueError):
"""The value supplied was of the correct type, but was too small."""
def __init__(self, value):
"""
>>> raise VdtValueTooSmallError('0')
Traceback (most recent call last):
VdtValueTooSmallError: the value "0" is too small.
"""
ValidateError.__init__(self, 'the value "%s" is too small.' % (value,))
class VdtValueTooBigError(VdtValueError):
"""The value supplied was of the correct type, but was too big."""
def __init__(self, value):
"""
>>> raise VdtValueTooBigError('1')
Traceback (most recent call last):
VdtValueTooBigError: the value "1" is too big.
"""
ValidateError.__init__(self, 'the value "%s" is too big.' % (value,))
class VdtValueTooShortError(VdtValueError):
"""The value supplied was of the correct type, but was too short."""
def __init__(self, value):
"""
>>> raise VdtValueTooShortError('jed')
Traceback (most recent call last):
VdtValueTooShortError: the value "jed" is too short.
"""
ValidateError.__init__(
self,
'the value "%s" is too short.' % (value,))
class VdtValueTooLongError(VdtValueError):
"""The value supplied was of the correct type, but was too long."""
def __init__(self, value):
"""
>>> raise VdtValueTooLongError('jedie')
Traceback (most recent call last):
VdtValueTooLongError: the value "jedie" is too long.
"""
ValidateError.__init__(self, 'the value "%s" is too long.' % (value,))
class Validator:
"""
Validator is an object that allows you to register a set of 'checks'.
These checks take input and test that it conforms to the check.
This can also involve converting the value from a string into
the correct datatype.
The ``check`` method takes an input string which configures which
check is to be used and applies that check to a supplied value.
An example input string would be:
'int_range(param1, param2)'
You would then provide something like:
>>> def int_range_check(value, min, max):
... # turn min and max from strings to integers
... min = int(min)
... max = int(max)
... # check that value is of the correct type.
... # possible valid inputs are integers or strings
... # that represent integers
... if not isinstance(value, (int, long, string_type)):
... raise VdtTypeError(value)
... elif isinstance(value, string_type):
... # if we are given a string
... # attempt to convert to an integer
... try:
... value = int(value)
... except ValueError:
... raise VdtValueError(value)
... # check the value is between our constraints
... if not min <= value:
... raise VdtValueTooSmallError(value)
... if not value <= max:
... raise VdtValueTooBigError(value)
... return value
>>> fdict = {'int_range': int_range_check}
>>> vtr1 = Validator(fdict)
>>> vtr1.check('int_range(20, 40)', '30')
30
>>> vtr1.check('int_range(20, 40)', '60')
Traceback (most recent call last):
VdtValueTooBigError: the value "60" is too big.
New functions can be added with : ::
>>> vtr2 = Validator()
>>> vtr2.functions['int_range'] = int_range_check
Or by passing in a dictionary of functions when Validator
is instantiated.
Your functions *can* use keyword arguments,
but the first argument should always be 'value'.
If the function doesn't take additional arguments,
the parentheses are optional in the check.
It can be written with either of : ::
keyword = function_name
keyword = function_name()
The first program to utilise Validator() was Michael Foord's
ConfigObj, an alternative to ConfigParser which supports lists and
can validate a config file using a config schema.
For more details on using Validator with ConfigObj see:
https://configobj.readthedocs.org/en/latest/configobj.html
"""
# this regex does the initial parsing of the checks
_func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL)
# this regex takes apart keyword arguments
_key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$', re.DOTALL)
# this regex finds keyword=list(....) type values
_list_arg = _list_arg
# this regex takes individual values out of lists - in one pass
_list_members = _list_members
# These regexes check a set of arguments for validity
# and then pull the members out
_paramfinder = re.compile(_paramstring, re.VERBOSE | re.DOTALL)
_matchfinder = re.compile(_matchstring, re.VERBOSE | re.DOTALL)
def __init__(self, functions=None):
"""
>>> vtri = Validator()
"""
self.functions = {
'': self._pass,
'integer': is_integer,
'float': is_float,
'boolean': is_boolean,
'ip_addr': is_ip_addr,
'string': is_string,
'list': is_list,
'tuple': is_tuple,
'int_list': is_int_list,
'float_list': is_float_list,
'bool_list': is_bool_list,
'ip_addr_list': is_ip_addr_list,
'string_list': is_string_list,
'mixed_list': is_mixed_list,
'pass': self._pass,
'option': is_option,
'force_list': force_list,
}
if functions is not None:
self.functions.update(functions)
# tekNico: for use by ConfigObj
self.baseErrorClass = ValidateError
self._cache = {}
def check(self, check, value, missing=False):
"""
Usage: check(check, value)
Arguments:
check: string representing check to apply (including arguments)
value: object to be checked
Returns value, converted to correct type if necessary
If the check fails, raises a ``ValidateError`` subclass.
>>> vtor.check('yoda', '')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
>>> vtor.check('yoda()', '')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
>>> vtor.check('string(default="")', '', missing=True)
''
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if missing:
if default is None:
# no information needed here - to be handled by caller
raise VdtMissingValue()
value = self._handle_none(default)
if value is None:
return None
return self._check_value(value, fun_name, fun_args, fun_kwargs)
def _handle_none(self, value):
if value == 'None':
return None
elif value in ("'None'", '"None"'):
# Special case a quoted None
value = self._unquote(value)
return value
def _parse_with_caching(self, check):
if check in self._cache:
fun_name, fun_args, fun_kwargs, default = self._cache[check]
# We call list and dict below to work with *copies* of the data
# rather than the original (which are mutable of course)
fun_args = list(fun_args)
fun_kwargs = dict(fun_kwargs)
else:
fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
fun_kwargs = dict([(str(key), value) for (key, value) in list(fun_kwargs.items())])
self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
return fun_name, fun_args, fun_kwargs, default
def _check_value(self, value, fun_name, fun_args, fun_kwargs):
try:
fun = self.functions[fun_name]
except KeyError:
raise VdtUnknownCheckError(fun_name)
else:
return fun(value, *fun_args, **fun_kwargs)
def _parse_check(self, check):
fun_match = self._func_re.match(check)
if fun_match:
fun_name = fun_match.group(1)
arg_string = fun_match.group(2)
arg_match = self._matchfinder.match(arg_string)
if arg_match is None:
# Bad syntax
raise VdtParamError('syntax', check) # NB: VdtParamError's signature is (name, value)
fun_args = []
fun_kwargs = {}
# pull out args of group 2
for arg in self._paramfinder.findall(arg_string):
# args may need whitespace removing (before removing quotes)
arg = arg.strip()
listmatch = self._list_arg.match(arg)
if listmatch:
key, val = self._list_handle(listmatch)
fun_kwargs[key] = val
continue
keymatch = self._key_arg.match(arg)
if keymatch:
val = keymatch.group(2)
if val not in ("'None'", '"None"'):
# Special case a quoted None
val = self._unquote(val)
fun_kwargs[keymatch.group(1)] = val
continue
fun_args.append(self._unquote(arg))
else:
# allows for function names without (args)
return check, (), {}, None
# Default must be deleted if the value is specified too,
# otherwise the check function will get a spurious "default" keyword arg
default = fun_kwargs.pop('default', None)
return fun_name, fun_args, fun_kwargs, default
def _unquote(self, val):
"""Unquote a value if necessary."""
if (len(val) >= 2) and (val[0] in ("'", '"')) and (val[0] == val[-1]):
val = val[1:-1]
return val
def _list_handle(self, listmatch):
"""Take apart a ``keyword=list('val, 'val')`` type string."""
out = []
name = listmatch.group(1)
args = listmatch.group(2)
for arg in self._list_members.findall(args):
out.append(self._unquote(arg))
return name, out
def _pass(self, value):
"""
Dummy check that always passes
>>> vtor.check('', 0)
0
>>> vtor.check('', '0')
'0'
"""
return value
def get_default_value(self, check):
"""
Given a check, return the default value for the check
(converted to the right type).
If the check doesn't specify a default value then a
``KeyError`` will be raised.
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if default is None:
raise KeyError('Check "%s" has no default value.' % check)
value = self._handle_none(default)
if value is None:
return value
return self._check_value(value, fun_name, fun_args, fun_kwargs)
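# Editorial sketch (not part of the vendored module): get_default_value returns the
# converted default embedded in a check string, or raises KeyError when the check
# specifies none.
def _example_get_default_value():
    vtor = Validator()
    assert vtor.get_default_value('integer(default="5")') == 5 # converted to int
    try:
        vtor.get_default_value('integer') # no default in this check
    except KeyError:
        pass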
def _is_num_param(names, values, to_float=False):
"""
Return numbers from inputs or raise VdtParamError.
Lets ``None`` pass through.
Pass in keyword argument ``to_float=True`` to
use float for the conversion rather than int.
>>> _is_num_param(('', ''), (0, 1.0))
[0, 1]
>>> _is_num_param(('', ''), (0, 1.0), to_float=True)
[0.0, 1.0]
>>> _is_num_param(('a'), ('a'))
Traceback (most recent call last):
VdtParamError: passed an incorrect value "a" for parameter "a".
"""
fun = to_float and float or int
out_params = []
for (name, val) in zip(names, values):
if val is None:
out_params.append(val)
elif isinstance(val, (int, long, float, string_type)):
try:
out_params.append(fun(val))
except ValueError:
raise VdtParamError(name, val)
else:
raise VdtParamError(name, val)
return out_params
# built in checks
# you can override these by setting the appropriate name
# in Validator.functions
# note: if the params are specified wrongly in your input string,
# you will also raise errors.
def is_integer(value, min=None, max=None):
"""
A check that tests that a given value is an integer (int, or long)
and optionally, between bounds. A negative value is accepted, while
a float will fail.
If the value is a string, then the conversion is done - if possible.
Otherwise a VdtError is raised.
>>> vtor.check('integer', '-1')
-1
>>> vtor.check('integer', '0')
0
>>> vtor.check('integer', 9)
9
>>> vtor.check('integer', 'a')
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
>>> vtor.check('integer', '2.2')
Traceback (most recent call last):
VdtTypeError: the value "2.2" is of the wrong type.
>>> vtor.check('integer(10)', '20')
20
>>> vtor.check('integer(max=20)', '15')
15
>>> vtor.check('integer(10)', '9')
Traceback (most recent call last):
VdtValueTooSmallError: the value "9" is too small.
>>> vtor.check('integer(10)', 9)
Traceback (most recent call last):
VdtValueTooSmallError: the value "9" is too small.
>>> vtor.check('integer(max=20)', '35')
Traceback (most recent call last):
VdtValueTooBigError: the value "35" is too big.
>>> vtor.check('integer(max=20)', 35)
Traceback (most recent call last):
VdtValueTooBigError: the value "35" is too big.
>>> vtor.check('integer(0, 9)', False)
0
"""
(min_val, max_val) = _is_num_param(('min', 'max'), (min, max))
if not isinstance(value, (int, long, string_type)):
raise VdtTypeError(value)
if isinstance(value, string_type):
# if it's a string - does it represent an integer ?
try:
value = int(value)
except ValueError:
raise VdtTypeError(value)
if (min_val is not None) and (value < min_val):
raise VdtValueTooSmallError(value)
if (max_val is not None) and (value > max_val):
raise VdtValueTooBigError(value)
return value
def is_float(value, min=None, max=None):
"""
A check that tests that a given value is a float
(an integer will be accepted), and optionally - that it is between bounds.
If the value is a string, then the conversion is done - if possible.
Otherwise a VdtError is raised.
This can accept negative values.
>>> vtor.check('float', '2')
2.0
From now on we multiply the value to avoid comparing decimals
>>> vtor.check('float', '-6.8') * 10
-68.0
>>> vtor.check('float', '12.2') * 10
122.0
>>> vtor.check('float', 8.4) * 10
84.0
>>> vtor.check('float', 'a')
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
>>> vtor.check('float(10.1)', '10.2') * 10
102.0
>>> vtor.check('float(max=20.2)', '15.1') * 10
151.0
>>> vtor.check('float(10.0)', '9.0')
Traceback (most recent call last):
VdtValueTooSmallError: the value "9.0" is too small.
>>> vtor.check('float(max=20.0)', '35.0')
Traceback (most recent call last):
VdtValueTooBigError: the value "35.0" is too big.
"""
(min_val, max_val) = _is_num_param(
('min', 'max'), (min, max), to_float=True)
if not isinstance(value, (int, long, float, string_type)):
raise VdtTypeError(value)
if not isinstance(value, float):
# if it's a string - does it represent a float ?
try:
value = float(value)
except ValueError:
raise VdtTypeError(value)
if (min_val is not None) and (value < min_val):
raise VdtValueTooSmallError(value)
if (max_val is not None) and (value > max_val):
raise VdtValueTooBigError(value)
return value
bool_dict = {
True: True, 'on': True, '1': True, 'true': True, 'yes': True,
False: False, 'off': False, '0': False, 'false': False, 'no': False,
}
def is_boolean(value):
"""
Check if the value represents a boolean.
>>> vtor.check('boolean', 0)
0
>>> vtor.check('boolean', False)
0
>>> vtor.check('boolean', '0')
0
>>> vtor.check('boolean', 'off')
0
>>> vtor.check('boolean', 'false')
0
>>> vtor.check('boolean', 'no')
0
>>> vtor.check('boolean', 'nO')
0
>>> vtor.check('boolean', 'NO')
0
>>> vtor.check('boolean', 1)
1
>>> vtor.check('boolean', True)
1
>>> vtor.check('boolean', '1')
1
>>> vtor.check('boolean', 'on')
1
>>> vtor.check('boolean', 'true')
1
>>> vtor.check('boolean', 'yes')
1
>>> vtor.check('boolean', 'Yes')
1
>>> vtor.check('boolean', 'YES')
1
>>> vtor.check('boolean', '')
Traceback (most recent call last):
VdtTypeError: the value "" is of the wrong type.
>>> vtor.check('boolean', 'up')
Traceback (most recent call last):
VdtTypeError: the value "up" is of the wrong type.
"""
if isinstance(value, string_type):
try:
return bool_dict[value.lower()]
except KeyError:
raise VdtTypeError(value)
# we do an equality test rather than an identity test
# this ensures Python 2.2 compatibility
# and allows 0 and 1 to represent True and False
if value == False:
return False
elif value == True:
return True
else:
raise VdtTypeError(value)
def is_ip_addr(value):
"""
Check that the supplied value is an Internet Protocol address, v.4,
represented by a dotted-quad string, i.e. '1.2.3.4'.
>>> vtor.check('ip_addr', '1 ')
'1'
>>> vtor.check('ip_addr', ' 1.2')
'1.2'
>>> vtor.check('ip_addr', ' 1.2.3 ')
'1.2.3'
>>> vtor.check('ip_addr', '1.2.3.4')
'1.2.3.4'
>>> vtor.check('ip_addr', '0.0.0.0')
'0.0.0.0'
>>> vtor.check('ip_addr', '255.255.255.255')
'255.255.255.255'
>>> vtor.check('ip_addr', '255.255.255.256')
Traceback (most recent call last):
VdtValueError: the value "255.255.255.256" is unacceptable.
>>> vtor.check('ip_addr', '1.2.3.4.5')
Traceback (most recent call last):
VdtValueError: the value "1.2.3.4.5" is unacceptable.
>>> vtor.check('ip_addr', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
value = value.strip()
try:
dottedQuadToNum(value)
except ValueError:
raise VdtValueError(value)
return value
def is_list(value, min=None, max=None):
"""
Check that the value is a list of values.
You can optionally specify the minimum and maximum number of members.
It does no check on list members.
>>> vtor.check('list', ())
[]
>>> vtor.check('list', [])
[]
>>> vtor.check('list', (1, 2))
[1, 2]
>>> vtor.check('list', [1, 2])
[1, 2]
>>> vtor.check('list(3)', (1, 2))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4))
[1, 2, 3, 4]
>>> vtor.check('list', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('list', '12')
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
"""
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
if isinstance(value, string_type):
raise VdtTypeError(value)
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return list(value)
def is_tuple(value, min=None, max=None):
"""
Check that the value is a tuple of values.
You can optionally specify the minimum and maximum number of members.
It does no check on members.
>>> vtor.check('tuple', ())
()
>>> vtor.check('tuple', [])
()
>>> vtor.check('tuple', (1, 2))
(1, 2)
>>> vtor.check('tuple', [1, 2])
(1, 2)
>>> vtor.check('tuple(3)', (1, 2))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4))
(1, 2, 3, 4)
>>> vtor.check('tuple', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('tuple', '12')
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
"""
return tuple(is_list(value, min, max))
def is_string(value, min=None, max=None):
"""
Check that the supplied value is a string.
You can optionally specify the minimum and maximum number of members.
>>> vtor.check('string', '0')
'0'
>>> vtor.check('string', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('string(2)', '12')
'12'
>>> vtor.check('string(2)', '1')
Traceback (most recent call last):
VdtValueTooShortError: the value "1" is too short.
>>> vtor.check('string(min=2, max=3)', '123')
'123'
>>> vtor.check('string(min=2, max=3)', '1234')
Traceback (most recent call last):
VdtValueTooLongError: the value "1234" is too long.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return value
def is_int_list(value, min=None, max=None):
"""
Check that the value is a list of integers.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an integer.
>>> vtor.check('int_list', ())
[]
>>> vtor.check('int_list', [])
[]
>>> vtor.check('int_list', (1, 2))
[1, 2]
>>> vtor.check('int_list', [1, 2])
[1, 2]
>>> vtor.check('int_list', [1, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_integer(mem) for mem in is_list(value, min, max)]
def is_bool_list(value, min=None, max=None):
"""
Check that the value is a list of booleans.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a boolean.
>>> vtor.check('bool_list', ())
[]
>>> vtor.check('bool_list', [])
[]
>>> check_res = vtor.check('bool_list', (True, False))
>>> check_res == [True, False]
1
>>> check_res = vtor.check('bool_list', [True, False])
>>> check_res == [True, False]
1
>>> vtor.check('bool_list', [True, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_boolean(mem) for mem in is_list(value, min, max)]
def is_float_list(value, min=None, max=None):
"""
Check that the value is a list of floats.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a float.
>>> vtor.check('float_list', ())
[]
>>> vtor.check('float_list', [])
[]
>>> vtor.check('float_list', (1, 2.0))
[1.0, 2.0]
>>> vtor.check('float_list', [1, 2.0])
[1.0, 2.0]
>>> vtor.check('float_list', [1, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_float(mem) for mem in is_list(value, min, max)]
def is_string_list(value, min=None, max=None):
"""
Check that the value is a list of strings.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a string.
>>> vtor.check('string_list', ())
[]
>>> vtor.check('string_list', [])
[]
>>> vtor.check('string_list', ('a', 'b'))
['a', 'b']
>>> vtor.check('string_list', ['a', 1])
Traceback (most recent call last):
VdtTypeError: the value "1" is of the wrong type.
>>> vtor.check('string_list', 'hello')
Traceback (most recent call last):
VdtTypeError: the value "hello" is of the wrong type.
"""
if isinstance(value, string_type):
raise VdtTypeError(value)
return [is_string(mem) for mem in is_list(value, min, max)]
def is_ip_addr_list(value, min=None, max=None):
"""
Check that the value is a list of IP addresses.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an IP address.
>>> vtor.check('ip_addr_list', ())
[]
>>> vtor.check('ip_addr_list', [])
[]
>>> vtor.check('ip_addr_list', ('1.2.3.4', '5.6.7.8'))
['1.2.3.4', '5.6.7.8']
>>> vtor.check('ip_addr_list', ['a'])
Traceback (most recent call last):
VdtValueError: the value "a" is unacceptable.
"""
return [is_ip_addr(mem) for mem in is_list(value, min, max)]
def force_list(value, min=None, max=None):
"""
Check that a value is a list, coercing strings into
a list with one member. Useful where users forget the
trailing comma that turns a single value into a list.
You can optionally specify the minimum and maximum number of members.
    A minimum greater than one will fail if the user only supplies a
string.
>>> vtor.check('force_list', ())
[]
>>> vtor.check('force_list', [])
[]
>>> vtor.check('force_list', 'hello')
['hello']
"""
if not isinstance(value, (list, tuple)):
value = [value]
return is_list(value, min, max)
fun_dict = {
'integer': is_integer,
'float': is_float,
'ip_addr': is_ip_addr,
'string': is_string,
'boolean': is_boolean,
}
def is_mixed_list(value, *args):
"""
Check that the value is a list.
Allow specifying the type of each member.
Work on lists of specific lengths.
    You specify each member as a positional argument specifying its type.
    Each type should be one of the following strings:
    'integer', 'float', 'ip_addr', 'string', 'boolean'
    So you can specify a list of two strings, followed by
    two integers, as:
    mixed_list('string', 'string', 'integer', 'integer')
The length of the list must match the number of positional
arguments you supply.
>>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')"
>>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True))
>>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
1
>>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True'))
>>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
1
>>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True))
Traceback (most recent call last):
VdtTypeError: the value "b" is of the wrong type.
>>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a'))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short.
>>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b'))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long.
>>> vtor.check(mix_str, 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('mixed_list("yoda")', ('a'))
Traceback (most recent call last):
VdtParamError: passed an incorrect value "KeyError('yoda',)" for parameter "'mixed_list'"
"""
try:
length = len(value)
except TypeError:
raise VdtTypeError(value)
if length < len(args):
raise VdtValueTooShortError(value)
elif length > len(args):
raise VdtValueTooLongError(value)
try:
return [fun_dict[arg](val) for arg, val in zip(args, value)]
except KeyError as e:
raise VdtParamError('mixed_list', e)
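# Illustrative sketch, not part of the original validate module: calling the
# checker directly shows how each positional type name dispatches through
# fun_dict to coerce the matching member. The _example_ helper name is ours.
def _example_mixed_list():
    result = is_mixed_list(('1', 'a'), 'integer', 'string')
    assert result == [1, 'a']   # '1' coerced by is_integer, 'a' by is_string
    return result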
def is_option(value, *options):
"""
This check matches the value to any of a set of options.
>>> vtor.check('option("yoda", "jedi")', 'yoda')
'yoda'
>>> vtor.check('option("yoda", "jedi")', 'jed')
Traceback (most recent call last):
VdtValueError: the value "jed" is unacceptable.
>>> vtor.check('option("yoda", "jedi")', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
if not value in options:
raise VdtValueError(value)
return value
def _test(value, *args, **keywargs):
"""
A function that exists for test purposes.
>>> checks = [
... '3, 6, min=1, max=3, test=list(a, b, c)',
... '3',
... '3, 6',
... '3,',
... 'min=1, test="a b c"',
... 'min=5, test="a, b, c"',
... 'min=1, max=3, test="a, b, c"',
... 'min=-100, test=-99',
... 'min=1, max=3',
... '3, 6, test="36"',
... '3, 6, test="a, b, c"',
... '3, max=3, test=list("a", "b", "c")',
... '''3, max=3, test=list("'a'", 'b', "x=(c)")''',
... "test='x=fish(3)'",
... ]
>>> v = Validator({'test': _test})
>>> for entry in checks:
... pprint(v.check(('test(%s)' % entry), 3))
(3, ('3', '6'), {'max': '3', 'min': '1', 'test': ['a', 'b', 'c']})
(3, ('3',), {})
(3, ('3', '6'), {})
(3, ('3',), {})
(3, (), {'min': '1', 'test': 'a b c'})
(3, (), {'min': '5', 'test': 'a, b, c'})
(3, (), {'max': '3', 'min': '1', 'test': 'a, b, c'})
(3, (), {'min': '-100', 'test': '-99'})
(3, (), {'max': '3', 'min': '1'})
(3, ('3', '6'), {'test': '36'})
(3, ('3', '6'), {'test': 'a, b, c'})
(3, ('3',), {'max': '3', 'test': ['a', 'b', 'c']})
(3, ('3',), {'max': '3', 'test': ["'a'", 'b', 'x=(c)']})
(3, (), {'test': 'x=fish(3)'})
>>> v = Validator()
>>> v.check('integer(default=6)', '3')
3
>>> v.check('integer(default=6)', None, True)
6
>>> v.get_default_value('integer(default=6)')
6
>>> v.get_default_value('float(default=6)')
6.0
>>> v.get_default_value('pass(default=None)')
>>> v.get_default_value("string(default='None')")
'None'
>>> v.get_default_value('pass')
Traceback (most recent call last):
KeyError: 'Check "pass" has no default value.'
>>> v.get_default_value('pass(default=list(1, 2, 3, 4))')
['1', '2', '3', '4']
>>> v = Validator()
>>> v.check("pass(default=None)", None, True)
>>> v.check("pass(default='None')", None, True)
'None'
>>> v.check('pass(default="None")', None, True)
'None'
>>> v.check('pass(default=list(1, 2, 3, 4))', None, True)
['1', '2', '3', '4']
Bug test for unicode arguments
>>> v = Validator()
>>> v.check(unicode('string(min=4)'), unicode('test')) == unicode('test')
True
>>> v = Validator()
>>> v.get_default_value(unicode('string(min=4, default="1234")')) == unicode('1234')
True
>>> v.check(unicode('string(min=4, default="1234")'), unicode('test')) == unicode('test')
True
>>> v = Validator()
>>> default = v.get_default_value('string(default=None)')
>>> default == None
1
"""
return (value, args, keywargs)
def _test2():
"""
>>>
>>> v = Validator()
>>> v.get_default_value('string(default="#ff00dd")')
'#ff00dd'
>>> v.get_default_value('integer(default=3) # comment')
3
"""
def _test3():
r"""
>>> vtor.check('string(default="")', '', missing=True)
''
>>> vtor.check('string(default="\n")', '', missing=True)
'\n'
>>> print(vtor.check('string(default="\n")', '', missing=True))
<BLANKLINE>
<BLANKLINE>
>>> vtor.check('string()', '\n')
'\n'
>>> vtor.check('string(default="\n\n\n")', '', missing=True)
'\n\n\n'
>>> vtor.check('string()', 'random \n text goes here\n\n')
'random \n text goes here\n\n'
>>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")',
... '', missing=True)
' \nrandom text\ngoes \n here\n\n '
>>> vtor.check("string(default='\n\n\n')", '', missing=True)
'\n\n\n'
>>> vtor.check("option('\n','a','b',default='\n')", '', missing=True)
'\n'
>>> vtor.check("string_list()", ['foo', '\n', 'bar'])
['foo', '\n', 'bar']
>>> vtor.check("string_list(default=list('\n'))", '', missing=True)
['\n']
"""
if __name__ == '__main__':
# run the code tests in doctest format
import sys
import doctest
m = sys.modules.get('__main__')
globs = m.__dict__.copy()
globs.update({
'vtor': Validator(),
})
failures, tests = doctest.testmod(
m, globs=globs,
optionflags=doctest.IGNORE_EXCEPTION_DETAIL | doctest.ELLIPSIS)
assert not failures, '{} failures out of {} tests'.format(failures, tests)
| 48,293 | Python | .py | 1,263 | 31.111639 | 104 | 0.581939 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,616 | __init__.py | zatosource_zato/code/zato-common/src/zato/common/ext/future/__init__.py |
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
"""
future: Easy, safe support for Python 2/3 compatibility
=======================================================
``future`` is the missing compatibility layer between Python 2 and Python
3. It allows you to use a single, clean Python 3.x-compatible codebase to
support both Python 2 and Python 3 with minimal overhead.
It is designed to be used as follows::
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import (
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
followed by predominantly standard, idiomatic Python 3 code that then runs
similarly on Python 2.6/2.7 and Python 3.3+.
The imports have no effect on Python 3. On Python 2, they shadow the
corresponding builtins, which normally have different semantics on Python 3
versus 2, to provide their Python 3 semantics.
Standard library reorganization
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``future`` supports the standard library reorganization (PEP 3108) through the
following Py3 interfaces:
>>> # Top-level packages with Py3 names provided on Py2:
>>> import html.parser
>>> import queue
>>> import tkinter.dialog
>>> import xmlrpc.client
>>> # etc.
>>> # Aliases provided for extensions to existing Py2 module names:
>>> from zato.common.ext.future.standard_library import install_aliases
>>> install_aliases()
>>> from collections import Counter, OrderedDict # backported to Py2.6
>>> from collections import UserDict, UserList, UserString
>>> import urllib.request
>>> from itertools import filterfalse, zip_longest
>>> from subprocess import getoutput, getstatusoutput
Automatic conversion
--------------------
An included script called `futurize
<http://python-future.org/automatic_conversion.html>`_ aids in converting
code (from either Python 2 or Python 3) to code compatible with both
platforms. It is similar to ``python-modernize`` but goes further in
providing Python 3 compatibility through the use of the backported types
and builtin functions in ``future``.
Documentation
-------------
See: http://python-future.org
Credits
-------
:Author: Ed Schofield, Jordan M. Adler, et al
:Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte
Ltd, Singapore. http://pythoncharmers.com
:Others: See docs/credits.rst or http://python-future.org/credits.html
Licensing
---------
Copyright 2013-2019 Python Charmers Pty Ltd, Australia.
The software is distributed under an MIT licence. See LICENSE.txt.
"""
__title__ = 'future'
__author__ = 'Ed Schofield'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013-2019 Python Charmers Pty Ltd'
__ver_major__ = 0
__ver_minor__ = 18
__ver_patch__ = 2
__ver_sub__ = ''
__version__ = "%d.%d.%d%s" % (__ver_major__, __ver_minor__,
__ver_patch__, __ver_sub__)
| 4,245 | Python | .py | 89 | 44.292135 | 119 | 0.72128 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,617 | __init__.py | zatosource_zato/code/zato-common/src/zato/common/ext/future/utils/__init__.py |
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
"""
A selection of cross-compatible functions for Python 2 and 3.
This module exports useful functions for 2/3 compatible code:
* bind_method: binds functions to classes
* ``native_str_to_bytes`` and ``bytes_to_native_str``
* ``native_str``: always equal to the native platform string object (because
this may be shadowed by imports from zato.common.ext.future.builtins)
* lists: lrange(), lmap(), lzip(), lfilter()
* iterable method compatibility:
- iteritems, iterkeys, itervalues
- viewitems, viewkeys, viewvalues
These use the original method if available, otherwise they use items,
keys, values.
* types:
* text_type: unicode in Python 2, str in Python 3
* string_types: basestring in Python 2, str in Python 3
* binary_type: str in Python 2, bytes in Python 3
* integer_types: (int, long) in Python 2, int in Python 3
* class_types: (type, types.ClassType) in Python 2, type in Python 3
* bchr(c):
Take an integer and make a 1-character byte string
* bord(c)
Take the result of indexing on a byte string and make an integer
* tobytes(s)
Take a text string, a byte string, or a sequence of characters taken
from a byte string, and make a byte string.
* raise_from()
* raise_with_traceback()
This module also defines these decorators:
* ``python_2_unicode_compatible``
* ``with_metaclass``
* ``implements_iterator``
Some of the functions in this module come from the following sources:
* Jinja2 (BSD licensed: see
https://github.com/mitsuhiko/jinja2/blob/master/LICENSE)
* Pandas compatibility module pandas.compat
* six.py by Benjamin Peterson
* Django
"""
import types
import sys
import numbers
import functools
import copy
import inspect
PY3 = sys.version_info[0] >= 3
PY34_PLUS = sys.version_info[0:2] >= (3, 4)
PY35_PLUS = sys.version_info[0:2] >= (3, 5)
PY36_PLUS = sys.version_info[0:2] >= (3, 6)
PY2 = sys.version_info[0] == 2
PY26 = sys.version_info[0:2] == (2, 6)
PY27 = sys.version_info[0:2] == (2, 7)
PYPY = hasattr(sys, 'pypy_translation_info')
def python_2_unicode_compatible(cls):
"""
A decorator that defines __unicode__ and __str__ methods under Python
2. Under Python 3, this decorator is a no-op.
To support Python 2 and 3 with a single code base, define a __str__
method returning unicode text and apply this decorator to the class, like
this::
>>> from zato.common.ext.future.utils import python_2_unicode_compatible
>>> @python_2_unicode_compatible
... class MyClass(object):
... def __str__(self):
... return u'Unicode string: \u5b54\u5b50'
>>> a = MyClass()
Then, after this import:
>>> from zato.common.ext.future.builtins import str
the following is ``True`` on both Python 3 and 2::
>>> str(a) == a.encode('utf-8').decode('utf-8')
True
and, on a Unicode-enabled terminal with the right fonts, these both print the
Chinese characters for Confucius::
>>> print(a)
>>> print(str(a))
The implementation comes from django.utils.encoding.
"""
if not PY3:
cls.__unicode__ = cls.__str__
cls.__str__ = lambda self: self.__unicode__().encode('utf-8')
return cls
def with_metaclass(meta, *bases):
"""
Function from jinja2/_compat.py. License: BSD.
Use it like this::
class BaseForm(object):
pass
class FormType(type):
pass
class Form(with_metaclass(FormType, BaseForm)):
pass
This requires a bit of explanation: the basic idea is to make a
dummy metaclass for one level of class instantiation that replaces
itself with the actual metaclass. Because of internal type checks
we also need to make sure that we downgrade the custom metaclass
    for one level to something closer to type (that's why __call__ and
    __init__ come back from type etc.).
This has the advantage over six.with_metaclass of not introducing
dummy classes into the final MRO.
"""
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
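# Illustrative sketch, not part of the original module: with_metaclass keeps
# the temporary helper class out of the final MRO, which is its advantage
# over six.with_metaclass. The _example_ helper name is ours.
def _example_with_metaclass():
    class Meta(type):
        pass
    class Base(object):
        pass
    class Form(with_metaclass(Meta, Base)):
        pass
    assert type(Form) is Meta
    # No 'temporary_class' appears in the MRO:
    assert [c.__name__ for c in Form.__mro__] == ['Form', 'Base', 'object']
    return Form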
# Definitions from pandas.compat and six.py follow:
if PY3:
def bchr(s):
return bytes([s])
def bstr(s):
if isinstance(s, str):
return bytes(s, 'latin-1')
else:
return bytes(s)
def bord(s):
return s
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
else:
# Python 2
def bchr(s):
return chr(s)
def bstr(s):
return str(s)
def bord(s):
return ord(s)
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
###
if PY3:
def tobytes(s):
if isinstance(s, bytes):
return s
else:
if isinstance(s, str):
return s.encode('latin-1')
else:
return bytes(s)
else:
# Python 2
def tobytes(s):
if isinstance(s, unicode):
return s.encode('latin-1')
else:
return ''.join(s)
tobytes.__doc__ = """
    Encodes to latin-1 (whose first 128 code points are the same as ASCII's).
"""
if PY3:
def native_str_to_bytes(s, encoding='utf-8'):
return s.encode(encoding)
def bytes_to_native_str(b, encoding='utf-8'):
return b.decode(encoding)
def text_to_native_str(t, encoding=None):
return t
else:
# Python 2
def native_str_to_bytes(s, encoding=None):
from zato.common.ext.future.types import newbytes # to avoid a circular import
return newbytes(s)
def bytes_to_native_str(b, encoding=None):
return native(b)
def text_to_native_str(t, encoding='ascii'):
"""
Use this to create a Py2 native string when "from __future__ import
unicode_literals" is in effect.
"""
return unicode(t).encode(encoding)
native_str_to_bytes.__doc__ = """
On Py3, returns an encoded string.
On Py2, returns a newbytes type, ignoring the ``encoding`` argument.
"""
if PY3:
# list-producing versions of the major Python iterating functions
def lrange(*args, **kwargs):
return list(range(*args, **kwargs))
def lzip(*args, **kwargs):
return list(zip(*args, **kwargs))
def lmap(*args, **kwargs):
return list(map(*args, **kwargs))
def lfilter(*args, **kwargs):
return list(filter(*args, **kwargs))
else:
import __builtin__
# Python 2-builtin ranges produce lists
lrange = __builtin__.range
lzip = __builtin__.zip
lmap = __builtin__.map
lfilter = __builtin__.filter
def isidentifier(s, dotted=False):
'''
A function equivalent to the str.isidentifier method on Py3
'''
if dotted:
return all(isidentifier(a) for a in s.split('.'))
if PY3:
return s.isidentifier()
else:
import re
_name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$")
return bool(_name_re.match(s))
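# Illustrative sketch, not part of the original module: the dotted flag makes
# isidentifier validate each component of a dotted name separately. The
# _example_ helper name is ours.
def _example_isidentifier():
    assert isidentifier('spam')
    assert isidentifier('spam.eggs', dotted=True)
    assert not isidentifier('spam-eggs')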
def viewitems(obj, **kwargs):
"""
Function for iterating over dictionary items with the same set-like
behaviour on Py2.7 as on Py3.
Passes kwargs to method."""
func = getattr(obj, "viewitems", None)
if not func:
func = obj.items
return func(**kwargs)
def viewkeys(obj, **kwargs):
"""
Function for iterating over dictionary keys with the same set-like
behaviour on Py2.7 as on Py3.
Passes kwargs to method."""
func = getattr(obj, "viewkeys", None)
if not func:
func = obj.keys
return func(**kwargs)
def viewvalues(obj, **kwargs):
"""
Function for iterating over dictionary values with the same set-like
behaviour on Py2.7 as on Py3.
Passes kwargs to method."""
func = getattr(obj, "viewvalues", None)
if not func:
func = obj.values
return func(**kwargs)
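# Illustrative sketch, not part of the original module: the view* helpers give
# the same set-like semantics whether the dict exposes Py2.7 view methods or
# Py3 views. The _example_ helper name is ours.
def _example_viewkeys():
    d1 = {'a': 1, 'b': 2}
    d2 = {'b': 2, 'c': 3}
    common = viewkeys(d1) & viewkeys(d2)   # set intersection of key views
    assert common == set(['b'])
    return common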
def iteritems(obj, **kwargs):
"""Use this only if compatibility with Python versions before 2.7 is
required. Otherwise, prefer viewitems().
"""
func = getattr(obj, "iteritems", None)
if not func:
func = obj.items
return func(**kwargs)
def iterkeys(obj, **kwargs):
"""Use this only if compatibility with Python versions before 2.7 is
required. Otherwise, prefer viewkeys().
"""
func = getattr(obj, "iterkeys", None)
if not func:
func = obj.keys
return func(**kwargs)
def itervalues(obj, **kwargs):
"""Use this only if compatibility with Python versions before 2.7 is
required. Otherwise, prefer viewvalues().
"""
func = getattr(obj, "itervalues", None)
if not func:
func = obj.values
return func(**kwargs)
def bind_method(cls, name, func):
"""Bind a method to class, python 2 and python 3 compatible.
Parameters
----------
cls : type
class to receive bound method
name : basestring
name of method on class instance
func : function
function to be bound as method
Returns
-------
None
"""
# only python 2 has an issue with bound/unbound methods
if not PY3:
setattr(cls, name, types.MethodType(func, None, cls))
else:
setattr(cls, name, func)
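# Illustrative sketch, not part of the original module: binding a plain
# function as a method works identically on Py2 and Py3 via bind_method.
# The _example_ helper name is ours.
def _example_bind_method():
    class Greeter(object):
        pass
    def hello(self):
        return 'hello'
    bind_method(Greeter, 'hello', hello)
    assert Greeter().hello() == 'hello'
    return Greeter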
def getexception():
return sys.exc_info()[1]
def _get_caller_globals_and_locals():
"""
Returns the globals and locals of the calling frame.
Is there an alternative to frame hacking here?
"""
caller_frame = inspect.stack()[2]
myglobals = caller_frame[0].f_globals
mylocals = caller_frame[0].f_locals
return myglobals, mylocals
def _repr_strip(mystring):
"""
Returns the string without any initial or final quotes.
"""
r = repr(mystring)
if r.startswith("'") and r.endswith("'"):
return r[1:-1]
else:
return r
if PY3:
def raise_from(exc, cause):
"""
Equivalent to:
raise EXCEPTION from CAUSE
on Python 3. (See PEP 3134).
"""
myglobals, mylocals = _get_caller_globals_and_locals()
# We pass the exception and cause along with other globals
# when we exec():
myglobals = myglobals.copy()
myglobals['__python_future_raise_from_exc'] = exc
myglobals['__python_future_raise_from_cause'] = cause
execstr = "raise __python_future_raise_from_exc from __python_future_raise_from_cause"
exec(execstr, myglobals, mylocals)
def raise_(tp, value=None, tb=None):
"""
A function that matches the Python 2.x ``raise`` statement. This
allows re-raising exceptions with the cls value and traceback on
Python 2 and 3.
"""
if isinstance(tp, BaseException):
# If the first object is an instance, the type of the exception
# is the class of the instance, the instance itself is the value,
# and the second object must be None.
if value is not None:
raise TypeError("instance exception may not have a separate value")
exc = tp
elif isinstance(tp, type) and not issubclass(tp, BaseException):
# If the first object is a class, it becomes the type of the
# exception.
raise TypeError("class must derive from BaseException, not %s" % tp.__name__)
else:
# The second object is used to determine the exception value: If it
# is an instance of the class, the instance becomes the exception
# value. If the second object is a tuple, it is used as the argument
# list for the class constructor; if it is None, an empty argument
# list is used, and any other object is treated as a single argument
# to the constructor. The instance so created by calling the
# constructor is used as the exception value.
if isinstance(value, tp):
exc = value
elif isinstance(value, tuple):
exc = tp(*value)
elif value is None:
exc = tp()
else:
exc = tp(value)
if exc.__traceback__ is not tb:
raise exc.with_traceback(tb)
raise exc
def raise_with_traceback(exc, traceback=Ellipsis):
if traceback == Ellipsis:
_, _, traceback = sys.exc_info()
raise exc.with_traceback(traceback)
else:
def raise_from(exc, cause):
"""
Equivalent to:
raise EXCEPTION from CAUSE
on Python 3. (See PEP 3134).
"""
# Is either arg an exception class (e.g. IndexError) rather than
# instance (e.g. IndexError('my message here')? If so, pass the
# name of the class undisturbed through to "raise ... from ...".
if isinstance(exc, type) and issubclass(exc, Exception):
e = exc()
# exc = exc.__name__
# execstr = "e = " + _repr_strip(exc) + "()"
# myglobals, mylocals = _get_caller_globals_and_locals()
# exec(execstr, myglobals, mylocals)
else:
e = exc
e.__suppress_context__ = False
if isinstance(cause, type) and issubclass(cause, Exception):
e.__cause__ = cause()
e.__cause__.__traceback__ = sys.exc_info()[2]
e.__suppress_context__ = True
elif cause is None:
e.__cause__ = None
e.__suppress_context__ = True
elif isinstance(cause, BaseException):
e.__cause__ = cause
object.__setattr__(e.__cause__, '__traceback__', sys.exc_info()[2])
e.__suppress_context__ = True
else:
raise TypeError("exception causes must derive from BaseException")
e.__context__ = sys.exc_info()[1]
raise e
exec('''
def raise_(tp, value=None, tb=None):
raise tp, value, tb
def raise_with_traceback(exc, traceback=Ellipsis):
if traceback == Ellipsis:
_, _, traceback = sys.exc_info()
raise exc, None, traceback
'''.strip())
raise_with_traceback.__doc__ = (
"""Raise exception with existing traceback.
If traceback is not passed, uses sys.exc_info() to get traceback."""
)
# Deprecated alias for backward compatibility with ``future`` versions < 0.11:
reraise = raise_
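# Illustrative sketch, not part of the original module: raise_ mirrors the
# Py2 three-argument raise statement on both Python versions. The _example_
# helper name is ours.
def _example_raise_():
    try:
        raise_(ValueError, 'bad value')
    except ValueError as e:
        return str(e)   # 'bad value'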
def implements_iterator(cls):
'''
From jinja2/_compat.py. License: BSD.
Use as a decorator like this::
@implements_iterator
class UppercasingIterator(object):
def __init__(self, iterable):
self._iter = iter(iterable)
def __iter__(self):
return self
def __next__(self):
return next(self._iter).upper()
'''
if PY3:
return cls
else:
cls.next = cls.__next__
del cls.__next__
return cls
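# Illustrative sketch, not part of the original module: a class decorated with
# implements_iterator iterates the same way under Py2 (.next) and Py3
# (.__next__). The _example_ helper name is ours.
def _example_implements_iterator():
    @implements_iterator
    class Upper(object):
        def __init__(self, iterable):
            self._iter = iter(iterable)
        def __iter__(self):
            return self
        def __next__(self):
            return next(self._iter).upper()
    assert list(Upper(['a', 'b'])) == ['A', 'B']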
if PY3:
get_next = lambda x: x.next
else:
get_next = lambda x: x.__next__
def encode_filename(filename):
if PY3:
return filename
else:
if isinstance(filename, unicode):
return filename.encode('utf-8')
return filename
def is_new_style(cls):
"""
Python 2.7 has both new-style and old-style classes. Old-style classes can
be pesky in some circumstances, such as when using inheritance. Use this
function to test for whether a class is new-style. (Python 3 only has
new-style classes.)
"""
return hasattr(cls, '__class__') and ('__dict__' in dir(cls)
or hasattr(cls, '__slots__'))
# The native platform string and bytes types. Useful because ``str`` and
# ``bytes`` are redefined on Py2 by ``from zato.common.ext.future.builtins import *``.
native_str = str
native_bytes = bytes
def istext(obj):
"""
Deprecated. Use::
>>> isinstance(obj, str)
after this import:
>>> from zato.common.ext.future.builtins import str
"""
return isinstance(obj, type(u''))
def isbytes(obj):
"""
Deprecated. Use::
>>> isinstance(obj, bytes)
after this import:
>>> from zato.common.ext.future.builtins import bytes
"""
return isinstance(obj, type(b''))
def isnewbytes(obj):
"""
    Equivalent to ``type(obj) is newbytes``: in other words, is it REALLY
    a newbytes instance, not a Py2 native str object?
    Note that this does not cover subclasses of newbytes, and it is not
    equivalent to isinstance(obj, newbytes).
"""
return type(obj).__name__ == 'newbytes'
def isint(obj):
"""
Deprecated. Tests whether an object is a Py3 ``int`` or either a Py2 ``int`` or
``long``.
Instead of using this function, you can use:
>>> from zato.common.ext.future.builtins import int
>>> isinstance(obj, int)
The following idiom is equivalent:
>>> from numbers import Integral
>>> isinstance(obj, Integral)
"""
return isinstance(obj, numbers.Integral)
def native(obj):
"""
On Py3, this is a no-op: native(obj) -> obj
On Py2, returns the corresponding native Py2 types that are
superclasses for backported objects from Py3:
>>> from builtins import str, bytes, int
>>> native(str(u'ABC'))
u'ABC'
>>> type(native(str(u'ABC')))
unicode
>>> native(bytes(b'ABC'))
b'ABC'
>>> type(native(bytes(b'ABC')))
bytes
>>> native(int(10**20))
100000000000000000000L
>>> type(native(int(10**20)))
long
Existing native types on Py2 will be returned unchanged:
>>> type(native(u'ABC'))
unicode
"""
if hasattr(obj, '__native__'):
return obj.__native__()
else:
return obj
# Implementation of exec_ is from ``six``:
if PY3:
import builtins
exec_ = getattr(builtins, "exec")
else:
def exec_(code, globs=None, locs=None):
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
# Defined here for backward compatibility:
def old_div(a, b):
"""
DEPRECATED: import ``old_div`` from ``past.utils`` instead.
Equivalent to ``a / b`` on Python 2 without ``from __future__ import
division``.
TODO: generalize this to other objects (like arrays etc.)
"""
if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral):
return a // b
else:
return a / b
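# Illustrative sketch, not part of the original module: old_div reproduces
# Py2 division semantics -- floor division for two integers, true division
# otherwise. The _example_ helper name is ours.
def _example_old_div():
    assert old_div(7, 2) == 3      # both integral: floor division
    assert old_div(7.0, 2) == 3.5  # otherwise: true division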
def as_native_str(encoding='utf-8'):
'''
A decorator to turn a function or method call that returns text, i.e.
unicode, into one that returns a native platform str.
Use it as a decorator like this::
from __future__ import unicode_literals
class MyClass(object):
@as_native_str(encoding='ascii')
def __repr__(self):
return next(self._iter).upper()
'''
if PY3:
return lambda f: f
else:
def encoder(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
return f(*args, **kwargs).encode(encoding=encoding)
return wrapper
return encoder
# listvalues and listitems definitions from Nick Coghlan's (withdrawn)
# PEP 496:
try:
dict.iteritems
except AttributeError:
# Python 3
def listvalues(d):
return list(d.values())
def listitems(d):
return list(d.items())
else:
# Python 2
def listvalues(d):
return d.values()
def listitems(d):
return d.items()
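# Illustrative sketch, not part of the original module: listvalues/listitems
# always return real lists, whichever branch above was taken. The _example_
# helper name is ours.
def _example_listitems():
    d = {'a': 1}
    assert listitems(d) == [('a', 1)]
    assert listvalues(d) == [1]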
if PY3:
def ensure_new_type(obj):
return obj
else:
def ensure_new_type(obj):
from zato.common.ext.future.types.newbytes import newbytes
from zato.common.ext.future.types.newstr import newstr
from zato.common.ext.future.types.newint import newint
from zato.common.ext.future.types.newdict import newdict
native_type = type(native(obj))
# Upcast only if the type is already a native (non-future) type
if issubclass(native_type, type(obj)):
# Upcast
if native_type == str: # i.e. Py2 8-bit str
return newbytes(obj)
elif native_type == unicode:
return newstr(obj)
elif native_type == int:
return newint(obj)
elif native_type == long:
return newint(obj)
elif native_type == dict:
return newdict(obj)
else:
return obj
else:
# Already a new type
assert type(obj) in [newbytes, newstr]
return obj
__all__ = ['PY2', 'PY26', 'PY3', 'PYPY',
'as_native_str', 'binary_type', 'bind_method', 'bord', 'bstr',
'bytes_to_native_str', 'class_types', 'encode_filename',
'ensure_new_type', 'exec_', 'get_next', 'getexception',
'implements_iterator', 'integer_types', 'is_new_style', 'isbytes',
'isidentifier', 'isint', 'isnewbytes', 'istext', 'iteritems',
'iterkeys', 'itervalues', 'lfilter', 'listitems', 'listvalues',
'lmap', 'lrange', 'lzip', 'native', 'native_bytes', 'native_str',
'native_str_to_bytes', 'old_div',
'python_2_unicode_compatible', 'raise_',
'raise_with_traceback', 'reraise', 'string_types',
'text_to_native_str', 'text_type', 'tobytes', 'viewitems',
'viewkeys', 'viewvalues', 'with_metaclass'
]
| 23,233 | Python | .py | 636 | 29.47956 | 119 | 0.624276 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,618 | __init__.py | zatosource_zato/code/zato-common/src/zato/common/ext/future/standard_library/__init__.py |
from __future__ import absolute_import, division, print_function
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
"""
Python 3 reorganized the standard library (PEP 3108). This module exposes
several standard library modules to Python 2 under their new Python 3
names.
It is designed to be used as follows::
from future import standard_library
standard_library.install_aliases()
And then these normal Py3 imports work on both Py3 and Py2::
import builtins
import copyreg
import queue
import reprlib
import socketserver
import winreg # on Windows only
import test.support
    import html, html.parser, html.entities
import http, http.client, http.server
import http.cookies, http.cookiejar
import urllib.parse, urllib.request, urllib.response, urllib.error, urllib.robotparser
import xmlrpc.client, xmlrpc.server
import _thread
import _dummy_thread
import _markupbase
from itertools import filterfalse, zip_longest
from sys import intern
from collections import UserDict, UserList, UserString
from collections import OrderedDict, Counter, ChainMap # even on Py2.6
from subprocess import getoutput, getstatusoutput
from subprocess import check_output # even on Py2.6
(The renamed modules and functions are still available under their old
names on Python 2.)
This is a cleaner alternative to this idiom (see
http://docs.pythonsprints.com/python3_porting/py-porting.html)::
try:
import queue
except ImportError:
import Queue as queue
Limitations
-----------
We don't currently support these modules, but would like to::
import dbm
import dbm.dumb
import dbm.gnu
import collections.abc # on Py33
import pickle # should (optionally) bring in cPickle on Python 2
"""
import sys
import logging
import imp
import contextlib
import types
import copy
import os
# Make a dedicated logger; leave the root logger to be configured
# by the application.
flog = logging.getLogger('future_stdlib')
_formatter = logging.Formatter(logging.BASIC_FORMAT)
_handler = logging.StreamHandler()
_handler.setFormatter(_formatter)
flog.addHandler(_handler)
flog.setLevel(logging.WARN)
from zato.common.ext.future.utils import PY2, PY3
# The modules that are defined under the same names on Py3 but with
# different contents in a significant way (e.g. submodules) are:
# pickle (fast one)
# dbm
# urllib
# test
# email
REPLACED_MODULES = set(['test', 'urllib', 'pickle', 'dbm']) # add email and dbm when we support it
# The following module names are not present in Python 2.x, so they cause no
# potential clashes between the old and new names:
# http
# html
# tkinter
# xmlrpc
# Keys: Py2 / real module names
# Values: Py3 / simulated module names
RENAMES = {
# 'cStringIO': 'io', # there's a new io module in Python 2.6
# that provides StringIO and BytesIO
# 'StringIO': 'io', # ditto
# 'cPickle': 'pickle',
'__builtin__': 'builtins',
'copy_reg': 'copyreg',
'Queue': 'queue',
'future.moves.socketserver': 'socketserver',
'ConfigParser': 'configparser',
'repr': 'reprlib',
# 'FileDialog': 'tkinter.filedialog',
# 'tkFileDialog': 'tkinter.filedialog',
# 'SimpleDialog': 'tkinter.simpledialog',
# 'tkSimpleDialog': 'tkinter.simpledialog',
# 'tkColorChooser': 'tkinter.colorchooser',
# 'tkCommonDialog': 'tkinter.commondialog',
# 'Dialog': 'tkinter.dialog',
# 'Tkdnd': 'tkinter.dnd',
# 'tkFont': 'tkinter.font',
# 'tkMessageBox': 'tkinter.messagebox',
# 'ScrolledText': 'tkinter.scrolledtext',
# 'Tkconstants': 'tkinter.constants',
# 'Tix': 'tkinter.tix',
# 'ttk': 'tkinter.ttk',
# 'Tkinter': 'tkinter',
'_winreg': 'winreg',
'thread': '_thread',
'dummy_thread': '_dummy_thread',
# 'anydbm': 'dbm', # causes infinite import loop
# 'whichdb': 'dbm', # causes infinite import loop
# anydbm and whichdb are handled by fix_imports2
# 'dbhash': 'dbm.bsd',
# 'dumbdbm': 'dbm.dumb',
# 'dbm': 'dbm.ndbm',
# 'gdbm': 'dbm.gnu',
'future.moves.xmlrpc': 'xmlrpc',
# 'future.backports.email': 'email', # for use by urllib
# 'DocXMLRPCServer': 'xmlrpc.server',
# 'SimpleXMLRPCServer': 'xmlrpc.server',
# 'httplib': 'http.client',
# 'htmlentitydefs' : 'html.entities',
# 'HTMLParser' : 'html.parser',
# 'Cookie': 'http.cookies',
# 'cookielib': 'http.cookiejar',
# 'BaseHTTPServer': 'http.server',
# 'SimpleHTTPServer': 'http.server',
# 'CGIHTTPServer': 'http.server',
# 'future.backports.test': 'test', # primarily for renaming test_support to support
# 'commands': 'subprocess',
# 'urlparse' : 'urllib.parse',
# 'robotparser' : 'urllib.robotparser',
# 'abc': 'collections.abc', # for Py33
# 'future.utils.six.moves.html': 'html',
# 'future.utils.six.moves.http': 'http',
'future.moves.html': 'html',
'future.moves.http': 'http',
# 'future.backports.urllib': 'urllib',
# 'future.utils.six.moves.urllib': 'urllib',
'future.moves._markupbase': '_markupbase',
}
# It is complicated and apparently brittle to mess around with the
# ``sys.modules`` cache in order to support "import urllib" meaning two
# different things (Py2.7 urllib and backported Py3.3-like urllib) in different
# contexts. So we require explicit imports for these modules.
assert len(set(RENAMES.values()) & set(REPLACED_MODULES)) == 0
# Harmless renames that we can insert.
# These modules need names from elsewhere being added to them:
# subprocess: should provide getoutput and other fns from commands
# module but these fns are missing: getstatus, mk2arg,
# mkarg
# re: needs an ASCII constant that works compatibly with Py3
# etc: see lib2to3/fixes/fix_imports.py
# (New module name, new object name, old module name, old object name)
MOVES = [('collections', 'UserList', 'UserList', 'UserList'),
('collections', 'UserDict', 'UserDict', 'UserDict'),
('collections', 'UserString','UserString', 'UserString'),
('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'),
('itertools', 'filterfalse','itertools', 'ifilterfalse'),
('itertools', 'zip_longest','itertools', 'izip_longest'),
('sys', 'intern','__builtin__', 'intern'),
# The re module has no ASCII flag in Py2, but this is the default.
# Set re.ASCII to a zero constant. stat.ST_MODE just happens to be one
# (and it exists on Py2.6+).
('re', 'ASCII','stat', 'ST_MODE'),
('base64', 'encodebytes','base64', 'encodestring'),
('base64', 'decodebytes','base64', 'decodestring'),
('subprocess', 'getoutput', 'commands', 'getoutput'),
('subprocess', 'getstatusoutput', 'commands', 'getstatusoutput'),
('subprocess', 'check_output', 'future.backports.misc', 'check_output'),
('math', 'ceil', 'future.backports.misc', 'ceil'),
('collections', 'OrderedDict', 'future.backports.misc', 'OrderedDict'),
('collections', 'Counter', 'future.backports.misc', 'Counter'),
('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'),
('itertools', 'count', 'future.backports.misc', 'count'),
('reprlib', 'recursive_repr', 'future.backports.misc', 'recursive_repr'),
('functools', 'cmp_to_key', 'future.backports.misc', 'cmp_to_key'),
# This is no use, since "import urllib.request" etc. still fails:
# ('urllib', 'error', 'future.moves.urllib', 'error'),
# ('urllib', 'parse', 'future.moves.urllib', 'parse'),
# ('urllib', 'request', 'future.moves.urllib', 'request'),
# ('urllib', 'response', 'future.moves.urllib', 'response'),
# ('urllib', 'robotparser', 'future.moves.urllib', 'robotparser'),
]
# A minimal example of an import hook:
# class WarnOnImport(object):
# def __init__(self, *args):
# self.module_names = args
#
# def find_module(self, fullname, path=None):
# if fullname in self.module_names:
# self.path = path
# return self
# return None
#
# def load_module(self, name):
# if name in sys.modules:
# return sys.modules[name]
# module_info = imp.find_module(name, self.path)
# module = imp.load_module(name, *module_info)
# sys.modules[name] = module
# flog.warning("Imported deprecated module %s", name)
# return module
class RenameImport(object):
"""
A class for import hooks mapping Py3 module names etc. to the Py2 equivalents.
"""
# Different RenameImport classes are created when importing this module from
# different source files. This causes isinstance(hook, RenameImport) checks
# to produce inconsistent results. We add this RENAMER attribute here so
# remove_hooks() and install_hooks() can find instances of these classes
# easily:
RENAMER = True
def __init__(self, old_to_new):
'''
Pass in a dictionary-like object mapping from old names to new
names. E.g. {'ConfigParser': 'configparser', 'cPickle': 'pickle'}
'''
self.old_to_new = old_to_new
both = set(old_to_new.keys()) & set(old_to_new.values())
assert (len(both) == 0 and
len(set(old_to_new.values())) == len(old_to_new.values())), \
'Ambiguity in renaming (handler not implemented)'
self.new_to_old = dict((new, old) for (old, new) in old_to_new.items())
def find_module(self, fullname, path=None):
# Handles hierarchical importing: package.module.module2
new_base_names = set([s.split('.')[0] for s in self.new_to_old])
# Before v0.12: Was: if fullname in set(self.old_to_new) | new_base_names:
if fullname in new_base_names:
return self
return None
def load_module(self, name):
path = None
if name in sys.modules:
return sys.modules[name]
elif name in self.new_to_old:
# New name. Look up the corresponding old (Py2) name:
oldname = self.new_to_old[name]
module = self._find_and_load_module(oldname)
# module.__future_module__ = True
else:
module = self._find_and_load_module(name)
# In any case, make it available under the requested (Py3) name
sys.modules[name] = module
return module
def _find_and_load_module(self, name, path=None):
"""
        Finds and loads the module. If there's a '.' in the name, each
        package level is loaded in turn.
"""
bits = name.split('.')
while len(bits) > 1:
# Treat the first bit as a package
packagename = bits.pop(0)
package = self._find_and_load_module(packagename, path)
try:
path = package.__path__
except AttributeError:
# This could be e.g. moves.
flog.debug('Package {0} has no __path__.'.format(package))
if name in sys.modules:
return sys.modules[name]
flog.debug('What to do here?')
name = bits[0]
module_info = imp.find_module(name, path)
return imp.load_module(name, *module_info)
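# Illustrative sketch, not part of the original module: constructing a
# RenameImport shows the name mapping it builds and which imports it claims;
# nothing is installed into sys.meta_path here. The _example_ helper name and
# the {'Queue': 'queue'} mapping are ours.
def _example_rename_import():
    hook = RenameImport({'Queue': 'queue'})
    assert hook.new_to_old == {'queue': 'Queue'}
    assert hook.find_module('queue') is hook   # claims the new-style name
    assert hook.find_module('Queue') is None   # ignores the old-style name
    return hook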
class hooks(object):
"""
Acts as a context manager. Saves the state of sys.modules and restores it
after the 'with' block.
Use like this:
>>> from future import standard_library
>>> with standard_library.hooks():
... import http.client
>>> import requests
For this to work, http.client will be scrubbed from sys.modules after the
'with' block. That way the modules imported in the 'with' block will
continue to be accessible in the current namespace but not from any
imported modules (like requests).
"""
def __enter__(self):
# flog.debug('Entering hooks context manager')
self.old_sys_modules = copy.copy(sys.modules)
self.hooks_were_installed = detect_hooks()
# self.scrubbed = scrub_py2_sys_modules()
install_hooks()
return self
def __exit__(self, *args):
# flog.debug('Exiting hooks context manager')
# restore_sys_modules(self.scrubbed)
if not self.hooks_were_installed:
remove_hooks()
# scrub_future_sys_modules()
# Sanity check for is_py2_stdlib_module(): We aren't replacing any
# builtin modules names:
if PY2:
assert len(set(RENAMES.values()) & set(sys.builtin_module_names)) == 0
def is_py2_stdlib_module(m):
"""
Tries to infer whether the module m is from the Python 2 standard library.
This may not be reliable on all systems.
"""
if PY3:
return False
if not 'stdlib_path' in is_py2_stdlib_module.__dict__:
stdlib_files = [contextlib.__file__, os.__file__, copy.__file__]
stdlib_paths = [os.path.split(f)[0] for f in stdlib_files]
if not len(set(stdlib_paths)) == 1:
# This seems to happen on travis-ci.org. Very strange. We'll try to
# ignore it.
flog.warn('Multiple locations found for the Python standard '
'library: %s' % stdlib_paths)
# Choose the first one arbitrarily
is_py2_stdlib_module.stdlib_path = stdlib_paths[0]
if m.__name__ in sys.builtin_module_names:
return True
if hasattr(m, '__file__'):
modpath = os.path.split(m.__file__)
if (modpath[0].startswith(is_py2_stdlib_module.stdlib_path) and
'site-packages' not in modpath[0]):
return True
return False
def scrub_py2_sys_modules():
"""
Removes any Python 2 standard library modules from ``sys.modules`` that
would interfere with Py3-style imports using import hooks. Examples are
modules with the same names (like urllib or email).
(Note that currently import hooks are disabled for modules like these
with ambiguous names anyway ...)
"""
if PY3:
return {}
scrubbed = {}
for modulename in REPLACED_MODULES & set(RENAMES.keys()):
if not modulename in sys.modules:
continue
module = sys.modules[modulename]
if is_py2_stdlib_module(module):
flog.debug('Deleting (Py2) {} from sys.modules'.format(modulename))
scrubbed[modulename] = sys.modules[modulename]
del sys.modules[modulename]
return scrubbed
def scrub_future_sys_modules():
"""
Deprecated.
"""
return {}
class suspend_hooks(object):
"""
Acts as a context manager. Use like this:
>>> from future import standard_library
>>> standard_library.install_hooks()
>>> import http.client
>>> # ...
>>> with standard_library.suspend_hooks():
>>> import requests # incompatible with ``future``'s standard library hooks
If the hooks were disabled before the context, they are not installed when
the context is left.
"""
def __enter__(self):
self.hooks_were_installed = detect_hooks()
remove_hooks()
# self.scrubbed = scrub_future_sys_modules()
return self
def __exit__(self, *args):
if self.hooks_were_installed:
install_hooks()
# restore_sys_modules(self.scrubbed)
def restore_sys_modules(scrubbed):
"""
Add any previously scrubbed modules back to the sys.modules cache,
but only if it's safe to do so.
"""
clash = set(sys.modules) & set(scrubbed)
if len(clash) != 0:
# If several, choose one arbitrarily to raise an exception about
first = list(clash)[0]
raise ImportError('future module {} clashes with Py2 module'
.format(first))
sys.modules.update(scrubbed)
def install_aliases():
"""
Monkey-patches the standard library in Py2.6/7 to provide
aliases for better Py3 compatibility.
"""
if PY3:
return
# if hasattr(install_aliases, 'run_already'):
# return
for (newmodname, newobjname, oldmodname, oldobjname) in MOVES:
__import__(newmodname)
# We look up the module in sys.modules because __import__ just returns the
# top-level package:
newmod = sys.modules[newmodname]
# newmod.__future_module__ = True
__import__(oldmodname)
oldmod = sys.modules[oldmodname]
obj = getattr(oldmod, oldobjname)
setattr(newmod, newobjname, obj)
# Hack for urllib so it appears to have the same structure on Py2 as on Py3
import urllib
from zato.common.ext.future.backports.urllib import request
from zato.common.ext.future.backports.urllib import response
from zato.common.ext.future.backports.urllib import parse
from zato.common.ext.future.backports.urllib import error
from zato.common.ext.future.backports.urllib import robotparser
urllib.request = request
urllib.response = response
urllib.parse = parse
urllib.error = error
urllib.robotparser = robotparser
sys.modules['urllib.request'] = request
sys.modules['urllib.response'] = response
sys.modules['urllib.parse'] = parse
sys.modules['urllib.error'] = error
sys.modules['urllib.robotparser'] = robotparser
# Patch the test module so it appears to have the same structure on Py2 as on Py3
try:
import test
except ImportError:
pass
try:
from zato.common.ext.future.moves.test import support
except ImportError:
pass
else:
test.support = support
sys.modules['test.support'] = support
# Patch the dbm module so it appears to have the same structure on Py2 as on Py3
try:
import dbm
except ImportError:
pass
else:
from zato.common.ext.future.moves.dbm import dumb
dbm.dumb = dumb
sys.modules['dbm.dumb'] = dumb
try:
from zato.common.ext.future.moves.dbm import gnu
except ImportError:
pass
else:
dbm.gnu = gnu
sys.modules['dbm.gnu'] = gnu
try:
from zato.common.ext.future.moves.dbm import ndbm
except ImportError:
pass
else:
dbm.ndbm = ndbm
sys.modules['dbm.ndbm'] = ndbm
# install_aliases.run_already = True
def install_hooks():
"""
This function installs the future.standard_library import hook into
sys.meta_path.
"""
if PY3:
return
install_aliases()
flog.debug('sys.meta_path was: {0}'.format(sys.meta_path))
flog.debug('Installing hooks ...')
# Add it unless it's there already
newhook = RenameImport(RENAMES)
if not detect_hooks():
sys.meta_path.append(newhook)
flog.debug('sys.meta_path is now: {0}'.format(sys.meta_path))
def enable_hooks():
"""
Deprecated. Use install_hooks() instead. This will be removed by
``future`` v1.0.
"""
install_hooks()
def remove_hooks(scrub_sys_modules=False):
"""
This function removes the import hook from sys.meta_path.
"""
if PY3:
return
flog.debug('Uninstalling hooks ...')
# Loop backwards, so deleting items keeps the ordering:
for i, hook in list(enumerate(sys.meta_path))[::-1]:
if hasattr(hook, 'RENAMER'):
del sys.meta_path[i]
# Explicit is better than implicit. In the future the interface should
# probably change so that scrubbing the import hooks requires a separate
# function call. Left as is for now for backward compatibility with
# v0.11.x.
if scrub_sys_modules:
scrub_future_sys_modules()
def disable_hooks():
"""
Deprecated. Use remove_hooks() instead. This will be removed by
``future`` v1.0.
"""
remove_hooks()
def detect_hooks():
"""
Returns True if the import hooks are installed, False if not.
"""
flog.debug('Detecting hooks ...')
present = any([hasattr(hook, 'RENAMER') for hook in sys.meta_path])
if present:
flog.debug('Detected.')
else:
flog.debug('Not detected.')
return present
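# Illustrative sketch, not part of the original module: on Py3 install_hooks()
# and remove_hooks() return immediately, so detect_hooks() stays False; on Py2
# this would briefly install and then remove the RenameImport hook (and the
# install_aliases() side effects would persist). The _example_ helper name is
# ours.
def _example_hook_lifecycle():
    install_hooks()
    present = detect_hooks()
    remove_hooks()
    return present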
# As of v0.12, this no longer happens implicitly:
# if not PY3:
# install_hooks()
if not hasattr(sys, 'py2_modules'):
sys.py2_modules = {}
def cache_py2_modules():
"""
Currently this function is unneeded, as we are not attempting to provide import hooks
for modules with ambiguous names: email, urllib, pickle.
"""
if len(sys.py2_modules) != 0:
return
assert not detect_hooks()
import urllib
sys.py2_modules['urllib'] = urllib
import email
sys.py2_modules['email'] = email
import pickle
sys.py2_modules['pickle'] = pickle
    # Not all Python installations have the test module. (Anaconda doesn't, for example.)
# try:
# import test
# except ImportError:
# sys.py2_modules['test'] = None
# sys.py2_modules['test'] = test
# import dbm
# sys.py2_modules['dbm'] = dbm
def import_(module_name, backport=False):
"""
Pass a (potentially dotted) module name of a Python 3 standard library
module. This function imports the module compatibly on Py2 and Py3 and
returns the top-level module.
Example use:
>>> http = import_('http.client')
>>> http = import_('http.server')
>>> urllib = import_('urllib.request')
Then:
>>> conn = http.client.HTTPConnection(...)
>>> response = urllib.request.urlopen('https://mywebsite.com')
>>> # etc.
Use as follows:
>>> package_name = import_(module_name)
On Py3, equivalent to this:
>>> import module_name
On Py2, equivalent to this if backport=False:
>>> from zato.common.ext.future.moves import module_name
or to this if backport=True:
>>> from zato.common.ext.future.backports import module_name
except that it also handles dotted module names such as ``http.client``
The effect then is like this:
>>> from zato.common.ext.future.backports import module
>>> from zato.common.ext.future.backports.module import submodule
>>> module.submodule = submodule
Note that this would be a SyntaxError in Python:
>>> from zato.common.ext.future.backports import http.client
"""
# Python 2.6 doesn't have importlib in the stdlib, so it requires
# the backported ``importlib`` package from PyPI as a dependency to use
# this function:
import importlib
if PY3:
return __import__(module_name)
else:
# client.blah = blah
# Then http.client = client
# etc.
if backport:
prefix = 'future.backports'
else:
prefix = 'future.moves'
parts = prefix.split('.') + module_name.split('.')
modules = []
for i, part in enumerate(parts):
sofar = '.'.join(parts[:i+1])
modules.append(importlib.import_module(sofar))
for i, part in reversed(list(enumerate(parts))):
if i == 0:
break
setattr(modules[i-1], part, modules[i])
# Return the next-most top-level module after future.backports / future.moves:
return modules[2]
def from_import(module_name, *symbol_names, **kwargs):
"""
Example use:
>>> HTTPConnection = from_import('http.client', 'HTTPConnection')
>>> HTTPServer = from_import('http.server', 'HTTPServer')
>>> urlopen, urlparse = from_import('urllib.request', 'urlopen', 'urlparse')
Equivalent to this on Py3:
>>> from module_name import symbol_names[0], symbol_names[1], ...
and this on Py2:
>>> from zato.common.ext.future.moves.module_name import symbol_names[0], ...
or:
>>> from zato.common.ext.future.backports.module_name import symbol_names[0], ...
except that it also handles dotted module names such as ``http.client``.
"""
if PY3:
return __import__(module_name)
else:
if 'backport' in kwargs and bool(kwargs['backport']):
prefix = 'future.backports'
else:
prefix = 'future.moves'
parts = prefix.split('.') + module_name.split('.')
        import importlib
        module = importlib.import_module(prefix + '.' + module_name)
output = [getattr(module, name) for name in symbol_names]
if len(output) == 1:
return output[0]
else:
return output
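# A hedged usage sketch, not part of the original module. A single symbol
# name returns the object itself while several names return a list, so both
# of these forms work:
# >>> HTTPConnection = from_import('http.client', 'HTTPConnection')
# >>> urlopen, urlparse = from_import('urllib.request', 'urlopen', 'urlparse')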
class exclude_local_folder_imports(object):
"""
A context-manager that prevents standard library modules like configparser
from being imported from the local python-future source folder on Py3.
(This was needed prior to v0.16.0 because the presence of a configparser
folder would otherwise have prevented setuptools from running on Py3. It may
no longer be necessary.)
"""
def __init__(self, *args):
assert len(args) > 0
self.module_names = args
# Disallow dotted module names like http.client:
if any(['.' in m for m in self.module_names]):
raise NotImplementedError('Dotted module names are not supported')
def __enter__(self):
self.old_sys_path = copy.copy(sys.path)
self.old_sys_modules = copy.copy(sys.modules)
if sys.version_info[0] < 3:
return
# The presence of all these indicates we've found our source folder,
# because `builtins` won't have been installed in site-packages by setup.py:
FUTURE_SOURCE_SUBFOLDERS = ['future', 'past', 'libfuturize', 'libpasteurize', 'builtins']
# Look for the future source folder:
for folder in self.old_sys_path:
if all([os.path.exists(os.path.join(folder, subfolder))
for subfolder in FUTURE_SOURCE_SUBFOLDERS]):
# Found it. Remove it.
sys.path.remove(folder)
# Ensure we import the system module:
for m in self.module_names:
# Delete the module and any submodules from sys.modules:
# for key in list(sys.modules):
# if key == m or key.startswith(m + '.'):
# try:
# del sys.modules[key]
# except KeyError:
# pass
try:
module = __import__(m, level=0)
except ImportError:
# There's a problem importing the system module. E.g. the
# winreg module is not available except on Windows.
pass
def __exit__(self, *args):
# Restore sys.path and sys.modules:
sys.path = self.old_sys_path
for m in set(self.old_sys_modules.keys()) - set(sys.modules.keys()):
sys.modules[m] = self.old_sys_modules[m]
TOP_LEVEL_MODULES = ['builtins',
'copyreg',
'html',
'http',
'queue',
'reprlib',
'socketserver',
'test',
'tkinter',
'winreg',
'xmlrpc',
'_dummy_thread',
'_markupbase',
'_thread',
]
def import_top_level_modules():
with exclude_local_folder_imports(*TOP_LEVEL_MODULES):
for m in TOP_LEVEL_MODULES:
try:
__import__(m)
except ImportError: # e.g. winreg
pass
size: 29,229 | language: Python | extension: .py | total_lines: 704 | avg_line_length: 34.039773 | max_line_length: 119 | alphanum_fraction: 0.628883 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,619 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/future/moves/__init__.py
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# future.moves package
from __future__ import absolute_import
import sys
__future_module__ = True
from zato.common.ext.future.standard_library import import_top_level_modules
if sys.version_info[0] >= 3:
import_top_level_modules()
size: 1,449 | language: Python | extension: .py | total_lines: 26 | avg_line_length: 54.346154 | max_line_length: 119 | alphanum_fraction: 0.810868 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,620 | file_name: error.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/future/moves/urllib/error.py
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import absolute_import
from zato.common.ext.future.standard_library import suspend_hooks
from zato.common.ext.future.utils import PY3
if PY3:
from urllib.error import *
else:
__future_module__ = True
# We use this method to get at the original Py2 urllib before any renaming magic
# ContentTooShortError = sys.py2_modules['urllib'].ContentTooShortError
with suspend_hooks():
from urllib import ContentTooShortError
from urllib2 import URLError, HTTPError
size: 1,724 | language: Python | extension: .py | total_lines: 31 | avg_line_length: 53.16129 | max_line_length: 119 | alphanum_fraction: 0.801663 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,621 | file_name: robotparser.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/future/moves/urllib/robotparser.py
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import absolute_import
from zato.common.ext.future.utils import PY3
if PY3:
from urllib.robotparser import *
else:
__future_module__ = True
from robotparser import *
size: 1,408 | language: Python | extension: .py | total_lines: 26 | avg_line_length: 52.461538 | max_line_length: 119 | alphanum_fraction: 0.80814 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,622 | file_name: response.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/future/moves/urllib/response.py
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from zato.common.ext.future import standard_library
from zato.common.ext.future.utils import PY3
if PY3:
from urllib.response import *
else:
__future_module__ = True
with standard_library.suspend_hooks():
from urllib import (addbase,
addclosehook,
addinfo,
addinfourl)
size: 1,571 | language: Python | extension: .py | total_lines: 30 | avg_line_length: 47.7 | max_line_length: 119 | alphanum_fraction: 0.763518 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,623 | file_name: request.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/future/moves/urllib/request.py
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import absolute_import
from zato.common.ext.future.standard_library import suspend_hooks
from zato.common.ext.future.utils import PY3
if PY3:
from urllib.request import *
# These aren't in __all__:
from urllib.request import (getproxies,
pathname2url,
proxy_bypass,
quote,
request_host,
thishost,
unquote,
url2pathname,
urlcleanup,
urljoin,
urlopen,
urlparse,
urlretrieve,
urlsplit,
urlunparse)
from urllib.parse import (splitattr,
splithost,
splitpasswd,
splitport,
splitquery,
splittag,
splittype,
splituser,
splitvalue,
to_bytes,
unwrap)
else:
__future_module__ = True
with suspend_hooks():
from urllib import *
from urllib2 import *
from urlparse import *
# Rename:
from urllib import toBytes # missing from __all__ on Py2.6
to_bytes = toBytes
# from urllib import (pathname2url,
# url2pathname,
# getproxies,
# urlretrieve,
# urlcleanup,
# URLopener,
# FancyURLopener,
# proxy_bypass)
# from urllib2 import (
# AbstractBasicAuthHandler,
# AbstractDigestAuthHandler,
# BaseHandler,
# CacheFTPHandler,
# FileHandler,
# FTPHandler,
# HTTPBasicAuthHandler,
# HTTPCookieProcessor,
# HTTPDefaultErrorHandler,
# HTTPDigestAuthHandler,
# HTTPErrorProcessor,
# HTTPHandler,
# HTTPPasswordMgr,
# HTTPPasswordMgrWithDefaultRealm,
# HTTPRedirectHandler,
# HTTPSHandler,
# URLError,
# build_opener,
# install_opener,
# OpenerDirector,
# ProxyBasicAuthHandler,
# ProxyDigestAuthHandler,
# ProxyHandler,
# Request,
# UnknownHandler,
# urlopen,
# )
# from urlparse import (
# urldefrag,
# urljoin,
# urlparse,
# urlunparse,
# urlsplit,
# urlunsplit,
# parse_qs,
# parse_qsl,
# )
size: 4,741 | language: Python | extension: .py | total_lines: 106 | avg_line_length: 32.40566 | max_line_length: 119 | alphanum_fraction: 0.472421 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,624 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/future/moves/urllib/__init__.py
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import absolute_import
from zato.common.ext.future.utils import PY3
if not PY3:
__future_module__ = True
size: 1,339 | language: Python | extension: .py | total_lines: 23 | avg_line_length: 56.782609 | max_line_length: 119 | alphanum_fraction: 0.81145 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,625 | file_name: parse.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/future/moves/urllib/parse.py
"""
This module is a modified vendor copy of the python-future package from https://github.com/PythonCharmers/python-future
Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import absolute_import
from zato.common.ext.future.standard_library import suspend_hooks
from zato.common.ext.future.utils import PY3
if PY3:
from urllib.parse import *
else:
__future_module__ = True
from urlparse import (ParseResult, SplitResult, parse_qs, parse_qsl,
urldefrag, urljoin, urlparse, urlsplit,
urlunparse, urlunsplit)
# We use this method to get at the original Py2 urllib before any renaming
# quote = sys.py2_modules['urllib'].quote
# quote_plus = sys.py2_modules['urllib'].quote_plus
# unquote = sys.py2_modules['urllib'].unquote
# unquote_plus = sys.py2_modules['urllib'].unquote_plus
# urlencode = sys.py2_modules['urllib'].urlencode
# splitquery = sys.py2_modules['urllib'].splitquery
with suspend_hooks():
from urllib import (quote,
quote_plus,
unquote,
unquote_plus,
urlencode,
splitquery)
size: 2,290 | language: Python | extension: .py | total_lines: 43 | avg_line_length: 46.372093 | max_line_length: 119 | alphanum_fraction: 0.717605 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,626 | file_name: messages.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/messages.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import logging
from zato.common.ext.imbox.query import build_search_query
from zato.common.ext.imbox.parser import fetch_email_by_uid
logger = logging.getLogger(__name__)
class Messages:
IMAP_ATTRIBUTE_LOOKUP = {
'unread': '(UNSEEN)',
'flagged': '(FLAGGED)',
'unflagged': '(UNFLAGGED)',
'sent_from': '(FROM "{}")',
'sent_to': '(TO "{}")',
'date__gt': '(SINCE "{}")',
'date__lt': '(BEFORE "{}")',
'date__on': '(ON "{}")',
'subject': '(SUBJECT "{}")',
'uid__range': '(UID {})',
'text': '(TEXT "{}")',
}
FOLDER_LOOKUP = {}
def __init__(self,
connection,
parser_policy,
**kwargs):
self.connection = connection
self.parser_policy = parser_policy
self.kwargs = kwargs
self._uid_list = self._query_uids(**kwargs)
logger.debug("Fetch all messages for UID in {}".format(self._uid_list))
def _fetch_email(self, uid):
return fetch_email_by_uid(uid=uid,
connection=self.connection,
parser_policy=self.parser_policy)
def _query_uids(self, **kwargs):
query_ = build_search_query(self.IMAP_ATTRIBUTE_LOOKUP, **kwargs)
_, data = self.connection.uid('search', None, query_)
if data[0] is None:
return []
return data[0].split()
def _fetch_email_list(self):
for uid in self._uid_list:
yield uid, self._fetch_email(uid)
def __repr__(self):
if len(self.kwargs) > 0:
return 'Messages({})'.format('\n'.join('{}={}'.format(key, value)
for key, value in self.kwargs.items()))
return 'Messages(ALL)'
def __iter__(self):
return self._fetch_email_list()
def __next__(self):
return self
def __len__(self):
return len(self._uid_list)
def __getitem__(self, index):
uids = self._uid_list[index]
if not isinstance(uids, list):
uid = uids
return uid, self._fetch_email(uid)
return [(uid, self._fetch_email(uid))
for uid in uids]
size: 3,459 | language: Python | extension: .py | total_lines: 79 | avg_line_length: 35.683544 | max_line_length: 95 | alphanum_fraction: 0.622726 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,627 | file_name: imbox.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/imbox.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
# stdlib
import imaplib
import logging
# Zato
from zato.common.ext.imbox.imap import ImapTransport
from zato.common.ext.imbox.messages import Messages
from zato.common.ext.imbox.vendors import GmailMessages, hostname_vendorname_dict, name_authentication_string_dict
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class Imbox:
authentication_error_message = None
def __init__(self, hostname, username=None, password=None, ssl=True,
port=None, ssl_context=None, policy=None, starttls=False,
vendor=None):
self.server = ImapTransport(hostname, ssl=ssl, port=port, ssl_context=ssl_context, starttls=starttls)
self.hostname = hostname
self.username = username
self.password = password
self.parser_policy = policy
self.vendor = vendor or hostname_vendorname_dict.get(self.hostname)
if self.vendor is not None:
self.authentication_error_message = name_authentication_string_dict.get(self.vendor)
try:
self.connection = self.server.connect(username, password)
except imaplib.IMAP4.error as e:
if self.authentication_error_message is None:
raise
raise imaplib.IMAP4.error(
self.authentication_error_message + '\n' + str(e))
logger.info("Connected to IMAP Server with user {username} on {hostname}{ssl}".format(
hostname=hostname, username=username, ssl=(" over SSL" if ssl or starttls else "")))
# ################################################################################################################################
def __enter__(self):
return self
# ################################################################################################################################
def __exit__(self, type, value, traceback):
self.logout()
# ################################################################################################################################
def logout(self):
self.connection.close()
self.connection.logout()
logger.info(f"Disconnected from IMAP Server {self.username}@{self.hostname}")
# ################################################################################################################################
def mark_seen(self, uid):
logger.info("Mark UID {} with \\Seen FLAG".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Seen)')
# ################################################################################################################################
def mark_flag(self, uid):
logger.info("Mark UID {} with \\Flagged FLAG".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Flagged)')
# ################################################################################################################################
def delete(self, uid):
logger.info("Mark UID {} with \\Deleted FLAG and expunge.".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Deleted)')
self.connection.expunge()
# ################################################################################################################################
def copy(self, uid, destination_folder):
logger.info("Copy UID {} to {} folder".format(int(uid), str(destination_folder)))
return self.connection.uid('COPY', uid, destination_folder)
# ################################################################################################################################
def move(self, uid, destination_folder):
logger.info("Move UID {} to {} folder".format(int(uid), str(destination_folder)))
if self.copy(uid, destination_folder):
self.delete(uid)
# ################################################################################################################################
def messages(self, **kwargs):
folder = kwargs.get('folder', False)
messages_class = Messages
if self.vendor == 'gmail':
messages_class = GmailMessages
if folder:
self.connection.select(messages_class.FOLDER_LOOKUP.get((folder.lower())) or folder)
msg = " from folder '{}'".format(folder)
del kwargs['folder']
else:
msg = " from inbox"
logger.info("Fetch list of messages{}".format(msg))
return messages_class(connection=self.connection, parser_policy=self.parser_policy, **kwargs)
# ################################################################################################################################
def folders(self):
return self.connection.list()
# ################################################################################################################################
# ################################################################################################################################
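# A hedged usage sketch, not part of the vendored module - the hostname and
# credentials below are placeholders. Imbox is a context manager, so logout()
# runs automatically on exit:
#
# with Imbox('imap.example.com', username='user@example.com', password='secret') as imbox:
#     for uid, message in imbox.messages(unread=True):
#         print(uid, message.subject)
#         imbox.mark_seen(uid)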
size: 6,743 | language: Python | extension: .py | total_lines: 106 | avg_line_length: 57.311321 | max_line_length: 130 | alphanum_fraction: 0.472471 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,628 | file_name: imap.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/imap.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the 'Software'), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
# stdlib
import logging
import ssl as pythonssllib
from imaplib import IMAP4, IMAP4_SSL
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class ImapTransport:
def __init__(self, hostname, port=None, ssl=True, ssl_context=None, starttls=False):
self.hostname = hostname
if ssl:
self.port = port or 993
if ssl_context is None:
ssl_context = pythonssllib.create_default_context()
self.server = IMAP4_SSL(self.hostname, self.port, ssl_context=ssl_context)
else:
self.port = port or 143
self.server = IMAP4(self.hostname, self.port)
if starttls:
self.server.starttls()
logger.debug(f'Created IMAP4 transport for {self.hostname}:{self.port}')
# ################################################################################################################################
def list_folders(self):
logger.debug('Listing all folders in mailbox')
return self.server.list()
# ################################################################################################################################
def connect(self, username, password):
self.server.login(username, password)
self.server.select()
logger.debug(f'Logged into server {self.hostname} and selected mailbox "INBOX"')
return self.server
# ################################################################################################################################
# ################################################################################################################################
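# A hedged usage sketch, not part of the vendored module - the hostname and
# credentials are placeholders. With ssl=True (the default) the transport
# connects over IMAP4_SSL on port 993; with ssl=False it uses plain IMAP4 on
# port 143:
#
# transport = ImapTransport('imap.example.com')         # port 993, TLS
# plain = ImapTransport('imap.example.com', ssl=False)  # port 143, cleartext
# server = transport.connect('user@example.com', 'secret')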
size: 3,377 | language: Python | extension: .py | total_lines: 55 | avg_line_length: 56.636364 | max_line_length: 130 | alphanum_fraction: 0.502876 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,629 | file_name: utils.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/utils.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import logging
logger = logging.getLogger(__name__)
def str_encode(value='', encoding=None, errors='strict'):
logger.debug("Encode str {} with {} and errors {}".format(value, encoding, errors))
return str(value, encoding, errors)
def str_decode(value='', encoding=None, errors='strict'):
if isinstance(value, str):
return bytes(value, encoding, errors).decode('utf-8')
elif isinstance(value, bytes):
return value.decode(encoding or 'utf-8', errors=errors)
else:
raise TypeError("Cannot decode '{}' object".format(value.__class__))
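# Hedged examples of the dispatch above, not part of the vendored module:
# >>> str_decode(b'caf\xc3\xa9')              # bytes are decoded (UTF-8 by default)
# 'café'
# >>> str_decode('café', encoding='utf-8')    # str is re-encoded, then decoded
# 'café'
# Any other input type raises TypeError.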
size: 1,789 | language: Python | extension: .py | total_lines: 32 | avg_line_length: 53.1875 | max_line_length: 95 | alphanum_fraction: 0.766896 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,630 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/__init__.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from zato.common.ext.imbox.imbox import Imbox
__all__ = ['Imbox']
size: 1,277 | language: Python | extension: .py | total_lines: 22 | avg_line_length: 56.681818 | max_line_length: 95 | alphanum_fraction: 0.801123 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,631 | file_name: query.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/query.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import datetime
def build_search_query(imap_attribute_lookup, **kwargs):
query = []
for name, value in kwargs.items():
if value is not None:
if isinstance(value, datetime.date):
value = value.strftime('%d-%b-%Y')
if isinstance(value, str) and '"' in value:
value = value.replace('"', "'")
query.append(imap_attribute_lookup[name].format(value))
if query:
return " ".join(query)
return "(ALL)"
size: 1,707 | language: Python | extension: .py | total_lines: 33 | avg_line_length: 47.363636 | max_line_length: 95 | alphanum_fraction: 0.739627 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,632 | file_name: parser.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/parser.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import imaplib
import io
import re
import email
import chardet
import base64
import quopri
import sys
import time
from datetime import datetime
from email.header import decode_header
from zato.common.ext.imbox.utils import str_encode, str_decode
import logging
logger = logging.getLogger(__name__)
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
def keys(self):
return self.__dict__.keys()
def __repr__(self):
return str(self.__dict__)
def decode_mail_header(value, default_charset='us-ascii'):
"""
Decode a header value into a unicode string.
"""
try:
headers = decode_header(value)
except email.errors.HeaderParseError:
return str_decode(str_encode(value, default_charset, 'replace'), default_charset)
else:
for index, (text, charset) in enumerate(headers):
logger.debug("Mail header no. {index}: {data} encoding {charset}".format(
index=index,
data=str_decode(text, charset or 'utf-8', 'replace'),
charset=charset))
try:
headers[index] = str_decode(text, charset or default_charset,
'replace')
except LookupError:
# if the charset is unknown, force default
headers[index] = str_decode(text, default_charset, 'replace')
return ''.join(headers)
def get_mail_addresses(message, header_name):
"""
Retrieve all email addresses from one message header.
"""
headers = message.get_all(header_name, [])
addresses = email.utils.getaddresses(headers)
for index, (address_name, address_email) in enumerate(addresses):
addresses[index] = {'name': decode_mail_header(address_name),
'email': address_email}
logger.debug("{} Mail address in message: <{}> {}".format(
header_name.upper(), address_name, address_email))
return addresses
def decode_param(param):
name, v = param.split('=', 1)
values = v.split('\n')
value_results = []
for value in values:
match = re.search(r'=\?((?:\w|-)+)\?([QB])\?(.+)\?=', value)
if match:
encoding, type_, code = match.groups()
if type_ == 'Q':
value = quopri.decodestring(code)
elif type_ == 'B':
value = base64.decodebytes(code.encode())
value = str_encode(value, encoding)
value_results.append(value)
if value_results:
v = ''.join(value_results)
logger.debug("Decoded parameter {} - {}".format(name, v))
return name, v
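# A hedged example of the RFC 2047 decoding above, not part of the vendored module:
# >>> decode_param('filename="=?utf-8?Q?r=C3=A9sum=C3=A9.pdf?="')
# ('filename', 'résumé.pdf')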
def parse_attachment(message_part):
# Check again if this is a valid attachment
content_disposition = message_part.get("Content-Disposition", None)
if content_disposition is not None and not message_part.is_multipart():
dispositions = [
disposition.strip()
for disposition in content_disposition.split(";")
if disposition.strip()
]
if dispositions[0].lower() in ["attachment", "inline"]:
file_data = message_part.get_payload(decode=True)
attachment = {
'content-type': message_part.get_content_type(),
'size': len(file_data),
'content': io.BytesIO(file_data),
'content-id': message_part.get("Content-ID", None)
}
filename = message_part.get_param('name')
if filename:
attachment['filename'] = filename
filename_parts = []
for param in dispositions[1:]:
if param:
name, value = decode_param(param)
# Check for split filename
s_name = name.split("*")
if s_name[0] == 'filename':
# If this is a split file name - use the number after the * as an index to insert this part
if len(s_name) > 1:
filename_parts.insert(int(s_name[1]), value[1:-1] if value.startswith('"') else value)
else:
filename_parts.insert(0, value[1:-1] if value.startswith('"') else value)
if 'create-date' in name:
attachment['create-date'] = value
attachment['filename'] = "".join(filename_parts)
return attachment
return None
def decode_content(message):
content = message.get_payload(decode=True)
charset = message.get_content_charset('utf-8')
try:
return content.decode(charset, 'ignore')
except LookupError:
encoding = chardet.detect(content).get('encoding')
if encoding:
return content.decode(encoding, 'ignore')
return content
except AttributeError:
return content
def fetch_email_by_uid(uid, connection, parser_policy):
message, data = connection.uid('fetch', uid, '(BODY.PEEK[] FLAGS)')
logger.debug("Fetched message for UID {}".format(int(uid)))
raw_headers, raw_email = data[0]
email_object = parse_email(raw_email, policy=parser_policy)
flags = parse_flags(raw_headers.decode())
email_object.__dict__['flags'] = flags
return email_object
def parse_flags(headers):
"""Copied from https://github.com/girishramnani/gmail/blob/master/gmail/message.py"""
if len(headers) == 0:
return []
if sys.version_info[0] == 3:
headers = bytes(headers, "ascii")
return list(imaplib.ParseFlags(headers))
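# A hedged example, not part of the vendored module - the argument is a raw
# IMAP FETCH response line and imaplib.ParseFlags returns the flags as bytes:
# >>> parse_flags('1 (FLAGS (\\Seen \\Recent))')
# [b'\\Seen', b'\\Recent']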
def parse_email(raw_email, policy=None):
if isinstance(raw_email, bytes):
raw_email = str_encode(raw_email, 'utf-8', errors='ignore')
if policy is not None:
email_parse_kwargs = {'policy': policy}
else:
email_parse_kwargs = {}
try:
email_message = email.message_from_string(
raw_email, **email_parse_kwargs)
except UnicodeEncodeError:
email_message = email.message_from_string(
raw_email.encode('utf-8'), **email_parse_kwargs)
maintype = email_message.get_content_maintype()
parsed_email = {'raw_email': raw_email}
body = {
"plain": [],
"html": []
}
attachments = []
if maintype in ('multipart', 'image'):
logger.debug("Multipart message. Will process parts.")
for part in email_message.walk():
content_type = part.get_content_type()
part_maintype = part.get_content_maintype()
content_disposition = part.get('Content-Disposition', None)
if content_disposition or not part_maintype == "text":
content = part.get_payload(decode=True)
else:
content = decode_content(part)
is_inline = content_disposition is None \
or content_disposition.startswith("inline")
if content_type == "text/plain" and is_inline:
body['plain'].append(content)
elif content_type == "text/html" and is_inline:
body['html'].append(content)
elif content_disposition:
attachment = parse_attachment(part)
if attachment:
attachments.append(attachment)
elif maintype == 'text':
payload = decode_content(email_message)
body['plain'].append(payload)
parsed_email['attachments'] = attachments
parsed_email['body'] = body
email_dict = dict(email_message.items())
parsed_email['sent_from'] = get_mail_addresses(email_message, 'from')
parsed_email['sent_to'] = get_mail_addresses(email_message, 'to')
parsed_email['cc'] = get_mail_addresses(email_message, 'cc')
parsed_email['bcc'] = get_mail_addresses(email_message, 'bcc')
value_headers_keys = ['subject', 'date', 'message-id']
key_value_header_keys = ['received-spf',
'mime-version',
'x-spam-status',
'x-spam-score',
'content-type']
parsed_email['headers'] = []
for key, value in email_dict.items():
if key.lower() in value_headers_keys:
valid_key_name = key.lower().replace('-', '_')
parsed_email[valid_key_name] = decode_mail_header(value)
if key.lower() in key_value_header_keys:
parsed_email['headers'].append({'Name': key,
'Value': value})
if parsed_email.get('date'):
timetuple = email.utils.parsedate(parsed_email['date'])
parsed_date = datetime.fromtimestamp(time.mktime(timetuple)) \
if timetuple else None
parsed_email['parsed_date'] = parsed_date
logger.info("Downloaded and parsed mail '{}' with {} attachments".format(
parsed_email.get('subject'), len(parsed_email.get('attachments'))))
return Struct(**parsed_email)
size: 10,222 | language: Python | extension: .py | total_lines: 228 | avg_line_length: 35.311404 | max_line_length: 115 | alphanum_fraction: 0.615857 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,633 | file_name: version.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/version.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
__version__ = "0.9.8"
VERSION = __version__.split('.')
size: 1,265 | language: Python | extension: .py | total_lines: 22 | avg_line_length: 56.181818 | max_line_length: 95 | alphanum_fraction: 0.794498 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,634 | file_name: gmail.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/vendors/gmail.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from zato.common.ext.imbox.messages import Messages
from zato.common.ext.imbox.vendors.helpers import merge_two_dicts
class GmailMessages(Messages):
authentication_error_message = ('If you\'re not using an app-specific password, grab one here: '
'https://myaccount.google.com/apppasswords')
hostname = 'imap.gmail.com'
name = 'gmail'
FOLDER_LOOKUP = {
'all_mail': '"[Gmail]/All Mail"',
'all': '"[Gmail]/All Mail"',
'all mail': '"[Gmail]/All Mail"',
'sent': '"[Gmail]/Sent Mail"',
'sent mail': '"[Gmail]/Sent Mail"',
'sent_mail': '"[Gmail]/Sent Mail"',
'drafts': '"[Gmail]/Drafts"',
'important': '"[Gmail]/Important"',
'spam': '"[Gmail]/Spam"',
'starred': '"[Gmail]/Starred"',
'trash': '"[Gmail]/Trash"',
}
GMAIL_IMAP_ATTRIBUTE_LOOKUP_DIFF = {
'subject': '(X-GM-RAW "subject:\'{}\'")',
'label': '(X-GM-LABELS "{}")',
'raw': '(X-GM-RAW "{}")'
}
def __init__(self,
connection,
parser_policy,
**kwargs):
self.IMAP_ATTRIBUTE_LOOKUP = merge_two_dicts(self.IMAP_ATTRIBUTE_LOOKUP,
self.GMAIL_IMAP_ATTRIBUTE_LOOKUP_DIFF)
super().__init__(connection, parser_policy, **kwargs)
size: 2,583 | language: Python | extension: .py | total_lines: 52 | avg_line_length: 42.634615 | max_line_length: 100 | alphanum_fraction: 0.661104 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,635 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/vendors/__init__.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from zato.common.ext.imbox.vendors.gmail import GmailMessages
vendors = [GmailMessages]
hostname_vendorname_dict = {vendor.hostname: vendor.name for vendor in vendors}
name_authentication_string_dict = {vendor.name: vendor.authentication_error_message for vendor in vendors}
__all__ = [v.__name__ for v in vendors]
__all__ += ['hostname_vendorname_dict',
'name_authentication_string_dict']
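# A hedged illustration, not part of the vendored module: with GmailMessages
# as the only vendor, hostname_vendorname_dict evaluates to
# {'imap.gmail.com': 'gmail'} and name_authentication_string_dict maps
# 'gmail' to that class's authentication_error_message.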
size: 1,616 | language: Python | extension: .py | total_lines: 27 | avg_line_length: 58 | max_line_length: 106 | alphanum_fraction: 0.793409 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,636 | file_name: helpers.py | file_path: zatosource_zato/code/zato-common/src/zato/common/ext/imbox/vendors/helpers.py
# -*- coding: utf-8 -*-
"""
This module is a modified vendor copy of the Imbox package from https://pypi.org/project/imbox/
The MIT License (MIT)
Copyright (c) 2013 Martin Rusev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
def merge_two_dicts(x, y):
"""from https://stackoverflow.com/a/26853961/4386191"""
z = x.copy() # start with x's keys and values
z.update(y) # modifies z with y's keys and values & returns None
return z
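# A hedged example, not part of the vendored module - on key clashes the
# values from y win:
# >>> merge_two_dicts({'a': 1, 'b': 2}, {'b': 20, 'c': 3})
# {'a': 1, 'b': 20, 'c': 3}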
size: 1,434 | language: Python | extension: .py | total_lines: 25 | avg_line_length: 55.44 | max_line_length: 95 | alphanum_fraction: 0.78174 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,637 | file_name: parsing.py | file_path: zatosource_zato/code/zato-common/src/zato/common/kvdb/parsing.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# stdlib
from logging import getLogger
from string import punctuation
# PyParsing
from pyparsing import alphanums, oneOf, OneOrMore, Optional, White, Word
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# Redis PyParsing grammar
quot = Optional(oneOf(('"', "'")))
command = oneOf((
'CONFIG', 'DBSIZE', 'DECR', 'DECRBY', 'DEL', 'DUMP', 'ECHO',
'EXISTS', 'EXPIRE', 'EXPIREAT', 'FLUSHDB', 'GET',
'HDEL', 'HEXISTS', 'HGET', 'HGETALL', 'HINCRBY', 'HKEYS', 'HLEN', 'HSET', 'HSETNX',
'HVALS', 'INCR', 'INCRBY', 'INFO', 'KEYS', 'LLEN', 'LPOP', 'LPUSH', 'LPUSHX',
'LRANGE', 'LREM', 'LSET', 'LTRIM', 'MGET', 'MSET', 'MSETNX', 'OBJECT', 'PERSIST',
'PEXPIRE', 'PEXPIREAT', 'PING', 'PSETEX', 'PTTL', 'RANDOMKEY', 'RENAME', 'RENAMENX',
'RESTORE', 'RPOP', 'SADD', 'SET', 'SISMEMBER', 'SMEMBERS', 'SREM', 'TIME', 'TTL', 'TYPE',
'ZADD', 'ZRANGE', 'ZREM'), caseless=True).setResultsName('command')
parameters = (OneOrMore(Word(alphanums + '-' + punctuation))).setResultsName('parameters')
redis_grammar = command + Optional(White().suppress() + parameters)
# ################################################################################################################################
# ################################################################################################################################
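# A hedged example, not part of the original module, of what the grammar
# accepts - parameters are whitespace-delimited words:
# >>> result = redis_grammar.parseString('GET mykey')
# >>> result.command, list(result.parameters)
# ('GET', ['mykey'])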
size: 2,216 | language: Python | extension: .py | total_lines: 32 | avg_line_length: 66.875 | max_line_length: 130 | alphanum_fraction: 0.381676 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,638 | file_name: api.py | file_path: zatosource_zato/code/zato-common/src/zato/common/kvdb/api.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# stdlib
from importlib import import_module
from logging import getLogger
# Cryptography
from cryptography.fernet import InvalidToken
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import basestring
# Zato
from zato.common.api import KVDB as _KVDB
from zato.common.const import SECRETS
from zato.common.util import spawn_greenlet
from zato.common.util.kvdb import has_redis_sentinels
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class KVDB:
""" A wrapper around the Zato's key-value database.
"""
server: 'ParallelServer'
def __init__(self, config=None, decrypt_func=None):
self.conn = None
self.config = config
self.decrypt_func = decrypt_func
self.conn_class = None # Introduced so it's easier to test the class
self.has_sentinel = False
# ################################################################################################################################
def _get_connection_class(self):
""" Returns a concrete class to create Redis connections off basing on whether we use Redis sentinels or not.
Abstracted out to a separate method so it's easier to test the whole class in separation.
"""
if self.has_sentinel:
from redis.sentinel import Sentinel
return Sentinel
else:
from redis import StrictRedis
return StrictRedis
# ################################################################################################################################
def _parse_sentinels(self, item):
if item:
if isinstance(item, basestring):
item = [item]
out = []
for elem in item:
elem = elem.split(':')
# This will always exist ..
host = elem[0]
# .. which is why we can always use it ..
to_append = [host]
# .. but port can be optional ..
if len(elem) > 1:
port = elem[1]
port = int(port)
to_append.append(port)
out.append(tuple(to_append))
return out
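# A hedged example, not part of the original module - the port is optional
# and is converted to an integer when present:
# >>> KVDB()._parse_sentinels(['10.0.0.1:26379', '10.0.0.2'])
# [('10.0.0.1', 26379), ('10.0.0.2',)]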
# ################################################################################################################################
def init(self):
config = {}
self.has_sentinel = has_redis_sentinels(self.config)
if self.has_sentinel:
sentinels = self._parse_sentinels(self.config.get('redis_sentinels'))
if not sentinels:
raise ValueError('kvdb.redis_sentinels must be provided')
sentinel_master = self.config.get('redis_sentinels_master', None)
if not sentinel_master:
raise ValueError('kvdb.redis_sentinels_master must be provided')
config['sentinels'] = sentinels
config['sentinel_master'] = sentinel_master
else:
if self.config.get('host'):
config['host'] = self.config.host
if self.config.get('port'):
config['port'] = int(self.config.port)
if self.config.get('unix_socket_path'):
config['unix_socket_path'] = self.config.unix_socket_path
if self.config.get('db'):
config['db'] = int(self.config.db)
if self.config.get('password'):
                # Heuristics - gAAA is the prefix of encrypted secrets, so there is a chance
                # that we need to decrypt it. If the decryption fails, that is fine - we assume
                # in such a case that it was an actual password that happens to start with this prefix.
if self.config.password.startswith(SECRETS.Encrypted_Indicator):
try:
config['password'] = self.decrypt_func(self.config.password)
except InvalidToken:
config['password'] = self.config.password
else:
config['password'] = self.config.password
if self.config.get('socket_timeout'):
config['socket_timeout'] = float(self.config.socket_timeout)
if self.config.get('connection_pool'):
split = self.config.connection_pool.split('.')
                module, class_name = '.'.join(split[:-1]), split[-1]
mod = import_module(module)
config['connection_pool'] = getattr(mod, class_name)
if self.config.get('charset'):
config['charset'] = self.config.charset
if self.config.get('errors'):
config['errors'] = self.config.errors
self.conn_class = self._get_connection_class()
if self.has_sentinel:
instance = self.conn_class(config['sentinels'], min_other_sentinels=0, password=config.get('password'),
socket_timeout=config.get('socket_timeout'), decode_responses=True)
self.conn = instance.master_for(config['sentinel_master'])
else:
self.conn = self.conn_class(charset='utf-8', decode_responses=True, **config)
# Confirm whether we can connect
self.ping()
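    # For reference, a sentinel-enabled configuration could look like the dict below -
    # the addresses and the master name are hypothetical:
    #
    #   config = {
    #       'redis_sentinels': ['10.0.0.1:26379', '10.0.0.2:26379'],
    #       'redis_sentinels_master': 'mymaster',
    #   }
    #
    # With such input, init() builds a Sentinel connection first and then obtains
    # a client for the master via instance.master_for(...).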
# ################################################################################################################################
def pubsub(self):
return self.conn.pubsub()
# ################################################################################################################################
def publish(self, *args, **kwargs):
return self.conn.publish(*args, **kwargs)
# ################################################################################################################################
def subscribe(self, *args, **kwargs):
return self.conn.subscribe(*args, **kwargs)
# ################################################################################################################################
def translate(self, system1:'str', key1:'str', value1:'str', system2:'str', key2:'str', default:'str'='') -> 'str':
return self.conn.hget(
_KVDB.SEPARATOR.join(
(_KVDB.TRANSLATION, system1, key1, value1, system2, key2)), 'value2') or default
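    # The hash key looked up above is built by joining the segments with
    # _KVDB.SEPARATOR, conceptually (<SEP> stands for whatever the separator is):
    #
    #   <TRANSLATION><SEP>system1<SEP>key1<SEP>value1<SEP>system2<SEP>key2
    #
    # .. and the translated value itself is kept under the hash field 'value2'.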
# ################################################################################################################################
def reconfigure(self, config):
# type: (dict) -> None
self.config = config
self.init()
# ################################################################################################################################
def set_password(self, password):
        # type: (str) -> None
self.config['password'] = password
self.init()
# ################################################################################################################################
def copy(self):
""" Returns an KVDB with the configuration copied over from self. Note that
the object returned isn't initialized, in particular, the connection to the
database won't have been initialized.
"""
kvdb = KVDB()
kvdb.config = self.config
kvdb.decrypt_func = self.decrypt_func
return kvdb
# ################################################################################################################################
def close(self):
self.conn.connection_pool.disconnect()
# ################################################################################################################################
def ping(self):
try:
spawn_greenlet(self.conn.ping)
except Exception as e:
logger.warning('Could not ping %s due to `%s`', self.conn, e.args[0])
else:
logger.info('Redis ping OK -> %s', self.conn)
# ################################################################################################################################
@staticmethod
def is_config_enabled(config):
""" Returns True if the configuration indicates that Redis is enabled.
"""
# type: (dict) -> bool
        return bool(config.get('host') and config.get('port'))
# ################################################################################################################################
# ################################################################################################################################
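# A minimal usage sketch - not part of the original module. It assumes a Redis
# server listening on localhost:6379 and uses a no-op decryption function;
# all the values below are hypothetical.
if __name__ == '__main__':
    from bunch import Bunch
    config = Bunch(host='localhost', port=6379, db=0)
    kvdb = KVDB(config=config, decrypt_func=lambda value: value)
    kvdb.init()  # Builds the connection and pings the server
    kvdb.close() # Disposes of the underlying connection pool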
| 9,540 | Python | .py | 174 | 45.701149 | 130 | 0.418817 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,639 | __init__.py | zatosource_zato/code/zato-common/src/zato/common/kvdb/__init__.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154 | Python | .py | 5 | 29.4 | 64 | 0.687075 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,640 | ping.py | zatosource_zato/code/zato-common/src/zato/common/odb/ping.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# ################################################################################################################################
def get_ping_query(fs_sql_config, engine_params, default_query='select 1+1'):
""" Returns a ping query for input engine and component-wide SQL configuration.
"""
ping_query = None
for key, value in fs_sql_config.items():
if key == engine_params['engine']:
ping_query = value.get('ping_query')
break
if not ping_query:
        # We special-case SQLite because it is never configured via sql.ini
if engine_params['engine'] == 'sqlite':
ping_query = 'SELECT 1'
if not ping_query:
# Use the default one, if we have any ..
if default_query:
ping_query = default_query
# .. otherwise, report an error.
else:
raise ValueError('Could not find ping_query for {}'.format(engine_params))
    # If we are here, it means that a query was found
return ping_query
# ################################################################################################################################
# ################################################################################################################################
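# A minimal usage sketch - not part of the original module; the engine names
# and the ping query below are hypothetical:
if __name__ == '__main__':
    fs_sql_config = {
        'mysql+pymysql': {'ping_query': 'SELECT 1'}
    }
    print(get_ping_query(fs_sql_config, {'engine': 'mysql+pymysql'})) # Found in fs_sql_config -> 'SELECT 1'
    print(get_ping_query(fs_sql_config, {'engine': 'sqlite'}))        # Special-cased -> 'SELECT 1'
    print(get_ping_query(fs_sql_config, {'engine': 'oracle'}))        # Falls back to the default -> 'select 1+1'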
| 1,680 | Python | .py | 31 | 47.129032 | 130 | 0.428222 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,641 | const.py | zatosource_zato/code/zato-common/src/zato/common/odb/const.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
WMQ_DEFAULT_PRIORITY = 5
| 174 | Python | .py | 6 | 27.5 | 64 | 0.70303 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,642 | api.py | zatosource_zato/code/zato-common/src/zato/common/odb/api.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from contextlib import closing
from copy import deepcopy
from datetime import datetime
from io import StringIO
from logging import DEBUG, getLogger
from threading import RLock
from time import time
# SQLAlchemy
from sqlalchemy import and_, create_engine, event, select
from sqlalchemy.exc import IntegrityError, OperationalError
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.orm.query import Query
from sqlalchemy.pool import NullPool
from sqlalchemy.sql.expression import true
from sqlalchemy.sql.type_api import TypeEngine
# Bunch
from bunch import Bunch, bunchify
# Zato
from zato.common.api import DEPLOYMENT_STATUS, GENERIC, HTTP_SOAP, MS_SQL, NotGiven, PUBSUB, SEC_DEF_TYPE, SECRET_SHADOW, \
SERVER_UP_STATUS, UNITTEST, ZATO_NONE, ZATO_ODB_POOL_NAME
from zato.common.exception import Inactive
from zato.common.mssql_direct import MSSQLDirectAPI, SimpleSession
from zato.common.odb import query
from zato.common.odb.ping import get_ping_query
from zato.common.odb.model import APIKeySecurity, Cluster, DeployedService, DeploymentPackage, DeploymentStatus, HTTPBasicAuth, \
JWT, NTLM, OAuth, PubSubEndpoint, SecurityBase, Server, Service, TLSChannelSecurity, VaultConnection
from zato.common.odb.testing import UnittestEngine
from zato.common.odb.query.pubsub import subscription as query_ps_subscription
from zato.common.odb.query import generic as query_generic
from zato.common.util.api import current_host, get_component_name, get_engine_url, new_cid, parse_extra_into_dict, \
parse_tls_channel_security_definition, spawn_greenlet
from zato.common.util.sql import ElemsWithOpaqueMaker, elems_with_opaque
from zato.common.util.url_dispatcher import get_match_target
from zato.sso.odb.query import get_rate_limiting_info as get_sso_user_rate_limiting_info
# ################################################################################################################################
if 0:
from sqlalchemy.orm import Session as SASession
from zato.common.crypto.api import CryptoManager
from zato.common.odb.model import Cluster as ClusterModel, Server as ServerModel
from zato.common.typing_ import anyset, callable_, commondict
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
rate_limit_keys = 'is_rate_limit_active', 'rate_limit_def', 'rate_limit_type', 'rate_limit_check_parent_def'
unittest_fs_sql_config = {
UNITTEST.SQL_ENGINE: {
'ping_query': 'SELECT 1+1'
}
}
# ################################################################################################################################
ServiceTable = Service.__table__
ServiceTableInsert = ServiceTable.insert
DeployedServiceTable = DeployedService.__table__
DeployedServiceInsert = DeployedServiceTable.insert
DeployedServiceDelete = DeployedServiceTable.delete
# ################################################################################################################################
# ################################################################################################################################
# Based on https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/WriteableTuple
class SQLRow:
def __init__(self, elem):
object.__setattr__(self, '_elem', elem)
# ################################################################################################################################
def __getattr__(self, key):
return getattr(self._elem, key)
# ################################################################################################################################
def __getitem__(self, idx):
return self._elem.__getitem__(idx)
# ################################################################################################################################
def __setitem__(self, idx, value):
return self._elem.__setitem__(idx, value)
# ################################################################################################################################
    def __nonzero__(self):
        return bool(self._elem)
    __bool__ = __nonzero__ # Python 3 truth-testing uses __bool__, not __nonzero__
# ################################################################################################################################
def __repr__(self):
return '<SQLRow at {}>'.format(hex(id(self)))
# ################################################################################################################################
def get_value(self) -> 'commondict':
return self._elem._asdict()
# For backward compatibility
WritableKeyedTuple = SQLRow
# ################################################################################################################################
# ################################################################################################################################
class SessionWrapper:
""" Wraps an SQLAlchemy session.
"""
_Session: 'SASession'
def __init__(self):
self.session_initialized = False
self.pool = None # type: SQLConnectionPool
self.config = None # type: dict
self.is_sqlite = None # type: bool
self.logger = logging.getLogger(self.__class__.__name__)
def init_session(self, *args, **kwargs):
spawn_greenlet(self._init_session, *args, **kwargs)
def _init_session(self, name, config, pool, use_scoped_session=True):
        # type: (str, dict, SQLConnectionPool, bool) -> None
self.config = config
self.fs_sql_config = config['fs_sql_config']
self.pool = pool
is_ms_sql_direct = config['engine'] == MS_SQL.ZATO_DIRECT
if is_ms_sql_direct:
self._Session = SimpleSession(self.pool.engine)
else:
if use_scoped_session:
self._Session = scoped_session(sessionmaker(bind=self.pool.engine, query_cls=WritableTupleQuery))
else:
self._Session = sessionmaker(bind=self.pool.engine, query_cls=WritableTupleQuery)
self._session = self._Session()
self.session_initialized = True
self.is_sqlite = self.pool.engine and self.pool.engine.name == 'sqlite'
def session(self) -> 'SASession':
return self._Session()
def close(self):
self._session.close()
# ################################################################################################################################
# ################################################################################################################################
class WritableTupleQuery(Query):
def __iter__(self):
out = super(WritableTupleQuery, self).__iter__()
columns_desc = self.column_descriptions
first_type = columns_desc[0]['type']
len_columns_desc = len(columns_desc)
# This is a simple result of a query such as session.query(ObjectName).count()
if len_columns_desc == 1 and isinstance(first_type, TypeEngine):
return out
# A list of objects, e.g. from .all()
elif len_columns_desc > 1:
return (SQLRow(elem) for elem in out)
# Anything else
else:
return out
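# A small illustration of the SQLRow wrapper above, with a namedtuple standing in
# for a SQLAlchemy result row - the data is hypothetical:
#
#   from collections import namedtuple
#   Row = namedtuple('Row', 'id name')
#   row = SQLRow(Row(1, 'demo'))
#
#   row.name        # -> 'demo'
#   row[0]          # -> 1
#   row.get_value() # -> {'id': 1, 'name': 'demo'}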
# ################################################################################################################################
# ################################################################################################################################
class SQLConnectionPool:
""" A pool of SQL connections wrapping an SQLAlchemy engine.
"""
def __init__(self, name, config, config_no_sensitive, should_init=True):
        # type: (str, dict, dict, bool) -> None
self.name = name
self.config = config
self.config_no_sensitive = config_no_sensitive
self.logger = getLogger(self.__class__.__name__)
self.has_debug = self.logger.isEnabledFor(DEBUG)
self.engine = None
        self.engine_name = config['engine'] # Note that self.engine.name is 'mysql' whereas self.engine_name may be 'mysql+pymysql'
if should_init:
self.init()
def init(self):
_extra = {
'pool_pre_ping': True, # Make sure SQLAlchemy 1.2+ can refresh connections on transient errors
}
# MySQL only
if self.engine_name.startswith('mysql'):
_extra['pool_recycle'] = 600
# Postgres-only
elif self.engine_name.startswith('postgres'):
_extra['connect_args'] = {'application_name': get_component_name()}
extra = self.config.get('extra') # Optional, hence .get
_extra.update(parse_extra_into_dict(extra))
# SQLite has no pools
if self.engine_name != 'sqlite':
_extra['pool_size'] = int(self.config.get('pool_size', 1))
if _extra['pool_size'] == 0:
_extra['poolclass'] = NullPool
engine_url = get_engine_url(self.config)
try:
self.engine = self._create_engine(engine_url, self.config, _extra)
except Exception as e:
self.logger.warning('Could not create SQL connection `%s`, e:`%s`', self.name, e.args[0])
if self.engine and (not self._is_unittest_engine(engine_url)) and self._is_sa_engine(engine_url):
event.listen(self.engine, 'checkin', self.on_checkin)
event.listen(self.engine, 'checkout', self.on_checkout)
event.listen(self.engine, 'connect', self.on_connect)
event.listen(self.engine, 'first_connect', self.on_first_connect)
self.checkins = 0
self.checkouts = 0
# ################################################################################################################################
def __str__(self):
return '<{} at {}, config:[{}]>'.format(self.__class__.__name__, hex(id(self)), self.config_no_sensitive)
# ################################################################################################################################
__repr__ = __str__
# ################################################################################################################################
def _is_sa_engine(self, engine_url):
        # type: (str) -> bool
return 'zato+mssql1' not in engine_url
# ################################################################################################################################
def _is_unittest_engine(self, engine_url):
        # type: (str) -> bool
return 'zato+unittest' in engine_url
# ################################################################################################################################
def _create_unittest_engine(self, engine_url, config):
# type: (str, dict)
return UnittestEngine(engine_url, config)
# ################################################################################################################################
def _create_engine(self, engine_url, config, extra):
if self._is_unittest_engine(engine_url):
return self._create_unittest_engine(engine_url, config)
elif self._is_sa_engine(engine_url):
return create_engine(engine_url, **extra)
else:
# This is a direct MS SQL connection
connect_kwargs = {
'dsn': config['host'],
'port': config['port'],
'database': config['db_name'],
'user': config['username'],
'password': config['password'],
'login_timeout': 3,
'as_dict': True,
}
for name in MS_SQL.EXTRA_KWARGS:
value = extra.get(name, NotGiven)
if value is not NotGiven:
connect_kwargs[name] = value
return MSSQLDirectAPI(config['name'], config['pool_size'], connect_kwargs, extra)
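    # How engine URLs are dispatched above - the URLs below are hypothetical:
    #
    #   'zato+unittest://u:p@h/db'      -> UnittestEngine
    #   'postgresql+pg8000://u:p@h/db'  -> SQLAlchemy's create_engine
    #   'zato+mssql1://u:p@h/db'        -> MSSQLDirectAPI (direct MS SQL)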
# ################################################################################################################################
def on_checkin(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('Checked in dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
self.checkins += 1
# ################################################################################################################################
def on_checkout(self, dbapi_conn, conn_record, conn_proxy):
if self.has_debug:
self.logger.debug('Checked out dbapi_conn:%s, conn_record:%s, conn_proxy:%s',
dbapi_conn, conn_record, conn_proxy)
self.checkouts += 1
self.logger.debug('co-cin-diff %d-%d-%d', self.checkouts, self.checkins, self.checkouts - self.checkins)
# ################################################################################################################################
def on_connect(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('Connect dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
# ################################################################################################################################
def on_first_connect(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('First connect dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
# ################################################################################################################################
def ping(self, fs_sql_config):
""" Pings the SQL database and returns the response time, in milliseconds.
"""
if not self.engine:
return
if hasattr(self.engine, 'ping'):
func = self.engine.ping
query = self.engine.ping_query
args = []
else:
func = self.engine.connect().execute
query = get_ping_query(fs_sql_config, self.config)
args = [query]
self.logger.debug('About to ping the SQL connection pool:`%s`, query:`%s`', self.config_no_sensitive, query)
start_time = time()
func(*args)
response_time = time() - start_time
self.logger.debug('Ping OK, pool:`%s`, response_time:`%s` s', self.config_no_sensitive, response_time)
return response_time
# ################################################################################################################################
def _conn(self):
""" Returns an SQLAlchemy connection object.
"""
return self.engine.connect()
# ################################################################################################################################
conn = property(fget=_conn, doc=_conn.__doc__)
# ################################################################################################################################
def _impl(self):
""" Returns the underlying connection's implementation, the SQLAlchemy engine.
"""
return self.engine
# ################################################################################################################################
impl = property(fget=_impl, doc=_impl.__doc__)
# ################################################################################################################################
class PoolStore:
""" A main class for accessing all of the SQL connection pools. Each server
thread has its own store.
"""
def __init__(self, sql_conn_class=SQLConnectionPool):
self.sql_conn_class = sql_conn_class
self._lock = RLock()
self.wrappers = {}
self.logger = getLogger(self.__class__.__name__)
# ################################################################################################################################
def __getitem__(self, name, enforce_is_active=True):
""" Checks out the connection pool. If enforce_is_active is False,
the pool's is_active flag will be ignored.
"""
with self._lock:
if enforce_is_active:
wrapper = self.wrappers[name]
if wrapper.config.get('is_active', True):
return wrapper
raise Inactive(name)
else:
return self.wrappers[name]
# ################################################################################################################################
get = __getitem__
# ################################################################################################################################
def __setitem__(self, name, config):
""" Stops a connection pool if it exists and replaces it with a new one
using updated settings.
"""
with self._lock:
if name in self.wrappers:
del self[name]
config_no_sensitive = {}
for key in config:
if key != 'callback_func':
config_no_sensitive[key] = config[key]
config_no_sensitive['password'] = SECRET_SHADOW
pool = self.sql_conn_class(name, config, config_no_sensitive)
wrapper = SessionWrapper()
wrapper.init_session(name, config, pool)
self.wrappers[name] = wrapper
set_item = __setitem__
# ################################################################################################################################
def add_unittest_item(self, name, fs_sql_config=unittest_fs_sql_config):
self.set_item(name, {
            'password': 'password.{}'.format(new_cid()),
'engine': UNITTEST.SQL_ENGINE,
'fs_sql_config': fs_sql_config,
'is_active': True,
})
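    # E.g., in a test suite one could write (the pool name is hypothetical):
    #
    #   store = PoolStore()
    #   store.add_unittest_item('my-test-pool')
    #   wrapper = store['my-test-pool']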
# ################################################################################################################################
def __delitem__(self, name):
""" Stops a pool and deletes it from the store.
"""
with self._lock:
engine = self.wrappers[name].pool.engine
if engine:
engine.dispose()
del self.wrappers[name]
# ################################################################################################################################
def __str__(self):
out = StringIO()
out.write('<{} at {} wrappers:['.format(self.__class__.__name__, hex(id(self))))
out.write(', '.join(sorted(self.wrappers.keys())))
out.write(']>')
return out.getvalue()
# ################################################################################################################################
__repr__ = __str__
# ################################################################################################################################
def change_password(self, name, password):
""" Updates the password which means recreating the pool using the new
password.
"""
with self._lock:
# Do not check if the connection is active when changing the password,
# sometimes it is desirable to change it even if it is Inactive.
item = self.get(name, enforce_is_active=False)
item.pool.engine.dispose()
config = deepcopy(self.wrappers[name].pool.config)
config['password'] = password
self[name] = config
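    # E.g. (the values are hypothetical):
    #
    #   store.change_password('my-pool', 'new-password')
    #
    # .. which disposes of the current engine and recreates the pool with
    # the updated configuration.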
# ################################################################################################################################
def cleanup_on_stop(self):
""" Invoked when the server is stopping.
"""
with self._lock:
for _ignored_name, wrapper in self.wrappers.items():
if wrapper.pool:
if wrapper.pool.engine:
wrapper.pool.engine.dispose()
# ################################################################################################################################
class _Server:
""" A plain Python object which is used instead of an SQLAlchemy model so the latter is not tied to a session
for as long a server is up.
"""
def __init__(self, odb_server, odb_cluster):
self.id = odb_server.id
self.name = odb_server.name
self.last_join_status = odb_server.last_join_status
self.token = odb_server.token
self.cluster_id = odb_cluster.id
self.cluster = odb_cluster
# ################################################################################################################################
class ODBManager(SessionWrapper):
""" Manages connections to a given component's Operational Database.
"""
parallel_server: 'ParallelServer'
well_known_data:'str'
token:'str'
crypto_manager:'CryptoManager'
server_id:'int'
server_name:'str'
cluster_id:'int'
pool:'SQLConnectionPool'
decrypt_func:'callable_'
server:'ServerModel'
cluster:'ClusterModel'
# ################################################################################################################################
def on_deployment_finished(self):
""" Commits all the implicit BEGIN blocks opened by SELECTs.
"""
self._session.commit()
# ################################################################################################################################
def fetch_server(self, odb_config):
""" Fetches the server from the ODB. Also sets the 'cluster' attribute
to the value pointed to by the server's .cluster attribute.
"""
if not self.session_initialized:
self.init_session(ZATO_ODB_POOL_NAME, odb_config, self.pool, False)
with closing(self.session()) as session:
try:
server = session.query(Server).\
filter(Server.token == self.token).\
one()
self.server = _Server(server, server.cluster)
self.server_id = server.id
self.cluster = server.cluster
self.cluster_id = server.cluster.id
return self.server
except Exception:
msg = 'Could not find server in ODB, token:`{}`'.format(
self.token)
logger.error(msg)
raise
# ################################################################################################################################
def get_servers(self, up_status=SERVER_UP_STATUS.RUNNING, filter_out_self=True):
""" Returns all servers matching criteria provided on input.
"""
with closing(self.session()) as session:
query = session.query(Server).\
filter(Server.cluster_id == self.cluster_id)
if up_status:
query = query.filter(Server.up_status == up_status)
if filter_out_self:
query = query.filter(Server.id != self.server_id)
return query.all()
# ################################################################################################################################
def get_default_internal_pubsub_endpoint(self):
with closing(self.session()) as session:
return session.query(PubSubEndpoint).\
filter(PubSubEndpoint.name==PUBSUB.DEFAULT.INTERNAL_ENDPOINT_NAME).\
filter(PubSubEndpoint.endpoint_type==PUBSUB.ENDPOINT_TYPE.INTERNAL.id).\
filter(PubSubEndpoint.cluster_id==self.cluster_id).\
one()
# ################################################################################################################################
def get_missing_services(self, server, locally_deployed) -> 'anyset':
""" Returns services deployed on the server given on input that are not among locally_deployed.
"""
missing = set()
with closing(self.session()) as session:
server_services = session.query(
Service.id, Service.name,
DeployedService.source_path, DeployedService.source).\
join(DeployedService, Service.id==DeployedService.service_id).\
join(Server, DeployedService.server_id==Server.id).\
filter(Service.is_internal!=true()).\
all()
for item in server_services:
if item.name not in locally_deployed:
missing.add(item)
return missing
# ################################################################################################################################
def server_up_down(self, token, status, update_host=False, bind_host=None, bind_port=None, preferred_address=None,
crypto_use_tls=None):
""" Updates the information regarding the server is RUNNING or CLEAN_DOWN etc.
and what host it's running on.
"""
with closing(self.session()) as session:
server = session.query(Server).\
filter(Server.token==token).\
first()
# It may be the case that the server has been deleted from web-admin before it shut down,
# in which case during the shut down it will not be able to find itself in ODB anymore.
if not server:
logger.info('No server found for token `%s`, status:`%s`', token, status)
return
server.up_status = status
server.up_mod_date = datetime.utcnow()
if update_host:
server.host = current_host()
server.bind_host = bind_host
server.bind_port = bind_port
server.preferred_address = preferred_address
server.crypto_use_tls = crypto_use_tls
session.add(server)
session.commit()
# ################################################################################################################################
def _copy_rate_limiting_config(self, copy_from, copy_to, _keys=rate_limit_keys):
for key in _keys:
copy_to[key] = copy_from.get(key)
# ################################################################################################################################
def get_url_security(self, cluster_id, connection=None, any_internal=HTTP_SOAP.ACCEPT.ANY_INTERNAL):
""" Returns the security configuration of HTTP URLs.
"""
        # Temporary cache of security definitions already visited so as not to
        # look up the same ones for each HTTP object that uses them.
sec_def_cache = {}
with closing(self.session()) as session:
# What DB class to fetch depending on the string value of the security type.
sec_type_db_class = {
SEC_DEF_TYPE.APIKEY: APIKeySecurity,
SEC_DEF_TYPE.BASIC_AUTH: HTTPBasicAuth,
SEC_DEF_TYPE.JWT: JWT,
SEC_DEF_TYPE.NTLM: NTLM,
SEC_DEF_TYPE.OAUTH: OAuth,
SEC_DEF_TYPE.TLS_CHANNEL_SEC: TLSChannelSecurity,
SEC_DEF_TYPE.VAULT: VaultConnection,
}
result = {}
q = query.http_soap_security_list(session, cluster_id, connection)
columns = Bunch()
# So ConfigDict has its data in the format it expects
for c in q.statement.columns:
columns[c.name] = None
for item in elems_with_opaque(q):
target = get_match_target({
'http_accept': item.get('http_accept'),
'http_method': item.get('method'),
'soap_action': item.soap_action,
'url_path': item.url_path,
}, http_methods_allowed_re=self.parallel_server.http_methods_allowed_re)
result[target] = Bunch()
result[target].is_active = item.is_active
result[target].transport = item.transport
result[target].data_format = item.data_format
result[target].sec_use_rbac = item.sec_use_rbac
if item.security_id:
# Ignore WS-Security (WSS) which has been removed in 3.2
if item.sec_type == 'wss':
continue
# For later use
result[target].sec_def = Bunch()
# We either have already seen this security definition ..
if item.security_id in sec_def_cache:
sec_def = sec_def_cache[item.security_id]
# .. or we have not, in which case we need to look it up
# and then cache it for later use.
else:
# Will raise KeyError if the DB gets somehow misconfigured.
db_class = sec_type_db_class[item.sec_type]
sec_def_item = session.query(db_class).\
filter(db_class.id==item.security_id).\
one()
sec_def = bunchify(sec_def_item.asdict())
ElemsWithOpaqueMaker.process_config_dict(sec_def)
sec_def_cache[item.security_id] = sec_def
# Common things first
result[target].sec_def.id = sec_def.id
result[target].sec_def.name = sec_def.name
result[target].sec_def.password = self.decrypt_func(sec_def.password or '')
result[target].sec_def.sec_type = item.sec_type
if item.sec_type == SEC_DEF_TYPE.BASIC_AUTH:
result[target].sec_def.username = sec_def.username
result[target].sec_def.realm = sec_def.realm
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
elif item.sec_type == SEC_DEF_TYPE.JWT:
result[target].sec_def.username = sec_def.username
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
elif item.sec_type == SEC_DEF_TYPE.APIKEY:
result[target].sec_def.header = 'HTTP_{}'.format(sec_def.header.upper().replace('-', '_'))
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
                    # Note that this branch is unreachable in practice - 'wss' items are skipped above - and is kept only for backward compatibility.
                    elif item.sec_type == SEC_DEF_TYPE.WSS:
result[target].sec_def.username = sec_def.username
result[target].sec_def.password_type = sec_def.password_type
result[target].sec_def.reject_empty_nonce_creat = sec_def.reject_empty_nonce_creat
result[target].sec_def.reject_stale_tokens = sec_def.reject_stale_tokens
result[target].sec_def.reject_expiry_limit = sec_def.reject_expiry_limit
result[target].sec_def.nonce_freshness_time = sec_def.nonce_freshness_time
elif item.sec_type == SEC_DEF_TYPE.TLS_CHANNEL_SEC:
result[target].sec_def.value = dict(parse_tls_channel_security_definition(sec_def.value))
elif item.sec_type == SEC_DEF_TYPE.NTLM:
result[target].sec_def.username = sec_def.username
else:
result[target].sec_def = ZATO_NONE
return result, columns
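    # The first element returned maps match targets to per-channel Bunch objects,
    # conceptually (all the values are hypothetical):
    #
    #   result[target] = Bunch(
    #       is_active=True, transport='plain_http', data_format='json', sec_use_rbac=False,
    #       sec_def=Bunch(id=1, name='My Definition', sec_type='basic_auth', ...))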
# ################################################################################################################################
def get_sql_internal_service_list(self, cluster_id):
""" Returns a list of service name and IDs for input cluster ID. It represents what is currently found in the ODB
and is used during server startup to decide if any new services should be added from what is found in the filesystem.
"""
with closing(self.session()) as session:
return session.query(
Service.id,
Service.impl_name,
Service.is_active,
Service.slow_threshold,
).\
filter(Service.cluster_id==cluster_id).\
all()
# ################################################################################################################################
def get_basic_data_service_list(self, session):
""" Returns basic information about all the services in ODB.
"""
query = select([
ServiceTable.c.id,
ServiceTable.c.name,
ServiceTable.c.impl_name,
]).where(
ServiceTable.c.cluster_id==self.cluster_id
)
return session.execute(query).\
fetchall()
# ################################################################################################################################
def get_basic_data_deployed_service_list(self):
""" Returns basic information about all the deployed services in ODB.
"""
with closing(self.session()) as session:
query = select([
ServiceTable.c.name,
DeployedServiceTable.c.source,
]).where(and_(
DeployedServiceTable.c.service_id==ServiceTable.c.id,
DeployedServiceTable.c.server_id==self.server_id
))
return session.execute(query).\
fetchall()
# ################################################################################################################################
def add_services(self, session, data):
# type: (list[dict]) -> None
try:
session.execute(ServiceTableInsert().values(data))
except IntegrityError:
# This can be ignored because it is possible that there will be
# more than one server trying to insert rows related to services
# that are hot-deployed from web-admin or another source.
logger.debug('Ignoring IntegrityError with `%s`', data)
# ################################################################################################################################
def add_deployed_services(self, session, data):
try:
session.execute(DeployedServiceInsert().values(data))
except OperationalError as e:
if 'duplicate key value violates unique constraint' in str(e):
pass
else:
raise
# ################################################################################################################################
def drop_deployed_services_by_name(self, session, service_id_list):
session.execute(
DeployedServiceDelete().\
where(DeployedService.service_id.in_(service_id_list))
)
# ################################################################################################################################
def drop_deployed_services(self, server_id):
""" Removes all the deployed services from a server.
"""
with closing(self.session()) as session:
session.execute(
DeployedServiceDelete().\
where(DeployedService.server_id==server_id)
)
session.commit()
# ################################################################################################################################
def is_service_active(self, service_id):
""" Returns whether the given service is active or not.
"""
with closing(self.session()) as session:
return session.query(Service.is_active).\
filter(Service.id==service_id).\
one()[0]
# ################################################################################################################################
def hot_deploy(self, deployment_time, details, payload_name, payload, server_id):
""" Inserts hot-deployed data into the DB along with setting the preliminary
AWAITING_DEPLOYMENT status for each of the servers this server's cluster
is aware of.
"""
with closing(self.session()) as session:
# Create the deployment package info ..
dp = DeploymentPackage()
dp.deployment_time = deployment_time
dp.details = details
dp.payload_name = payload_name
dp.payload = payload
dp.server_id = server_id
# .. add it to the session ..
session.add(dp)
# .. for each of the servers in this cluster set the initial status ..
servers = session.query(Cluster).\
filter(Cluster.id == self.server.cluster_id).\
one().servers
for server in servers:
ds = DeploymentStatus()
ds.package_id = dp.id
ds.server_id = server.id
ds.status = DEPLOYMENT_STATUS.AWAITING_DEPLOYMENT
ds.status_change_time = datetime.utcnow()
session.add(ds)
session.commit()
return dp.id
# ################################################################################################################################
def add_delivery(self, deployment_time, details, service, source_info):
""" Adds information about the server's deployed service into the ODB.
"""
raise NotImplementedError()
# ################################################################################################################################
def get_internal_channel_list(self, cluster_id, needs_columns=False):
""" Returns the list of internal HTTP/SOAP channels, that is,
channels pointing to internal services.
"""
with closing(self.session()) as session:
            return query.internal_channel_list(session, cluster_id, needs_columns)
# ################################################################################################################################
    def get_http_soap_list(self, cluster_id, connection=None, transport=None, needs_columns=False):
""" Returns the list of all HTTP/SOAP connections.
"""
with closing(self.session()) as session:
return query.http_soap_list(session, cluster_id, connection, transport, True, None, needs_columns)
# ################################################################################################################################
def get_job_list(self, cluster_id, needs_columns=False):
""" Returns a list of jobs defined on the given cluster.
"""
with closing(self.session()) as session:
return query.job_list(session, cluster_id, None, needs_columns)
# ################################################################################################################################
def get_service_list(self, cluster_id, needs_columns=False):
""" Returns a list of services defined on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.service_list(session, cluster_id, needs_columns=needs_columns))
# ################################################################################################################################
def get_service_id_list(self, session, cluster_id, name_list):
""" Returns a list of IDs matching input service names.
"""
return query.service_id_list(session, cluster_id, name_list)
# ################################################################################################################################
def get_service_list_with_include(self, session, cluster_id, include_list, needs_columns=False):
""" Returns a list of all services from the input include_list.
"""
return query.service_list_with_include(session, cluster_id, include_list, needs_columns)
# ################################################################################################################################
def get_apikey_security_list(self, cluster_id, needs_columns=False):
""" Returns a list of API keys existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.apikey_security_list(session, cluster_id, needs_columns))
# ################################################################################################################################
def get_aws_security_list(self, cluster_id, needs_columns=False):
""" Returns a list of AWS definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return query.aws_security_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_basic_auth_list(self, cluster_id, cluster_name, needs_columns=False):
""" Returns a list of HTTP Basic Auth definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.basic_auth_list(session, cluster_id, cluster_name, needs_columns))
# ################################################################################################################################
def get_jwt_list(self, cluster_id, cluster_name, needs_columns=False):
""" Returns a list of JWT definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.jwt_list(session, cluster_id, cluster_name, needs_columns))
# ################################################################################################################################
def get_ntlm_list(self, cluster_id, needs_columns=False):
""" Returns a list of NTLM definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return query.ntlm_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_oauth_list(self, cluster_id, needs_columns=False):
""" Returns a list of OAuth accounts existing on the given cluster.
"""
with closing(self.session()) as session:
return query.oauth_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_ca_cert_list(self, cluster_id, needs_columns=False):
""" Returns a list of TLS CA certs on the given cluster.
"""
with closing(self.session()) as session:
return query.tls_ca_cert_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_channel_sec_list(self, cluster_id, needs_columns=False):
""" Returns a list of definitions for securing TLS channels.
"""
with closing(self.session()) as session:
return query.tls_channel_sec_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_key_cert_list(self, cluster_id, needs_columns=False):
""" Returns a list of TLS key/cert pairs on the given cluster.
"""
with closing(self.session()) as session:
return query.tls_key_cert_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_wss_list(self, cluster_id, needs_columns=False):
""" Returns a list of WS-Security definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.wss_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_vault_connection_list(self, cluster_id, needs_columns=False):
""" Returns a list of Vault connections on the given cluster.
"""
with closing(self.session()) as session:
return query.vault_connection_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_xpath_sec_list(self, cluster_id, needs_columns=False):
""" Returns a list of XPath-based security definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.xpath_sec_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_definition_amqp(self, cluster_id, def_id):
""" Returns an AMQP definition's details.
"""
with closing(self.session()) as session:
return query.definition_amqp(session, cluster_id, def_id)
# ################################################################################################################################
def get_definition_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of AMQP definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.definition_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_amqp(self, cluster_id, out_id):
""" Returns an outgoing AMQP connection's details.
"""
with closing(self.session()) as session:
return query.out_amqp(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing AMQP connections.
"""
with closing(self.session()) as session:
return query.out_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_amqp(self, cluster_id, channel_id):
""" Returns a particular AMQP channel.
"""
with closing(self.session()) as session:
return query.channel_amqp(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of AMQP channels.
"""
with closing(self.session()) as session:
return query.channel_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_def_wmq(self, cluster_id, def_id):
""" Returns an IBM MQ definition's details.
"""
with closing(self.session()) as session:
return query.definition_wmq(session, cluster_id, def_id)
# ################################################################################################################################
def get_definition_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of IBM MQ definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.definition_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_wmq(self, cluster_id, out_id):
""" Returns an outgoing IBM MQ connection's details.
"""
with closing(self.session()) as session:
return query.out_wmq(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing IBM MQ connections.
"""
with closing(self.session()) as session:
return query.out_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_wmq(self, cluster_id, channel_id):
""" Returns a particular IBM MQ channel.
"""
with closing(self.session()) as session:
return query.channel_wmq(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of IBM MQ channels.
"""
with closing(self.session()) as session:
return query.channel_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_zmq(self, cluster_id, out_id):
""" Returns an outgoing ZMQ connection's details.
"""
with closing(self.session()) as session:
return query.out_zmq(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_zmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing ZMQ connections.
"""
with closing(self.session()) as session:
return query.out_zmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_zmq(self, cluster_id, channel_id):
""" Returns a particular ZMQ channel.
"""
with closing(self.session()) as session:
return query.channel_zmq(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_zmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of ZMQ channels.
"""
with closing(self.session()) as session:
return query.channel_zmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_file_transfer_list(self, cluster_id, needs_columns=False):
""" Returns a list of file transfer channels.
"""
with closing(self.session()) as session:
return query_generic.connection_list(
session, cluster_id, GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER, needs_columns)
# ################################################################################################################################
def get_channel_web_socket(self, cluster_id, channel_id):
""" Returns a particular WebSocket channel.
"""
with closing(self.session()) as session:
return query.channel_web_socket(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_web_socket_list(self, cluster_id, needs_columns=False):
""" Returns a list of WebSocket channels.
"""
with closing(self.session()) as session:
return query.channel_web_socket_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sql(self, cluster_id, out_id):
""" Returns an outgoing SQL connection's details.
"""
with closing(self.session()) as session:
return query.out_sql(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_sql_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SQL connections.
"""
with closing(self.session()) as session:
return query.out_sql_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_odoo(self, cluster_id, out_id):
""" Returns an outgoing Odoo connection's details.
"""
with closing(self.session()) as session:
return query.out_odoo(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_odoo_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing Odoo connections.
"""
with closing(self.session()) as session:
return query.out_odoo_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sap(self, cluster_id, out_id):
""" Returns an outgoing SAP RFC connection's details.
"""
with closing(self.session()) as session:
return query.out_sap(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_sap_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SAP RFC connections.
"""
with closing(self.session()) as session:
return query.out_sap_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sftp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SFTP connections.
"""
with closing(self.session()) as session:
return query_generic.connection_list(session, cluster_id, GENERIC.CONNECTION.TYPE.OUTCONN_SFTP, needs_columns)
# ################################################################################################################################
def get_out_ftp(self, cluster_id, out_id):
""" Returns an outgoing FTP connection's details.
"""
with closing(self.session()) as session:
return query.out_ftp(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_ftp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing FTP connections.
"""
with closing(self.session()) as session:
return query.out_ftp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cache_builtin(self, cluster_id, id):
""" Returns a built-in cache definition's details.
"""
with closing(self.session()) as session:
return query.cache_builtin(session, cluster_id, id)
# ################################################################################################################################
def get_cache_builtin_list(self, cluster_id, needs_columns=False):
""" Returns a list of built-in cache definitions.
"""
with closing(self.session()) as session:
return query.cache_builtin_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cache_memcached(self, cluster_id, id):
""" Returns a Memcached-based definition's details.
"""
with closing(self.session()) as session:
return query.cache_memcached(session, cluster_id, id)
# ################################################################################################################################
def get_cache_memcached_list(self, cluster_id, needs_columns=False):
""" Returns a list of Memcached-based cache definitions.
"""
with closing(self.session()) as session:
return query.cache_memcached_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_namespace_list(self, cluster_id, needs_columns=False):
""" Returns a list of XML namespaces.
"""
with closing(self.session()) as session:
return query.namespace_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_xpath_list(self, cluster_id, needs_columns=False):
""" Returns a list of XPath expressions.
"""
with closing(self.session()) as session:
return query.xpath_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_json_pointer_list(self, cluster_id, needs_columns=False):
""" Returns a list of JSON Pointer expressions.
"""
with closing(self.session()) as session:
return query.json_pointer_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cloud_aws_s3_list(self, cluster_id, needs_columns=False):
""" Returns a list of AWS S3 connections.
"""
with closing(self.session()) as session:
return query.cloud_aws_s3_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_pubsub_topic_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub topics defined in a cluster.
"""
return elems_with_opaque(query.pubsub_topic_list(self._session, cluster_id, needs_columns))
# ################################################################################################################################
def get_pubsub_subscription_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub subscriptions defined in a cluster.
"""
return query_ps_subscription.pubsub_subscription_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_notif_sql_list(self, cluster_id, needs_columns=False):
""" Returns a list of SQL notification definitions.
"""
return query.notif_sql_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cassandra_conn_list(self, cluster_id, needs_columns=False):
""" Returns a list of Cassandra connections.
"""
return query.cassandra_conn_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cassandra_query_list(self, cluster_id, needs_columns=False):
""" Returns a list of Cassandra queries.
"""
return query.cassandra_query_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_search_es_list(self, cluster_id, needs_columns=False):
""" Returns a list of ElasticSearch connections.
"""
return query.search_es_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_search_solr_list(self, cluster_id, needs_columns=False):
""" Returns a list of Solr connections.
"""
return query.search_solr_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_sms_twilio_list(self, cluster_id, needs_columns=False):
""" Returns a list of Twilio connections.
"""
return query.sms_twilio_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_email_smtp_list(self, cluster_id, needs_columns=False):
""" Returns a list of SMTP connections.
"""
return query.email_smtp_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_email_imap_list(self, cluster_id, needs_columns=False):
""" Returns a list of IMAP connections.
"""
return query.email_imap_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_permission_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC permissions.
"""
return query.rbac_permission_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_role_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC roles.
"""
return query.rbac_role_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_client_role_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC roles assigned to clients.
"""
return query.rbac_client_role_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_role_permission_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC permissions for roles.
"""
return query.rbac_role_permission_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_pubsub_endpoint_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub endpoints.
"""
out = query.pubsub_endpoint_list(self._session, cluster_id, needs_columns)
return out
# ################################################################################################################################
def get_generic_connection_list(self, cluster_id, needs_columns=False):
""" Returns a list of generic connections.
"""
return query_generic.connection_list(self._session, cluster_id, needs_columns=needs_columns)
# ################################################################################################################################
def get_sso_user_rate_limiting_info(self):
""" Returns a list of SSO users that have rate limiting enabled.
"""
with closing(self.session()) as session:
return get_sso_user_rate_limiting_info(session)
# ################################################################################################################################
def _migrate_30_encrypt_sec_base(self, session, id, attr_name, encrypted_value):
""" Sets an encrypted value of a named attribute in a security definition.
"""
item = session.query(SecurityBase).\
filter(SecurityBase.id==id).\
one()
setattr(item, attr_name, encrypted_value)
session.add(item)
_migrate_30_encrypt_sec_apikey = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_aws = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_basic_auth = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_jwt = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_ntlm = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_oauth = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_vault_conn_sec = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_wss = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_xpath_sec = _migrate_30_encrypt_sec_base
# ################################################################################################################################
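# A minimal usage sketch, not part of the original module: every _migrate_30_encrypt_sec_*
# alias shares the base method's signature, so a single helper can cover all security types.
# `odb` is assumed to be an instance of the manager class above and `encrypt` any
# callable returning ciphertext, e.g. a CryptoManager method - both are hypothetical here.
def _example_migrate_sec_passwords(odb, sec_def_id, encrypt):
    with closing(odb.session()) as session:
        # Hypothetical values; attr_name must be a column of SecurityBase
        odb._migrate_30_encrypt_sec_basic_auth(session, sec_def_id, 'password', encrypt('my-secret'))
        session.commit()
# ################################################################################################################################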
| size: 66,853 | language: Python | extension: .py | total_lines: 1,123 | avg_line_length: 50.103295 | max_line_length: 130 | alphanum_fraction: 0.455366 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| id: 10,643 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/odb/__init__.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
def ping_database(params, ping_query):
""" Connects to a database using the input parameters and runs the ping query against it.
"""
connection = None
try:
#
# MySQL
#
if params['engine'].startswith('mysql'):
import pymysql
connection = pymysql.connect(
host = params['host'],
port = int(params['port']),
user = params['username'],
password = params['password'],
db = params['db_name'],
)
#
# PostgreSQL
#
elif params['engine'].startswith('postgres'):
import pg8000
connection = pg8000.connect(
host = params['host'],
port = int(params['port']),
user = params['username'],
password = params['password'],
database = params['db_name'],
)
#
# SQLite
#
elif params['engine'].startswith('sqlite'):
pass
#
# Unrecognised
#
else:
raise ValueError('Unrecognised database `{}`'.format(params['engine']))
# The connection itself is the first check - now run the actual ping query too.
if connection:
cursor = connection.cursor()
cursor.execute(ping_query)
cursor.close()
finally:
if connection:
connection.close()
# ################################################################################################################################
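# A minimal usage sketch with hypothetical connection parameters; a failure to
# connect, or to run the ping query, propagates to the caller as an exception.
def _example_ping_database():
    params = {
        'engine': 'postgresql+pg8000',
        'host': 'localhost',
        'port': 5432,
        'username': 'zato',
        'password': 'secret',
        'db_name': 'zatodb',
    }
    ping_database(params, 'SELECT 1')
# ################################################################################################################################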
def create_pool(engine_params, ping_query, query_class=None):
# stdlib
import copy
# SQLAlchemy
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Zato
from zato.common.util.api import get_engine_url
engine_params = copy.deepcopy(engine_params)
# Databases other than SQLite ..
if engine_params['engine'] != 'sqlite':
engine_params['password'] = str(engine_params['password'])
engine_params['extra']['pool_size'] = engine_params.pop('pool_size')
connect_args = {}
# .. we are using SQLite ..
else:
connect_args = {
'check_same_thread': False
}
engine = create_engine(get_engine_url(engine_params), connect_args=connect_args, **engine_params.get('extra', {}))
engine.execute(ping_query)
Session = sessionmaker()
Session.configure(bind=engine, query_cls=query_class)
session = Session()
return session
# ################################################################################################################################
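# A minimal usage sketch; the exact keys that get_engine_url expects are not shown
# in this module, so the parameters below are hypothetical placeholders.
def _example_create_pool():
    engine_params = {
        'engine': 'mysql+pymysql',
        'host': 'localhost',
        'port': 3306,
        'username': 'zato',
        'password': 'secret',
        'db_name': 'zatodb',
        'pool_size': 10,
        'extra': {},
    }
    session = create_pool(engine_params, 'SELECT 1')
    return session
# ################################################################################################################################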
# Taken from http://www.siafoo.net/snippet/85
# Licensed under BSD2 - http://opensource.org/licenses/bsd-license.php
def drop_all(engine):
""" Drops all tables and sequences (but not VIEWS) from a Postgres database
"""
# stdlib
import logging
from traceback import format_exc
# SQLAlchemy
from sqlalchemy.sql import text
logger = logging.getLogger('zato')
sequence_sql="""SELECT sequence_name FROM information_schema.sequences
WHERE sequence_schema='public'
"""
table_sql="""SELECT table_name FROM information_schema.tables
WHERE table_schema='public' AND table_type != 'VIEW' AND table_name NOT LIKE 'pg_ts_%%'
"""
for table in [name for (name,) in engine.execute(text(table_sql))]:
try:
engine.execute(text('DROP TABLE %s CASCADE' % table))
except Exception:
logger.warning(format_exc())
for seq in [name for (name,) in engine.execute(text(sequence_sql))]:
try:
engine.execute(text('DROP SEQUENCE %s CASCADE' % seq))
except Exception:
logger.warning(format_exc())
# ################################################################################################################################
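# A minimal usage sketch, assuming a throwaway PostgreSQL database; drop_all is
# destructive and must never be pointed at a production schema. The URL is hypothetical.
def _example_drop_all():
    from sqlalchemy import create_engine
    engine = create_engine('postgresql+pg8000://zato:secret@localhost/zato_scratch')
    drop_all(engine)
# ################################################################################################################################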
| size: 4,093 | language: Python | extension: .py | total_lines: 102 | avg_line_length: 31.598039 | max_line_length: 130 | alphanum_fraction: 0.512251 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| id: 10,644 | file_name: testing.py | file_path: zatosource_zato/code/zato-common/src/zato/common/odb/testing.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
# Zato
from zato.common.api import UNITTEST
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
class UnittestCursor:
def __init__(self, result=None):
self.result = result
def close(self, *args, **kwargs):
pass
def fetchall(self, *args, **kwargs):
return self.result or []
def _getter(self, *args, **kwargs):
pass
# ################################################################################################################################
class UnittestSession:
def __init__(self, engine):
# type: (UnittestEngine)
self.engine = engine
def execute(self, query, *args, **kwargs):
return UnittestCursor()
def begin(self, *args, **kwargs):
pass
def close(self, *args, **kwargs):
pass
# ################################################################################################################################
class UnittestEngine:
""" An SQL engine used only in unittests, one that does not actually connect to any database.
"""
name = UNITTEST.SQL_ENGINE
def __init__(self, engine_url, config):
# type: (str, dict)
self.engine_url = engine_url
self.config = config
def connect(self):
return UnittestSession(self)
_contextual_connect = connect
# ################################################################################################################################
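# A minimal usage sketch: the engine never connects anywhere, so queries return
# empty results, which is exactly what unittests of SQL-adjacent code need.
def _example_unittest_engine():
    engine = UnittestEngine('zato+unittest://example', {})  # Hypothetical URL and config
    session = engine.connect()
    return session.execute('SELECT 1').fetchall() # -> []
# ################################################################################################################################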
| size: 1,975 | language: Python | extension: .py | total_lines: 46 | avg_line_length: 37.913043 | max_line_length: 130 | alphanum_fraction: 0.431197 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| id: 10,645 | file_name: post_process.py | file_path: zatosource_zato/code/zato-common/src/zato/common/odb/post_process.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpoint
# ################################################################################################################################
if 0:
from zato.common.odb.model import Cluster
Cluster = Cluster
# ################################################################################################################################
# ################################################################################################################################
class ODBPostProcess:
""" SQL post-processing functionality, e.g. creation of objects only after aserver has started.
"""
def __init__(self, session, cluster, cluster_id):
# type: (object, Cluster, int)
if not (cluster or cluster_id):
raise ValueError('At least one of cluster or cluster_id is required in place of `{}` `{}`'.format(
cluster, cluster_id))
self.session = session
self.cluster = cluster
self.cluster_id = cluster_id
# ################################################################################################################################
def run(self):
self.add_pubsub_service_endpoint()
self.session.commit()
# ################################################################################################################################
def add_pubsub_service_endpoint(self, _name=PUBSUB.SERVICE_SUBSCRIBER.NAME):
existing = self.session.query(PubSubEndpoint.id).\
filter(PubSubEndpoint.name==_name).\
first()
if not existing:
endpoint = PubSubEndpoint()
endpoint.name = _name
endpoint.is_internal = True
endpoint.role = PUBSUB.ROLE.SUBSCRIBER.id
endpoint.topic_patterns = PUBSUB.SERVICE_SUBSCRIBER.TOPICS_ALLOWED
endpoint.endpoint_type = PUBSUB.ENDPOINT_TYPE.SERVICE.id
if self.cluster:
endpoint.cluster = self.cluster
else:
endpoint.cluster_id = self.cluster_id
self.session.add(endpoint)
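# ################################################################################################################################
# A minimal usage sketch, assuming an already created SQLAlchemy session; per the
# constructor's validation above, either a Cluster object or a cluster_id suffices.
def _example_odb_post_process(session):
    post_process = ODBPostProcess(session, None, 1)
    post_process.run()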
# ################################################################################################################################
# ################################################################################################################################
| size: 2,679 | language: Python | extension: .py | total_lines: 49 | avg_line_length: 47.306122 | max_line_length: 130 | alphanum_fraction: 0.423372 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| id: 10,646 | file_name: rate_limiting.py | file_path: zatosource_zato/code/zato-common/src/zato/common/odb/query/rate_limiting.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.odb.model import RateLimitState
# ################################################################################################################################
def current_state(session, cluster_id, object_type, object_id, period, network):
""" Rate limiting state for input network in the given period.
"""
return session.query(RateLimitState).\
filter(RateLimitState.cluster_id==cluster_id).\
filter(RateLimitState.object_type==object_type).\
filter(RateLimitState.object_id==object_id).\
filter(RateLimitState.period==period).\
filter(RateLimitState.last_network==network)
# ################################################################################################################################
def current_period_list(session, cluster_id):
""" Returns all periods stored in ODB, no matter their object type, ID or similar.
"""
return session.query(RateLimitState.period)
# ################################################################################################################################
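# A minimal usage sketch: both helpers return SQLAlchemy queries, leaving it to
# the caller to iterate, count or fetch a single row. All the values are hypothetical.
def _example_rate_limiting_queries(session):
    state = current_state(session, 1, 'http_soap', 123, '2019-01-02T15', '10.0.0.0/8').first()
    periods = [row.period for row in current_period_list(session, 1)]
    return state, periods
# ################################################################################################################################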
| size: 1,350 | language: Python | extension: .py | total_lines: 24 | avg_line_length: 52.208333 | max_line_length: 130 | alphanum_fraction: 0.512528 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| id: 10,647 | file_name: generic.py | file_path: zatosource_zato/code/zato-common/src/zato/common/odb/query/generic.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from logging import getLogger
# SQLAlchemy
from sqlalchemy import and_, delete, exists, insert, update
# Zato
from zato.common.api import GENERIC, FILE_TRANSFER, NotGiven
from zato.common.odb.model import GenericConn as ModelGenericConn, GenericObject as ModelGenericObject
from zato.common.odb.query import query_wrapper
from zato.common.typing_ import cast_
from zato.common.util.sql import get_dict_with_opaque
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm import Session as SASession
from zato.common.typing_ import any_, dictlist, intnone, strdict, strintnone, strnone
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
_generic_attr_name = GENERIC.ATTR_NAME
ModelGenericObjectTable:'any_' = ModelGenericObject.__table__
# ################################################################################################################################
# ################################################################################################################################
class GenericObjectWrapper:
""" Wraps access to generic objects.
"""
type_:'strnone' = None
subtype:'strnone' = None
model_class:'type_[ModelGenericObject]' = ModelGenericObject
def __init__(self, session:'SASession', cluster_id:'int') -> 'None':
self.session = session
self.cluster_id = cluster_id
# ################################################################################################################################
def build_list_item_from_sql_row(self, row:'strdict') -> 'strdict':
return row
# ################################################################################################################################
def _build_get_where_query(self, name:'str') -> 'any_':
return and_(
self.model_class.name==name,
self.model_class.type_==self.type_,
self.model_class.cluster_id==self.cluster_id,
)
# ################################################################################################################################
def get(self, name:'str', type_:'strnone'=None) -> 'any_':
# Local variables
type_ = type_ or self.type_
item = self.session.query(self.model_class).\
filter(self.model_class.name==name).\
filter(self.model_class.type_==type_).\
filter(self.model_class.cluster_id==self.cluster_id).\
first()
return cast_('any_', get_dict_with_opaque(item) if item else None)
# ################################################################################################################################
def get_list(self, type_:'strnone'=None, subtype:'strnone'=None, *, parent_object_id:'intnone'=None) -> 'dictlist':
# Local variables
type_ = type_ or self.type_
subtype = subtype or self.subtype
# Our response to produce
out:'dictlist' = []
items = self.session.query(self.model_class).\
filter(self.model_class.type_==type_).\
filter(self.model_class.cluster_id==self.cluster_id)
if subtype:
items = items.filter(self.model_class.subtype==subtype)
if parent_object_id:
items = items.filter(self.model_class.parent_object_id==parent_object_id)
items = items.order_by(self.model_class.name)
items = items.all()
for item in items:
item:'strdict' = get_dict_with_opaque(item)
item = self.build_list_item_from_sql_row(item)
out.append(item)
return out
# ################################################################################################################################
def exists(self, name:'str') -> 'bool':
""" Returns a boolean flag indicating whether the input name is already stored in the ODB. False otherwise.
"""
where_query = self._build_get_where_query(name)
exists_query = exists().where(where_query)
return cast_('bool', self.session.query(exists_query).\
scalar())
# ################################################################################################################################
def create_many(
self,
item_list:'dictlist',
type_:'strnone'=None,
subtype:'strnone'=None,
*,
parent_object_id:'intnone'=None,
) -> 'any_':
# Local variables
type_ = type_ or self.type_
subtype = subtype or self.subtype
now = datetime.utcnow()
# Preprocess each item we have on input
for item in item_list:
item['type_'] = type_
item['subtype'] = subtype
item['parent_object_id'] = parent_object_id
item['cluster_id'] = self.cluster_id
item['creation_time'] = now
item['last_modified'] = now
result = insert(self.model_class).values(item_list)
return result
# ################################################################################################################################
def create(
self,
name:'str',
opaque:'str',
type_:'strnone'=None,
subtype:'strnone'=None,
*,
parent_object_id:'intnone'=None
) -> 'any_':
""" Creates a row based on the input data.
"""
item = {
'name': name,
_generic_attr_name: opaque,
}
result = self.create_many([item], type_, subtype, parent_object_id=parent_object_id)
return result
# ################################################################################################################################
def update(self, name:'str', opaque:'any_'=NotGiven, type_:'strnone'=None, *, id:'int'=False) -> 'any_':
""" Updates an already existing object.
"""
# Local variables
type_ = type_ or self.type_
# Name will be always updated ..
values = {
'name': name
}
# .. whereas opaque attributes are optional ..
if opaque is not NotGiven:
values[_generic_attr_name] = opaque
# .. build a basic filter for the query ..
and_filter:'any_' = (
ModelGenericObjectTable.c.type_==type_,
ModelGenericObjectTable.c.cluster_id==self.cluster_id,
)
# .. if we have an ID on input, we update by its value ..
# .. which will let us do a rename ..
if id:
and_filter = and_filter + (ModelGenericObjectTable.c.id==id,)
# .. otherwise, match by name ..
else:
and_filter = and_filter + (ModelGenericObjectTable.c.name==name,)
# .. turn the tuple of parameters into an actual filter ..
and_filter = and_(*and_filter)
# .. build a query that will update the object ..
query = update(ModelGenericObjectTable).\
values(values).\
where(and_filter)
# .. and return it to our caller.
return query
# ################################################################################################################################
def delete_by_id(self, id:'int') -> 'any_':
""" Deletes an existing object by its ID.
"""
query = delete(ModelGenericObjectTable).where(ModelGenericObjectTable.c.id==id)
return query
# ################################################################################################################################
def delete_by_parent_object_id(self, parent_object_id:'int') -> 'any_':
""" Deletes objects by their parent object's ID.
"""
query = delete(ModelGenericObjectTable).where(ModelGenericObjectTable.c.parent_object_id==parent_object_id)
return query
# ################################################################################################################################
def delete_by_name(self, name:'str', *, parent_object_id:'strintnone'=None) -> 'any_':
""" Deletes an existing object by its and and, potentially, other attributes.
"""
# This is always used ..
criteria = [ModelGenericObjectTable.c.name==name]
# .. this is optional ..
if parent_object_id:
criteria.append(ModelGenericObjectTable.c.parent_object_id==parent_object_id)
# .. regardless of what we had, build an 'and' operator of it ..
criteria = and_(*criteria)
# .. build a query object now ..
query = delete(ModelGenericObjectTable).where(criteria)
# .. and return it to our caller.
return query
# ################################################################################################################################
def store(self, name:'str', opaque:'str') -> 'None':
""" Inserts new data or updates an already existing row matching the input.
"""
already_exists = self.exists(name)
query = self.update(name, opaque) if already_exists else self.create(name, opaque)
self.session.execute(query)
self.session.commit()
# ################################################################################################################################
# ################################################################################################################################
class FileTransferWrapper(GenericObjectWrapper):
type_ = GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER
class FTPFileTransferWrapper(FileTransferWrapper):
subtype = FILE_TRANSFER.SOURCE_TYPE.FTP.id
class SFTPFileTransferWrapper(FileTransferWrapper):
subtype = FILE_TRANSFER.SOURCE_TYPE.SFTP.id
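# ################################################################################################################################
# A minimal usage sketch, assuming one of the concrete subclasses above; store()
# issues an UPDATE if the name already exists in the ODB and an INSERT otherwise.
def _example_file_transfer_wrapper(session):
    wrapper = FTPFileTransferWrapper(session, 1)
    wrapper.store('my.channel', '{"is_active": true}') # Hypothetical opaque JSON
    return wrapper.get('my.channel')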
# ################################################################################################################################
# ################################################################################################################################
class GroupsWrapper(GenericObjectWrapper):
def build_list_item_from_sql_row(self, row: 'strdict') -> 'strdict':
out:'strdict' = {}
out['name'] = row['name']
out['type'] = row['subtype']
out['id'] = row['id']
out['group_id'] = row['parent_object_id']
return out
# ################################################################################################################################
# ################################################################################################################################
@query_wrapper
def connection_list(session:'SASession', cluster_id:'int', type_:'strnone'=None, needs_columns:'bool'=False) -> 'any_':
""" A list of generic connections by their type.
"""
q = session.query(ModelGenericConn).\
filter(ModelGenericConn.cluster_id==cluster_id)
if type_:
q = q.filter(ModelGenericConn.type_==type_)
q = q.order_by(ModelGenericConn.name)
return q
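# ################################################################################################################################
# A minimal usage sketch: because of @query_wrapper, the trailing boolean decides
# whether a (result, columns) pair or just the result comes back, and keyword
# arguments such as cur_page and page_size drive pagination.
def _example_connection_list(session):
    result, columns = connection_list(session, 1, GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER, True)
    return result, columns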
# ################################################################################################################################
# ################################################################################################################################
| size: 12,244 | language: Python | extension: .py | total_lines: 230 | avg_line_length: 45.73913 | max_line_length: 130 | alphanum_fraction: 0.432847 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| id: 10,648 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/odb/query/__init__.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from functools import wraps
# Bunch
from bunch import bunchify
# SQLAlchemy
from sqlalchemy import and_, func, not_, or_
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import case
# Zato
from zato.common.api import CACHE, DEFAULT_HTTP_PING_METHOD, DEFAULT_HTTP_POOL_SIZE, GENERIC, HTTP_SOAP_SERIALIZATION_TYPE, \
PARAMS_PRIORITY, PUBSUB, URL_PARAMS_PRIORITY
from zato.common.json_internal import loads
from zato.common.odb.model import AWSS3, APIKeySecurity, AWSSecurity, Cache, CacheBuiltin, CacheMemcached, CassandraConn, \
CassandraQuery, ChannelAMQP, ChannelWebSocket, ChannelWMQ, ChannelZMQ, Cluster, ConnDefAMQP, ConnDefWMQ, \
CronStyleJob, DeployedService, ElasticSearch, HTTPBasicAuth, HTTPSOAP, IMAP, IntervalBasedJob, Job, JSONPointer, JWT, \
MsgNamespace, NotificationSQL as NotifSQL, NTLM, OAuth, OutgoingOdoo, \
OutgoingAMQP, OutgoingFTP, OutgoingWMQ, OutgoingZMQ, PubSubEndpoint, \
PubSubEndpointTopic, PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription, PubSubTopic, RBACClientRole, \
RBACPermission, RBACRole, RBACRolePermission, SecurityBase, Server, Service, SMSTwilio, SMTP, Solr, SQLConnectionPool, \
TLSCACert, TLSChannelSecurity, TLSKeyCertSecurity, WebSocketClient, WebSocketClientPubSubKeys, WebSocketSubscription, \
WSSDefinition, VaultConnection, XPath, XPathSecurity, OutgoingSAP
from zato.common.util.search import SearchResults as _SearchResults
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anylist
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
_not_given = object()
_no_page_limit = 2 ** 24 # ~16.7 million results, tops
_gen_attr = GENERIC.ATTR_NAME
# ################################################################################################################################
def count(session, q):
_q = q.statement.with_only_columns([func.count()]).order_by(None)
return session.execute(_q).scalar()
# ################################################################################################################################
class _QueryConfig:
@staticmethod
def supports_kwargs(query_func):
""" Returns True if the given query func supports kwargs, False otherwise.
"""
return query_func in (
http_soap_list,
)
# ################################################################################################################################
class _SearchWrapper:
""" Wraps results in pagination and/or filters out objects by their name or other attributes.
"""
def __init__(self, q, default_page_size=_no_page_limit, **config):
# Apply WHERE conditions
where = config.get('where') or _not_given
if where is not _not_given:
q = q.filter(where)
else:
filters = []
if query := config.get('query', []):
query = query if isinstance(query, (list, tuple)) else [query]
if filter_by := config.get('filter_by', []):
filter_by = filter_by if isinstance(filter_by, (list, tuple)) else [filter_by]
len_filter_by = len(filter_by)
for column in filter_by:
for criterion in query:
expression = column.contains(criterion)
if criterion.startswith('-'):
expression = not_(expression)
and_filter = and_(*[expression]) # type: ignore
filters.append(and_filter)
# We need to use "or" if we filter by more then one column
# to let the filters match all of them independently.
if len_filter_by > 1:
combine_criteria_using = or_
else:
combine_criteria_using = and_
q = q.filter(combine_criteria_using(*filters))
# Total number of results
total_q = q.statement.with_only_columns([func.count()]).order_by(None)
self.total = q.session.execute(total_q).scalar()
# Pagination
page_size = config.get('page_size', default_page_size)
cur_page = config.get('cur_page', 0)
slice_from = cur_page * page_size
slice_to = slice_from + page_size
self.q = q.slice(slice_from, slice_to)
# ################################################################################################################################
def query_wrapper(func):
""" A decorator for queries which works out whether a given query function should return the result only
or a column list retrieved in addition to the result. This is useful because some callers prefer the former
and some need the latter. Also, paginates the results if requested to by the caller.
"""
@wraps(func)
def inner(*args, **kwargs):
# Each query function will have the last argument either False or True
# depending on whether columns are needed or not.
needs_columns = args[-1]
if _QueryConfig.supports_kwargs(func):
result = func(*args, **kwargs)
else:
result = func(*args)
tool = _SearchWrapper(result, **kwargs)
result = _SearchResults(tool.q, tool.q.all(), tool.q.statement.columns, tool.total)
if needs_columns:
return result, result.columns
return result
return inner
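# ################################################################################################################################
# A minimal sketch of what the decorator provides, using job_list defined below:
# pagination keywords go to _SearchWrapper while the last positional argument
# decides whether column metadata accompanies the result.
def _example_query_wrapper_usage(session, cluster_id):
    page = job_list(session, cluster_id, None, False, cur_page=0, page_size=50)
    result, columns = job_list(session, cluster_id, None, True)
    return page, result, columns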
# ################################################################################################################################
def bunch_maker(func):
""" Turns SQLAlchemy rows into bunch instances, taking opaque elements into account.
"""
@wraps(func)
def inner(*args, **kwargs):
result = func(*args, **kwargs)
out = bunchify(result._asdict())
opaque = out.pop(_gen_attr, None)
if opaque:
opaque = loads(opaque)
out.update(opaque)
return out
return inner
# ################################################################################################################################
def internal_channel_list(session, cluster_id):
""" All the HTTP/SOAP channels that point to internal services.
"""
return session.query(
HTTPSOAP.soap_action, Service.name).\
filter(HTTPSOAP.cluster_id==Cluster.id).\
filter(HTTPSOAP.service_id==Service.id).\
filter(Service.is_internal==True).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==HTTPSOAP.cluster_id) # noqa: E712
# ################################################################################################################################
def _job(session, cluster_id):
return session.query(
Job.id,
Job.name,
Job.is_active,
Job.job_type,
Job.start_date,
Job.extra,
Service.name.label('service_name'),
Service.impl_name.label('service_impl_name'),
Service.id.label('service_id'),
IntervalBasedJob.weeks,
IntervalBasedJob.days,
IntervalBasedJob.hours,
IntervalBasedJob.minutes,
IntervalBasedJob.seconds,
IntervalBasedJob.repeats,
CronStyleJob.cron_definition
).\
outerjoin(IntervalBasedJob, Job.id==IntervalBasedJob.job_id).\
outerjoin(CronStyleJob, Job.id==CronStyleJob.job_id).\
filter(Job.cluster_id==Cluster.id).\
filter(Job.service_id==Service.id).\
filter(Cluster.id==cluster_id)
@query_wrapper
def job_list(session, cluster_id, service_name=None, needs_columns=False):
""" All the scheduler's jobs defined in the ODB.
"""
q = _job(session, cluster_id)
if service_name:
q = q.filter(Service.name==service_name)
return q.\
order_by(Job.name)
def job_by_id(session, cluster_id, job_id):
""" A scheduler's job fetched by its ID.
"""
return _job(session, cluster_id).\
filter(Job.id==job_id).\
one()
def job_by_name(session, cluster_id, name):
""" A scheduler's job fetched by its name.
"""
return _job(session, cluster_id).\
filter(Job.name==name).\
one()
# ################################################################################################################################
def _sec_base(session, cluster_id):
return session.query(
SecurityBase.id,
SecurityBase.is_active,
SecurityBase.sec_type,
SecurityBase.name,
SecurityBase.username).\
filter(SecurityBase.cluster_id==Cluster.id).\
filter(Cluster.id==cluster_id)
def sec_base(session, cluster_id, sec_base_id, use_one=True):
q = _sec_base(session, cluster_id).\
filter(SecurityBase.id==sec_base_id)
if use_one:
result = q.one()
else:
result = q.first()
return result
@query_wrapper
def apikey_security_list(session, cluster_id, needs_columns=False):
""" All the API keys.
"""
return session.query(
APIKeySecurity.id,
APIKeySecurity.name,
APIKeySecurity.is_active,
APIKeySecurity.username,
APIKeySecurity.password,
APIKeySecurity.sec_type,
APIKeySecurity.opaque1,
).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==APIKeySecurity.cluster_id).\
filter(SecurityBase.id==APIKeySecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def aws_security_list(session, cluster_id, needs_columns=False):
""" All the Amazon security definitions.
"""
return session.query(
AWSSecurity.id, AWSSecurity.name,
AWSSecurity.is_active,
AWSSecurity.username,
AWSSecurity.password, AWSSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==AWSSecurity.cluster_id).\
filter(SecurityBase.id==AWSSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def basic_auth_list(session, cluster_id, cluster_name, needs_columns=False):
""" All the HTTP Basic Auth definitions.
"""
q = session.query(
HTTPBasicAuth.id,
HTTPBasicAuth.name,
HTTPBasicAuth.is_active,
HTTPBasicAuth.username,
HTTPBasicAuth.realm,
HTTPBasicAuth.password,
HTTPBasicAuth.sec_type,
HTTPBasicAuth.password_type,
HTTPBasicAuth.opaque1,
Cluster.id.label('cluster_id'), Cluster.name.label('cluster_name')).\
filter(Cluster.id==HTTPBasicAuth.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.filter(SecurityBase.id==HTTPBasicAuth.id).\
order_by(SecurityBase.name)
return q
def _jwt(session, cluster_id, cluster_name, needs_columns=False):
""" All the JWT definitions.
"""
q = session.query(
JWT.id,
JWT.name,
JWT.is_active,
JWT.username,
JWT.password,
JWT.ttl,
JWT.sec_type,
JWT.password_type,
JWT.opaque1,
Cluster.id.label('cluster_id'),
Cluster.name.label('cluster_name')).\
filter(Cluster.id==JWT.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.filter(SecurityBase.id==JWT.id).\
order_by(SecurityBase.name)
return q
@query_wrapper
def jwt_list(*args, **kwargs):
return _jwt(*args, **kwargs)
def jwt_by_username(session, cluster_id, username, needs_columns=False):
""" An individual JWT definition by its username.
"""
return _jwt(session, cluster_id, None, needs_columns).\
filter(JWT.username==username).\
one()
@query_wrapper
def ntlm_list(session, cluster_id, needs_columns=False):
""" All the NTLM definitions.
"""
return session.query(
NTLM.id, NTLM.name,
NTLM.is_active,
NTLM.username,
NTLM.password, NTLM.sec_type,
NTLM.password_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==NTLM.cluster_id).\
filter(SecurityBase.id==NTLM.id).\
order_by(SecurityBase.name)
@query_wrapper
def oauth_list(session, cluster_id, needs_columns=False):
""" All the OAuth definitions.
"""
out = session.query(
OAuth.id,
OAuth.name,
OAuth.is_active,
OAuth.username,
OAuth.password,
OAuth.proto_version,
OAuth.sig_method,
OAuth.max_nonce_log,
OAuth.sec_type,
OAuth.opaque1,
).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OAuth.cluster_id).\
filter(SecurityBase.id==OAuth.id).\
order_by(SecurityBase.name)
return out
@query_wrapper
def tls_ca_cert_list(session, cluster_id, needs_columns=False):
""" TLS CA certs.
"""
return session.query(TLSCACert).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSCACert.cluster_id).\
order_by(TLSCACert.name)
@query_wrapper
def tls_channel_sec_list(session, cluster_id, needs_columns=False):
""" TLS-based channel security.
"""
return session.query(
TLSChannelSecurity.id, TLSChannelSecurity.name,
TLSChannelSecurity.is_active, TLSChannelSecurity.value,
TLSChannelSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSChannelSecurity.cluster_id).\
filter(SecurityBase.id==TLSChannelSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def tls_key_cert_list(session, cluster_id, needs_columns=False):
""" TLS key/cert pairs.
"""
return session.query(
TLSKeyCertSecurity.id, TLSKeyCertSecurity.name,
TLSKeyCertSecurity.is_active, TLSKeyCertSecurity.info,
TLSKeyCertSecurity.auth_data, TLSKeyCertSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSKeyCertSecurity.cluster_id).\
filter(SecurityBase.id==TLSKeyCertSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def wss_list(session, cluster_id, needs_columns=False):
""" All the WS-Security definitions.
"""
return session.query(
WSSDefinition.id, WSSDefinition.name, WSSDefinition.is_active,
WSSDefinition.username, WSSDefinition.password, WSSDefinition.password_type,
WSSDefinition.reject_empty_nonce_creat, WSSDefinition.reject_stale_tokens,
WSSDefinition.reject_expiry_limit, WSSDefinition.nonce_freshness_time,
WSSDefinition.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==WSSDefinition.cluster_id).\
filter(SecurityBase.id==WSSDefinition.id).\
order_by(SecurityBase.name)
@query_wrapper
def xpath_sec_list(session, cluster_id, needs_columns=False):
""" All the XPath security definitions.
"""
return session.query(
XPathSecurity.id, XPathSecurity.name, XPathSecurity.is_active, XPathSecurity.username, XPathSecurity.username_expr,
XPathSecurity.password_expr, XPathSecurity.password, XPathSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==XPathSecurity.cluster_id).\
filter(SecurityBase.id==XPathSecurity.id).\
order_by(SecurityBase.name)
# ################################################################################################################################
def _definition_amqp(session, cluster_id):
return session.query(
ConnDefAMQP.name, ConnDefAMQP.id, ConnDefAMQP.host,
ConnDefAMQP.port, ConnDefAMQP.vhost, ConnDefAMQP.username,
ConnDefAMQP.frame_max, ConnDefAMQP.heartbeat, ConnDefAMQP.password).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ConnDefAMQP.name)
def definition_amqp(session, cluster_id, id):
""" A particular AMQP definition
"""
return _definition_amqp(session, cluster_id).\
filter(ConnDefAMQP.id==id).\
one()
@query_wrapper
def definition_amqp_list(session, cluster_id, needs_columns=False):
""" AMQP connection definitions.
"""
return _definition_amqp(session, cluster_id)
# ################################################################################################################################
def _def_wmq(session, cluster_id):
return session.query(
ConnDefWMQ.id, ConnDefWMQ.name, ConnDefWMQ.host,
ConnDefWMQ.port, ConnDefWMQ.queue_manager, ConnDefWMQ.channel,
ConnDefWMQ.cache_open_send_queues, ConnDefWMQ.cache_open_receive_queues,
ConnDefWMQ.use_shared_connections, ConnDefWMQ.ssl, ConnDefWMQ.ssl_cipher_spec,
ConnDefWMQ.ssl_key_repository, ConnDefWMQ.needs_mcd, ConnDefWMQ.max_chars_printed,
ConnDefWMQ.username, ConnDefWMQ.password, ConnDefWMQ.use_jms).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ConnDefWMQ.name)
def definition_wmq(session, cluster_id, id):
""" A particular IBM MQ definition
"""
return _def_wmq(session, cluster_id).\
filter(ConnDefWMQ.id==id).\
one()
@query_wrapper
def definition_wmq_list(session, cluster_id, needs_columns=False):
""" IBM MQ connection definitions.
"""
return _def_wmq(session, cluster_id)
# ################################################################################################################################
def _out_amqp(session, cluster_id):
return session.query(
OutgoingAMQP.id, OutgoingAMQP.name, OutgoingAMQP.is_active,
OutgoingAMQP.delivery_mode, OutgoingAMQP.priority, OutgoingAMQP.content_type,
OutgoingAMQP.content_encoding, OutgoingAMQP.expiration, OutgoingAMQP.pool_size, OutgoingAMQP.user_id,
OutgoingAMQP.app_id, ConnDefAMQP.name.label('def_name'), OutgoingAMQP.def_id).\
filter(OutgoingAMQP.def_id==ConnDefAMQP.id).\
filter(ConnDefAMQP.id==OutgoingAMQP.def_id).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingAMQP.name)
def out_amqp(session, cluster_id, id):
""" An outgoing AMQP connection.
"""
return _out_amqp(session, cluster_id).\
filter(OutgoingAMQP.id==id).\
one()
@query_wrapper
def out_amqp_list(session, cluster_id, needs_columns=False):
""" Outgoing AMQP connections.
"""
return _out_amqp(session, cluster_id)
# ################################################################################################################################
def _out_wmq(session, cluster_id):
return session.query(
OutgoingWMQ.id, OutgoingWMQ.name, OutgoingWMQ.is_active,
OutgoingWMQ.delivery_mode, OutgoingWMQ.priority, OutgoingWMQ.expiration,
ConnDefWMQ.name.label('def_name'), OutgoingWMQ.def_id).\
filter(OutgoingWMQ.def_id==ConnDefWMQ.id).\
filter(ConnDefWMQ.id==OutgoingWMQ.def_id).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingWMQ.name)
def out_wmq(session, cluster_id, id):
""" An outgoing IBM MQ connection (by ID).
"""
return _out_wmq(session, cluster_id).\
filter(OutgoingWMQ.id==id).\
one()
def out_wmq_by_name(session, cluster_id, name):
""" An outgoing IBM MQ connection (by name).
"""
return _out_wmq(session, cluster_id).\
filter(OutgoingWMQ.name==name).\
first()
@query_wrapper
def out_wmq_list(session, cluster_id, needs_columns=False):
""" Outgoing IBM MQ connections.
"""
return _out_wmq(session, cluster_id)
# ################################################################################################################################
def _channel_amqp(session, cluster_id):
return session.query(
ChannelAMQP.id, ChannelAMQP.name, ChannelAMQP.is_active,
ChannelAMQP.queue, ChannelAMQP.consumer_tag_prefix,
ConnDefAMQP.name.label('def_name'), ChannelAMQP.def_id,
ChannelAMQP.pool_size, ChannelAMQP.ack_mode, ChannelAMQP.prefetch_count,
ChannelAMQP.data_format,
Service.name.label('service_name'),
Service.impl_name.label('service_impl_name')).\
filter(ChannelAMQP.def_id==ConnDefAMQP.id).\
filter(ChannelAMQP.service_id==Service.id).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelAMQP.name)
def channel_amqp(session, cluster_id, id):
""" A particular AMQP channel.
"""
return _channel_amqp(session, cluster_id).\
filter(ChannelAMQP.id==id).\
one()
@query_wrapper
def channel_amqp_list(session, cluster_id, needs_columns=False):
""" AMQP channels.
"""
return _channel_amqp(session, cluster_id)
# ################################################################################################################################
def _channel_wmq(session, cluster_id):
return session.query(
ChannelWMQ.id, ChannelWMQ.name, ChannelWMQ.is_active,
ChannelWMQ.queue, ConnDefWMQ.name.label('def_name'), ChannelWMQ.def_id,
ChannelWMQ.data_format, Service.name.label('service_name'),
Service.impl_name.label('service_impl_name')).\
filter(ChannelWMQ.def_id==ConnDefWMQ.id).\
filter(ChannelWMQ.service_id==Service.id).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelWMQ.name)
def channel_wmq(session, cluster_id, id):
""" A particular IBM MQ channel.
"""
return _channel_wmq(session, cluster_id).\
filter(ChannelWMQ.id==id).\
one()
@query_wrapper
def channel_wmq_list(session, cluster_id, needs_columns=False):
""" IBM MQ channels.
"""
return _channel_wmq(session, cluster_id)
# ################################################################################################################################
def _out_zmq(session, cluster_id):
return session.query(
OutgoingZMQ.id, OutgoingZMQ.name, OutgoingZMQ.is_active,
OutgoingZMQ.address, OutgoingZMQ.socket_type, OutgoingZMQ.socket_method).\
filter(Cluster.id==OutgoingZMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingZMQ.name)
def out_zmq(session, cluster_id, id):
""" An outgoing ZeroMQ connection.
"""
return _out_zmq(session, cluster_id).\
filter(OutgoingZMQ.id==id).\
one()
@query_wrapper
def out_zmq_list(session, cluster_id, needs_columns=False):
""" Outgoing ZeroMQ connections.
"""
return _out_zmq(session, cluster_id)
# ################################################################################################################################
def _channel_zmq(session, cluster_id):
return session.query(
ChannelZMQ.id, ChannelZMQ.name, ChannelZMQ.is_active,
ChannelZMQ.address, ChannelZMQ.socket_type, ChannelZMQ.socket_method, ChannelZMQ.sub_key,
ChannelZMQ.pool_strategy, ChannelZMQ.service_source, ChannelZMQ.data_format,
Service.name.label('service_name'), Service.impl_name.label('service_impl_name')).\
filter(Service.id==ChannelZMQ.service_id).\
filter(Cluster.id==ChannelZMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelZMQ.name)
def channel_zmq(session, cluster_id, id):
""" An incoming ZeroMQ connection.
"""
return _channel_zmq(session, cluster_id).\
filter(ChannelZMQ.id==id).\
one()
@query_wrapper
def channel_zmq_list(session, cluster_id, needs_columns=False):
""" Incoming ZeroMQ connections.
"""
return _channel_zmq(session, cluster_id)
# ################################################################################################################################
def _http_soap(session, cluster_id):
return session.query(
HTTPSOAP.id,
HTTPSOAP.name,
HTTPSOAP.is_active,
HTTPSOAP.is_internal,
HTTPSOAP.transport,
HTTPSOAP.host,
HTTPSOAP.url_path,
HTTPSOAP.method,
HTTPSOAP.soap_action,
HTTPSOAP.soap_version,
HTTPSOAP.data_format,
HTTPSOAP.security_id,
HTTPSOAP.has_rbac,
HTTPSOAP.connection,
HTTPSOAP.content_type,
case([(HTTPSOAP.ping_method != None, HTTPSOAP.ping_method)], else_=DEFAULT_HTTP_PING_METHOD).label('ping_method'), # noqa
case([(HTTPSOAP.pool_size != None, HTTPSOAP.pool_size)], else_=DEFAULT_HTTP_POOL_SIZE).label('pool_size'),
case([(HTTPSOAP.merge_url_params_req != None, HTTPSOAP.merge_url_params_req)], else_=True).label('merge_url_params_req'),
case([(HTTPSOAP.url_params_pri != None, HTTPSOAP.url_params_pri)], else_=URL_PARAMS_PRIORITY.DEFAULT).label('url_params_pri'),
case([(HTTPSOAP.params_pri != None, HTTPSOAP.params_pri)], else_=PARAMS_PRIORITY.DEFAULT).label('params_pri'),
case([(
HTTPSOAP.serialization_type != None, HTTPSOAP.serialization_type)],
else_=HTTP_SOAP_SERIALIZATION_TYPE.DEFAULT.id).label('serialization_type'),
HTTPSOAP.timeout,
HTTPSOAP.sec_tls_ca_cert_id,
HTTPSOAP.sec_use_rbac,
HTTPSOAP.cache_id,
HTTPSOAP.cache_expiry,
HTTPSOAP.content_encoding,
HTTPSOAP.opaque1,
Cache.name.label('cache_name'),
Cache.cache_type,
TLSCACert.name.label('sec_tls_ca_cert_name'),
SecurityBase.sec_type,
Service.name.label('service_name'),
Service.id.label('service_id'),
Service.impl_name.label('service_impl_name'),
SecurityBase.name.label('security_name'),
SecurityBase.username.label('username'),
SecurityBase.password.label('password'),
SecurityBase.password_type.label('password_type'),).\
outerjoin(Service, Service.id==HTTPSOAP.service_id).\
outerjoin(Cache, Cache.id==HTTPSOAP.cache_id).\
outerjoin(TLSCACert, TLSCACert.id==HTTPSOAP.sec_tls_ca_cert_id).\
outerjoin(SecurityBase, HTTPSOAP.security_id==SecurityBase.id).\
filter(Cluster.id==HTTPSOAP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(HTTPSOAP.name)
def http_soap_security_list(session, cluster_id, connection=None):
""" HTTP/SOAP security definitions.
"""
q = _http_soap(session, cluster_id)
if connection:
q = q.filter(HTTPSOAP.connection==connection)
return q
def http_soap(session, cluster_id, item_id=None, name=None):
""" An HTTP/SOAP connection.
"""
q = _http_soap(session, cluster_id)
if item_id:
q = q.filter(HTTPSOAP.id==item_id)
elif name:
q = q.filter(HTTPSOAP.name==name)
else:
raise Exception('Exactly one of \'id\' or \'name\' is required')
return q.one()
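# ################################################################################################################################
# A minimal usage sketch: http_soap() is not decorated with @query_wrapper, so it
# returns a single row directly and raises if neither item_id nor name is given.
def _example_http_soap(session, cluster_id):
    by_id = http_soap(session, cluster_id, item_id=123) # Hypothetical ID
    by_name = http_soap(session, cluster_id, name='my.endpoint') # Hypothetical name
    return by_id, by_name
# ################################################################################################################################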
@query_wrapper
def http_soap_list(session, cluster_id, connection=None, transport=None, return_internal=True, data_format=None,
needs_columns=False, *args, **kwargs):
""" HTTP/SOAP connections, both channels and outgoing ones.
"""
q = _http_soap(session, cluster_id)
if connection:
q = q.filter(HTTPSOAP.connection==connection)
if transport:
q = q.filter(HTTPSOAP.transport==transport)
if not return_internal:
q = q.filter(
not_(
HTTPSOAP.name.startswith('zato') |
HTTPSOAP.name.startswith('/zato/sso/') |
HTTPSOAP.name.startswith('pub.zato.service.service-invoker')
)
)
if data_format:
q = q.filter(HTTPSOAP.data_format.startswith(data_format))
return q
# ################################################################################################################################
def _out_sql(session, cluster_id):
return session.query(SQLConnectionPool).\
filter(Cluster.id==SQLConnectionPool.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(SQLConnectionPool.name)
def out_sql(session, cluster_id, id):
""" An outgoing SQL connection.
"""
return _out_sql(session, cluster_id).\
filter(SQLConnectionPool.id==id).\
one()
@query_wrapper
def out_sql_list(session, cluster_id, needs_columns=False):
""" Outgoing SQL connections.
"""
return _out_sql(session, cluster_id)
# ################################################################################################################################
def _out_ftp(session, cluster_id):
return session.query(
OutgoingFTP.id,
OutgoingFTP.name,
OutgoingFTP.is_active,
OutgoingFTP.host,
OutgoingFTP.port,
OutgoingFTP.user,
OutgoingFTP.password,
OutgoingFTP.acct,
OutgoingFTP.timeout,
OutgoingFTP.dircache,
OutgoingFTP.opaque1,
).\
filter(Cluster.id==OutgoingFTP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingFTP.name)
def out_ftp(session, cluster_id, id):
""" An outgoing FTP connection.
"""
return _out_ftp(session, cluster_id).\
filter(OutgoingFTP.id==id).\
one()
@query_wrapper
def out_ftp_list(session, cluster_id, needs_columns=False):
""" Outgoing FTP connections.
"""
return _out_ftp(session, cluster_id)
# ################################################################################################################################
def _service(session, cluster_id):
return session.query(
Service.id,
Service.name,
Service.is_active,
Service.impl_name,
Service.is_internal,
Service.slow_threshold,
Service.opaque1,
).\
filter(Cluster.id==Service.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(Service.name)
def service(session, cluster_id, id=None, name=None):
""" A service.
"""
q = _service(session, cluster_id)
if name:
q = q.filter(Service.name==name)
elif id:
q = q.filter(Service.id==id)
return q.one()
@query_wrapper
def service_list(session, cluster_id, return_internal=True, include_list=None, needs_columns=False):
""" All services.
"""
q = _service(session, cluster_id)
if include_list:
q = q.filter(or_(Service.name.in_(include_list)))
else:
if not return_internal:
q = q.filter(not_(Service.name.startswith('zato')))
return q
@query_wrapper
def service_list_with_include(session, cluster_id, include_list, needs_columns=False):
q = _service(session, cluster_id)
return q.filter(Service.name.in_(include_list))
def service_id_list(session, cluster_id, name_list=None):
return session.query(
Service.id,
Service.impl_name).\
filter(Cluster.id==Service.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(Service.name.in_(name_list))
# ################################################################################################################################
def service_deployment_list(session, service_id=None, include_internal=None):
query = session.query(
DeployedService.details,
Server.name.label('server_name'),
Server.id.label('server_id'),
Service.id.label('service_id'),
Service.name.label('service_name'),
).\
filter(DeployedService.service_id==Service.id).\
filter(DeployedService.server_id==Server.id)
if service_id:
query = query.\
filter(DeployedService.service_id==service_id)
if not include_internal:
query = query.\
filter(Service.is_internal==False) # type: ignore
query = query.filter(
not_(
Service.name.startswith('zato') |
Service.name.startswith('pub.zato') |
Service.name.startswith('pub.helpers') |
Service.name.startswith('helpers')
)
)
return query.all()
# ################################################################################################################################
def _msg_list(class_, order_by, session, cluster_id, needs_columns=False):
""" All the namespaces.
"""
return session.query(
class_.id, class_.name,
class_.value).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==class_.cluster_id).\
order_by(order_by)
@query_wrapper
def namespace_list(session, cluster_id, needs_columns=False):
""" All the namespaces.
"""
return _msg_list(MsgNamespace, MsgNamespace.name, session, cluster_id, needs_columns)
@query_wrapper
def xpath_list(session, cluster_id, needs_columns=False):
""" All the XPaths.
"""
return _msg_list(XPath, XPath.name, session, cluster_id, needs_columns)
@query_wrapper
def json_pointer_list(session, cluster_id, needs_columns=False):
""" All the JSON Pointers.
"""
return _msg_list(JSONPointer, JSONPointer.name, session, cluster_id, needs_columns)
# ################################################################################################################################
def _cloud_aws_s3(session, cluster_id):
return session.query(
AWSS3.id, AWSS3.name, AWSS3.is_active, AWSS3.pool_size, AWSS3.address, AWSS3.debug_level, AWSS3.suppr_cons_slashes,
AWSS3.content_type, AWSS3.metadata_, AWSS3.security_id, AWSS3.bucket, AWSS3.encrypt_at_rest, AWSS3.storage_class,
SecurityBase.username, SecurityBase.password).\
filter(Cluster.id==cluster_id).\
filter(AWSS3.security_id==SecurityBase.id).\
order_by(AWSS3.name)
def cloud_aws_s3(session, cluster_id, id):
""" An AWS S3 connection.
"""
return _cloud_aws_s3(session, cluster_id).\
filter(AWSS3.id==id).\
one()
@query_wrapper
def cloud_aws_s3_list(session, cluster_id, needs_columns=False):
""" AWS S3 connections.
"""
return _cloud_aws_s3(session, cluster_id)
# ################################################################################################################################
def _pubsub_endpoint(session, cluster_id):
return session.query(
PubSubEndpoint.id,
PubSubEndpoint.name,
PubSubEndpoint.endpoint_type,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
PubSubEndpoint.tags,
PubSubEndpoint.topic_patterns,
PubSubEndpoint.pub_tag_patterns,
PubSubEndpoint.message_tag_patterns,
PubSubEndpoint.security_id,
PubSubEndpoint.service_id,
PubSubEndpoint.ws_channel_id,
SecurityBase.sec_type,
SecurityBase.name.label('sec_name'),
Service.name.label('service_name'),
ChannelWebSocket.name.label('ws_channel_name'),
).\
outerjoin(SecurityBase, SecurityBase.id==PubSubEndpoint.security_id).\
outerjoin(Service, Service.id==PubSubEndpoint.service_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==PubSubEndpoint.ws_channel_id).\
filter(PubSubEndpoint.cluster_id==cluster_id).\
order_by(PubSubEndpoint.name)
def pubsub_endpoint(session, cluster_id, id):
""" An individual pub/sub endpoint.
"""
return _pubsub_endpoint(session, cluster_id).\
filter(PubSubEndpoint.id==id).\
one()
@query_wrapper
def pubsub_endpoint_list(session, cluster_id, needs_columns=False):
""" A list of pub/sub endpoints.
"""
result = _pubsub_endpoint(session, cluster_id)
return result
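# A minimal sketch (the IDs below are placeholders, not real values):
#
#     endpoint = pubsub_endpoint(session, cluster_id=1, id=5)
#     endpoints = pubsub_endpoint_list(session, 1)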
# ################################################################################################################################
def _pubsub_topic(session, cluster_id):
return session.query(
PubSubTopic.id,
PubSubTopic.name,
PubSubTopic.is_active,
PubSubTopic.is_internal,
PubSubTopic.max_depth_gd,
PubSubTopic.max_depth_non_gd,
PubSubTopic.has_gd,
PubSubTopic.is_api_sub_allowed,
PubSubTopic.depth_check_freq,
PubSubTopic.hook_service_id,
PubSubTopic.pub_buffer_size_gd,
PubSubTopic.task_sync_interval,
PubSubTopic.task_delivery_interval,
PubSubTopic.opaque1,
Service.name.label('hook_service_name'),
).\
outerjoin(Service, Service.id==PubSubTopic.hook_service_id).\
filter(Cluster.id==PubSubTopic.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(PubSubTopic.name)
@bunch_maker
def pubsub_topic(session, cluster_id, topic_id=None, topic_name=None) -> 'PubSubTopic':
""" A pub/sub topic.
"""
q = _pubsub_topic(session, cluster_id)
if topic_id:
q = q.filter(PubSubTopic.id==topic_id)
elif topic_name:
q = q.filter(PubSubTopic.name==topic_name)
else:
raise ValueError('Topic ID or name is required on input')
return q.one()
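# `pubsub_topic` requires either a topic ID or a topic name - a sketch,
# with hypothetical values:
#
#     topic = pubsub_topic(session, 1, topic_name='/my/topic')
#     topic = pubsub_topic(session, 1, topic_id=7)
#     pubsub_topic(session, 1)  # Raises ValueError - neither ID nor name given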
@query_wrapper
def pubsub_topic_list(session, cluster_id, needs_columns=False):
""" All pub/sub topics.
"""
return _pubsub_topic(session, cluster_id)
# ################################################################################################################################
def pubsub_publishers_for_topic(session, cluster_id, topic_id):
return session.query(
PubSubEndpoint.service_id, PubSubEndpoint.security_id,
PubSubEndpoint.ws_channel_id, PubSubEndpoint.name,
PubSubEndpoint.is_active, PubSubEndpoint.is_internal,
PubSubEndpoint.last_seen, PubSubEndpoint.last_pub_time,
PubSubEndpointTopic.last_pub_time,
PubSubEndpointTopic.pub_msg_id.label('last_msg_id'),
PubSubEndpointTopic.pub_correl_id.label('last_correl_id'),
PubSubEndpointTopic.in_reply_to.label('last_in_reply_to'),
PubSubEndpointTopic.ext_client_id,
Service.name.label('service_name'),
SecurityBase.name.label('sec_name'),
ChannelWebSocket.name.label('ws_channel_name'),
).\
outerjoin(Service, Service.id==PubSubEndpoint.service_id).\
outerjoin(SecurityBase, SecurityBase.id==PubSubEndpoint.security_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==PubSubEndpoint.ws_channel_id).\
filter(PubSubEndpointTopic.topic_id==PubSubTopic.id).\
filter(PubSubEndpointTopic.topic_id==topic_id).\
filter(PubSubEndpointTopic.endpoint_id==PubSubEndpoint.id).\
filter(PubSubEndpointTopic.cluster_id==cluster_id)
# ################################################################################################################################
def _pubsub_topic_message(session, cluster_id, needs_sub_queue_check):
q = session.query(
PubSubMessage.pub_msg_id.label('msg_id'),
PubSubMessage.pub_correl_id.label('correl_id'),
PubSubMessage.in_reply_to,
PubSubMessage.pub_time, PubSubMessage.data_prefix_short,
PubSubMessage.pub_pattern_matched, PubSubMessage.priority,
PubSubMessage.ext_pub_time, PubSubMessage.size,
PubSubMessage.data_format, PubSubMessage.mime_type,
PubSubMessage.data, PubSubMessage.expiration,
PubSubMessage.expiration_time, PubSubMessage.has_gd,
PubSubMessage.ext_client_id,
PubSubEndpoint.id.label('endpoint_id'),
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.service_id,
PubSubEndpoint.security_id,
PubSubEndpoint.ws_channel_id,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
).\
filter(PubSubMessage.published_by_id==PubSubEndpoint.id).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.topic_id==PubSubTopic.id)
if needs_sub_queue_check:
q = q.\
filter(~PubSubMessage.is_in_sub_queue)
return q
# ################################################################################################################################
def pubsub_message(session, cluster_id, pub_msg_id, needs_sub_queue_check=True):
return _pubsub_topic_message(session, cluster_id, needs_sub_queue_check).\
filter(PubSubMessage.pub_msg_id==pub_msg_id)
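# Note that pubsub_message returns a query rather than a row, so it is up
# to the caller to materialize it, e.g. (the message ID is a placeholder):
#
#     msg = pubsub_message(session, 1, 'zpsm-example-id').first()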
# ################################################################################################################################
def _pubsub_endpoint_queue(session, cluster_id):
return session.query(
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.active_status,
PubSubSubscription.is_internal,
PubSubSubscription.creation_time,
PubSubSubscription.sub_key,
PubSubSubscription.has_gd,
PubSubSubscription.delivery_method,
PubSubSubscription.delivery_data_format,
PubSubSubscription.delivery_endpoint,
PubSubSubscription.is_staging_enabled,
PubSubSubscription.ext_client_id,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
PubSubTopic.name.label('name'), # Currently queue names are the same as their originating topics
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.id.label('endpoint_id'),
WebSocketSubscription.ext_client_id.label('ws_ext_client_id'),
).\
outerjoin(WebSocketSubscription, WebSocketSubscription.sub_key==PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(PubSubSubscription.cluster_id==cluster_id).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id)
# ################################################################################################################################
@query_wrapper
def pubsub_endpoint_queue_list(session, cluster_id, endpoint_id, needs_columns=False):
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id).\
order_by(PubSubSubscription.creation_time.desc())
# ################################################################################################################################
def pubsub_endpoint_queue_list_by_sub_keys(session, cluster_id, sub_key_list) -> 'anylist':
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.sub_key.in_(sub_key_list)).\
all()
# ################################################################################################################################
def pubsub_endpoint_queue(session, cluster_id, sub_id):
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.id==sub_id).\
one()
# ################################################################################################################################
@query_wrapper
def pubsub_messages_for_topic(session, cluster_id, topic_id, needs_columns=False):
return _pubsub_topic_message(session, cluster_id, True).\
filter(PubSubMessage.topic_id==topic_id).\
order_by(PubSubMessage.pub_time.desc())
# ################################################################################################################################
def _pubsub_queue_message(session, cluster_id):
return session.query(
PubSubMessage.pub_msg_id.label('msg_id'),
PubSubMessage.pub_correl_id.label('correl_id'),
PubSubMessage.in_reply_to,
PubSubMessage.data_prefix_short,
PubSubMessage.priority,
PubSubMessage.ext_pub_time,
PubSubMessage.size,
PubSubMessage.data_format,
PubSubMessage.mime_type,
PubSubMessage.data,
PubSubMessage.expiration,
PubSubMessage.expiration_time,
PubSubMessage.ext_client_id,
PubSubMessage.published_by_id,
PubSubMessage.pub_pattern_matched,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
PubSubTopic.name.label('queue_name'), # Currently, queue name = name of its underlying topic
PubSubEndpointEnqueuedMessage.creation_time.label('recv_time'),
PubSubEndpointEnqueuedMessage.delivery_count,
PubSubEndpointEnqueuedMessage.last_delivery_time,
PubSubEndpointEnqueuedMessage.is_in_staging,
PubSubEndpointEnqueuedMessage.endpoint_id.label('subscriber_id'),
PubSubEndpointEnqueuedMessage.sub_key,
PubSubEndpoint.name.label('subscriber_name'),
PubSubSubscription.sub_pattern_matched,
).\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubEndpointEnqueuedMessage.topic_id==PubSubTopic.id).\
filter(PubSubEndpointEnqueuedMessage.endpoint_id==PubSubEndpoint.id).\
filter(PubSubEndpointEnqueuedMessage.sub_key==PubSubSubscription.sub_key).\
filter(PubSubEndpointEnqueuedMessage.cluster_id==cluster_id)
# ################################################################################################################################
def pubsub_queue_message(session, cluster_id, msg_id):
return _pubsub_queue_message(session, cluster_id).\
filter(PubSubMessage.pub_msg_id==msg_id)
# ################################################################################################################################
def pubsub_messages_for_queue_raw(session, cluster_id, sub_key, *, skip_delivered=False):
q = _pubsub_queue_message(session, cluster_id).\
filter(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
if skip_delivered:
q = q.filter(PubSubEndpointEnqueuedMessage.delivery_status != PUBSUB.DELIVERY_STATUS.DELIVERED)
return q
# ################################################################################################################################
@query_wrapper
def pubsub_messages_for_queue(session, cluster_id, sub_key, skip_delivered=False, needs_columns=False):
q = pubsub_messages_for_queue_raw(session, cluster_id, sub_key, skip_delivered=skip_delivered)
return q.order_by(PubSubEndpointEnqueuedMessage.creation_time.desc())
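# For example, to read only the undelivered messages for a subscription
# (the sub_key literal is a placeholder):
#
#     undelivered = pubsub_messages_for_queue(session, 1, 'zpsk-example', skip_delivered=True)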
# ################################################################################################################################
def pubsub_hook_service(session, cluster_id, endpoint_id, model_class):
return session.query(
Service.id,
Service.name,
).\
filter(Cluster.id==Service.cluster_id).\
filter(Service.id==model_class.hook_service_id).\
first()
# ################################################################################################################################
def _notif_sql(session, cluster_id, needs_password):
""" SQL notifications.
"""
columns = [NotifSQL.id, NotifSQL.is_active, NotifSQL.name, NotifSQL.query, NotifSQL.notif_type, NotifSQL.interval,
NotifSQL.def_id, SQLConnectionPool.name.label('def_name'), Service.name.label('service_name')]
if needs_password:
columns.append(SQLConnectionPool.password)
return session.query(*columns).\
filter(Cluster.id==NotifSQL.cluster_id).\
filter(SQLConnectionPool.id==NotifSQL.def_id).\
filter(Service.id==NotifSQL.service_id).\
filter(Cluster.id==cluster_id)
@query_wrapper
def notif_sql_list(session, cluster_id, needs_password=False, needs_columns=False):
""" All the SQL notifications.
"""
return _notif_sql(session, cluster_id, needs_password)
# ################################################################################################################################
def _search_es(session, cluster_id):
""" ElasticSearch connections.
"""
return session.query(ElasticSearch).\
filter(Cluster.id==ElasticSearch.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ElasticSearch.name)
@query_wrapper
def search_es_list(session, cluster_id, needs_columns=False):
""" All the ElasticSearch connections.
"""
return _search_es(session, cluster_id)
# ################################################################################################################################
def _search_solr(session, cluster_id):
""" Solr sonnections.
"""
return session.query(Solr).\
filter(Cluster.id==Solr.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(Solr.name)
@query_wrapper
def search_solr_list(session, cluster_id, needs_columns=False):
""" All the Solr connections.
"""
return _search_solr(session, cluster_id)
# ################################################################################################################################
def _server(session, cluster_id, cluster_name):
q = session.query(
Server.id, Server.name, Server.bind_host, Server.bind_port, Server.last_join_status, Server.last_join_mod_date,
Server.last_join_mod_by, Server.up_status, Server.up_mod_date, Server.preferred_address,
Server.crypto_use_tls,
Cluster.id.label('cluster_id'), Cluster.name.label('cluster_name')).\
filter(Cluster.id==Server.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.order_by(Server.name)
return q
@query_wrapper
def server_list(session, cluster_id, cluster_name, up_status=None, needs_columns=False):
""" All the servers defined on a cluster.
"""
q = _server(session, cluster_id, cluster_name)
if up_status:
q = q.filter(Server.up_status==up_status)
return q
def server_by_name(session, cluster_id, cluster_name, server_name):
return _server(session, cluster_id, cluster_name).\
filter(Server.name==server_name).\
all()
def server_by_id(session, cluster_id, server_id):
return _server(session, cluster_id, None).\
filter(Server.id==server_id).\
one()
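# The helpers above accept either a cluster ID or a cluster name - when the
# ID is missing, the name is used instead; a sketch with assumed values:
#
#     servers = server_list(session, None, 'my-cluster')
#     server = server_by_id(session, 1, server_id=2)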
# ################################################################################################################################
def _cassandra_conn(session, cluster_id):
return session.query(CassandraConn).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CassandraConn.cluster_id).\
order_by(CassandraConn.name)
def cassandra_conn(session, cluster_id, id):
""" A Cassandra connection definition.
"""
return _cassandra_conn(session, cluster_id).\
filter(CassandraConn.id==id).\
one()
@query_wrapper
def cassandra_conn_list(session, cluster_id, needs_columns=False):
""" A list of Cassandra connection definitions.
"""
return _cassandra_conn(session, cluster_id)
# ################################################################################################################################
def _cassandra_query(session, cluster_id):
return session.query(
CassandraQuery.id, CassandraQuery.name, CassandraQuery.value,
CassandraQuery.is_active, CassandraQuery.cluster_id,
CassandraConn.name.label('def_name'),
CassandraConn.id.label('def_id'),
).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CassandraQuery.cluster_id).\
filter(CassandraConn.id==CassandraQuery.def_id).\
order_by(CassandraQuery.name)
def cassandra_query(session, cluster_id, id):
""" A Cassandra prepared statement.
"""
return _cassandra_query(session, cluster_id).\
filter(CassandraQuery.id==id).\
one()
@query_wrapper
def cassandra_query_list(session, cluster_id, needs_columns=False):
""" A list of Cassandra prepared statements.
"""
return _cassandra_query(session, cluster_id)
# ################################################################################################################################
def _email_smtp(session, cluster_id):
return session.query(SMTP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==SMTP.cluster_id).\
order_by(SMTP.name)
def email_smtp(session, cluster_id, id):
""" An SMTP connection.
"""
return _email_smtp(session, cluster_id).\
filter(SMTP.id==id).\
one()
@query_wrapper
def email_smtp_list(session, cluster_id, needs_columns=False):
""" A list of SMTP connections.
"""
return _email_smtp(session, cluster_id)
# ################################################################################################################################
def _email_imap(session, cluster_id):
return session.query(IMAP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==IMAP.cluster_id).\
order_by(IMAP.name)
def email_imap(session, cluster_id, id):
""" An IMAP connection.
"""
return _email_imap(session, cluster_id).\
filter(IMAP.id==id).\
one()
@query_wrapper
def email_imap_list(session, cluster_id, needs_columns=False):
""" A list of IMAP connections.
"""
return _email_imap(session, cluster_id)
# ################################################################################################################################
def _rbac_permission(session, cluster_id):
return session.query(RBACPermission).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACPermission.cluster_id).\
order_by(RBACPermission.name)
def rbac_permission(session, cluster_id, id=None, name=None):
""" An RBAC permission.
"""
q = _rbac_permission(session, cluster_id)
if name:
q = q.filter(RBACPermission.name==name)
elif id:
q = q.filter(RBACPermission.id==id)
return q.one()
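# An illustrative lookup, by name or by ID (both values are hypothetical):
#
#     perm = rbac_permission(session, 1, name='Create')
#     perm = rbac_permission(session, 1, id=4)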
@query_wrapper
def rbac_permission_list(session, cluster_id, needs_columns=False):
""" A list of RBAC permissions.
"""
return _rbac_permission(session, cluster_id)
# ################################################################################################################################
def _rbac_role(session, cluster_id):
rbac_parent = aliased(RBACRole)
return session.query(RBACRole.id, RBACRole.name, RBACRole.parent_id, rbac_parent.name.label('parent_name')).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACRole.cluster_id).\
outerjoin(rbac_parent, rbac_parent.id==RBACRole.parent_id).\
order_by(RBACRole.name)
def rbac_role(session, cluster_id, id=None, name=None):
""" An RBAC role.
"""
q = _rbac_role(session, cluster_id)
if name:
q = q.filter(RBACRole.name==name)
elif id:
q = q.filter(RBACRole.id==id)
return q.one()
@query_wrapper
def rbac_role_list(session, cluster_id, needs_columns=False):
""" A list of RBAC roles.
"""
return _rbac_role(session, cluster_id)
# ################################################################################################################################
def _rbac_client_role(session, cluster_id):
return session.query(RBACClientRole).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACClientRole.cluster_id).\
order_by(RBACClientRole.client_def)
def rbac_client_role(session, cluster_id, id):
""" An individual mapping between a client and role.
"""
return _rbac_client_role(session, cluster_id).\
filter(RBACClientRole.id==id).\
one()
@query_wrapper
def rbac_client_role_list(session, cluster_id, needs_columns=False):
""" A list of mappings between clients and roles.
"""
return _rbac_client_role(session, cluster_id)
# ################################################################################################################################
def _rbac_role_permission(session, cluster_id):
return session.query(RBACRolePermission).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACRolePermission.cluster_id).\
order_by(RBACRolePermission.role_id)
def rbac_role_permission(session, cluster_id, id):
""" An individual permission for a given role against a service.
"""
return _rbac_role_permission(session, cluster_id).\
filter(RBACRolePermission.id==id).\
one()
@query_wrapper
def rbac_role_permission_list(session, cluster_id, needs_columns=False):
""" A list of permissions for roles against services.
"""
return _rbac_role_permission(session, cluster_id)
# ################################################################################################################################
def cache_by_id(session, cluster_id, cache_id):
return session.query(Cache).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==Cache.cluster_id).\
filter(Cache.id==cache_id).\
one()
# ################################################################################################################################
def _cache_builtin(session, cluster_id):
return session.query(CacheBuiltin).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CacheBuiltin.cluster_id).\
filter(Cache.id==CacheBuiltin.cache_id).\
filter(Cache.cache_type==CACHE.TYPE.BUILTIN).\
order_by(CacheBuiltin.name)
def cache_builtin(session, cluster_id, id):
""" An individual built-in cache definition.
"""
return _cache_builtin(session, cluster_id).\
filter(CacheBuiltin.id==id).\
one()
@query_wrapper
def cache_builtin_list(session, cluster_id, needs_columns=False):
""" A list of built-in cache definitions.
"""
return _cache_builtin(session, cluster_id)
# ################################################################################################################################
def _cache_memcached(session, cluster_id):
return session.query(
CacheMemcached.cache_id, CacheMemcached.name, CacheMemcached.is_active,
CacheMemcached.is_default, CacheMemcached.is_debug,
CacheMemcached.servers, CacheMemcached.extra,
CacheMemcached.cache_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CacheMemcached.cluster_id).\
filter(Cache.id==CacheMemcached.cache_id).\
filter(Cache.cache_type==CACHE.TYPE.MEMCACHED).\
order_by(CacheMemcached.name)
def cache_memcached(session, cluster_id, id):
""" An individual Memcached cache definition.
"""
return _cache_memcached(session, cluster_id).\
filter(CacheMemcached.id==id).\
one()
@query_wrapper
def cache_memcached_list(session, cluster_id, needs_columns=False):
""" A list of Memcached cache definitions.
"""
return _cache_memcached(session, cluster_id)
# ################################################################################################################################
def _out_odoo(session, cluster_id):
return session.query(OutgoingOdoo).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OutgoingOdoo.cluster_id).\
order_by(OutgoingOdoo.name)
def out_odoo(session, cluster_id, id):
""" An individual Odoo connection.
"""
return _out_odoo(session, cluster_id).\
filter(OutgoingOdoo.id==id).\
one()
@query_wrapper
def out_odoo_list(session, cluster_id, needs_columns=False):
""" A list of Odoo connections.
"""
return _out_odoo(session, cluster_id)
# ################################################################################################################################
def _out_sap(session, cluster_id):
return session.query(OutgoingSAP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OutgoingSAP.cluster_id).\
order_by(OutgoingSAP.name)
def out_sap(session, cluster_id, id):
""" An individual SAP RFC connection.
"""
return _out_sap(session, cluster_id).\
filter(OutgoingSAP.id==id).\
one()
@query_wrapper
def out_sap_list(session, cluster_id, needs_columns=False):
""" A list of SAP RFC connections.
"""
return _out_sap(session, cluster_id)
# ################################################################################################################################
def _channel_web_socket(session, cluster_id):
""" WebSocket channels
"""
return session.query(
ChannelWebSocket.id,
ChannelWebSocket.name,
ChannelWebSocket.is_active,
ChannelWebSocket.is_internal,
ChannelWebSocket.address,
ChannelWebSocket.data_format,
ChannelWebSocket.service_id,
ChannelWebSocket.security_id,
ChannelWebSocket.new_token_wait_time,
ChannelWebSocket.token_ttl,
ChannelWebSocket.is_out,
ChannelWebSocket.opaque1,
SecurityBase.sec_type,
VaultConnection.default_auth_method.label('vault_conn_default_auth_method'),
SecurityBase.name.label('sec_name'),
Service.name.label('service_name'),
).\
outerjoin(Service, Service.id==ChannelWebSocket.service_id).\
outerjoin(SecurityBase, SecurityBase.id==ChannelWebSocket.security_id).\
outerjoin(VaultConnection, SecurityBase.id==VaultConnection.id).\
filter(Cluster.id==ChannelWebSocket.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelWebSocket.name)
def channel_web_socket(session, cluster_id, id):
""" An incoming WebSocket connection.
"""
return _channel_web_socket(session, cluster_id).\
filter(ChannelWebSocket.id==id).\
one()
@query_wrapper
def channel_web_socket_list(session, cluster_id, needs_columns=False):
""" All the WebSocket channel connections.
"""
return _channel_web_socket(session, cluster_id)
# ################################################################################################################################
def web_socket_client_by_pub_id(session, pub_client_id):
""" An individual WebSocket connection by its public ID.
"""
return session.query(
WebSocketClient.id,
ChannelWebSocket.id.label('channel_id'),
ChannelWebSocket.name.label('channel_name')
).\
filter(WebSocketClient.pub_client_id==pub_client_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==WebSocketClient.channel_id).\
first()
# ################################################################################################################################
def web_socket_client_by_ext_id(session, ext_client_id, needs_one_or_none=False):
""" An individual WebSocket connection by its external client ID.
"""
query = session.query(
WebSocketClient,
ChannelWebSocket.id.label('channel_id'),
ChannelWebSocket.name.label('channel_name')
).\
filter(WebSocketClient.ext_client_id==ext_client_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==WebSocketClient.channel_id)
return query.one_or_none() if needs_one_or_none else query.all()
# ################################################################################################################################
def web_socket_clients_by_server_id(session, server_id, server_pid):
""" A list of WebSocket clients attached to a particular server by the latter's ID.
"""
query = session.query(WebSocketClient).\
filter(WebSocketClient.server_id==server_id)
if server_pid:
query = query.\
filter(WebSocketClient.server_proc_pid==server_pid)
return query
# ################################################################################################################################
def _web_socket_client(session, cluster_id, channel_id):
return session.query(WebSocketClient).\
filter(WebSocketClient.cluster_id==cluster_id).\
filter(WebSocketClient.channel_id==channel_id).\
order_by(WebSocketClient.connection_time.desc())
# ################################################################################################################################
def web_socket_client(session, cluster_id, channel_id, pub_client_id=None, ext_client_id=None, use_first=True):
query = _web_socket_client(session, cluster_id, channel_id)
if pub_client_id:
query = query.filter(WebSocketClient.pub_client_id==pub_client_id)
elif ext_client_id:
query = query.filter(WebSocketClient.ext_client_id==ext_client_id)
else:
raise ValueError('Either pub_client_id or ext_client_id is required on input')
return query.first() if use_first else query.all()
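# A sketch of both lookup modes - exactly one of the client IDs must be
# given (the literals below are placeholders):
#
#     client = web_socket_client(session, 1, channel_id=3, pub_client_id='ws.abc123')
#     clients = web_socket_client(session, 1, 3, ext_client_id='crm-17', use_first=False)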
# ################################################################################################################################
@query_wrapper
def web_socket_client_list(session, cluster_id, channel_id, needs_columns=False):
""" A list of subscriptions to a particular pattern.
"""
return _web_socket_client(session, cluster_id, channel_id)
# ################################################################################################################################
def _web_socket_sub_key_data(session, cluster_id, pub_client_id):
return session.query(
WebSocketClientPubSubKeys.sub_key,
PubSubSubscription.topic_id,
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.creation_time,
PubSubSubscription.endpoint_id,
PubSubSubscription.sub_pattern_matched,
PubSubSubscription.ext_client_id,
PubSubEndpoint.name.label('endpoint_name'),
PubSubTopic.name.label('topic_name')
).\
filter(WebSocketClient.pub_client_id==pub_client_id).\
filter(WebSocketClient.id==WebSocketClientPubSubKeys.client_id).\
filter(WebSocketClientPubSubKeys.sub_key==WebSocketSubscription.sub_key).\
filter(WebSocketClientPubSubKeys.sub_key==PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id)
@query_wrapper
def web_socket_sub_key_data_list(session, cluster_id, pub_client_id, needs_columns=False):
return _web_socket_sub_key_data(session, cluster_id, pub_client_id)
# ################################################################################################################################
def _vault_connection(session, cluster_id):
return session.query(VaultConnection.id, VaultConnection.is_active, VaultConnection.name,
VaultConnection.url, VaultConnection.token, VaultConnection.default_auth_method,
VaultConnection.timeout, VaultConnection.allow_redirects, VaultConnection.tls_verify,
VaultConnection.tls_ca_cert_id, VaultConnection.tls_key_cert_id, VaultConnection.sec_type,
Service.name.label('service_name'), Service.id.label('service_id')).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==VaultConnection.cluster_id).\
outerjoin(Service, Service.id==VaultConnection.service_id).\
order_by(VaultConnection.name)
def vault_connection(session, cluster_id, id):
""" An individual Vault connection.
"""
return _vault_connection(session, cluster_id).\
filter(VaultConnection.id==id).\
one()
@query_wrapper
def vault_connection_list(session, cluster_id, needs_columns=False):
""" A list of Vault connections.
"""
return _vault_connection(session, cluster_id)
# ################################################################################################################################
def _sms_twilio(session, cluster_id):
""" SMS Twilio connections.
"""
return session.query(SMSTwilio).\
filter(Cluster.id==SMSTwilio.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(SMSTwilio.name)
def sms_twilio(session, cluster_id, id):
""" An individual SMS Twilio connection.
"""
return _sms_twilio(session, cluster_id).\
filter(SMSTwilio.id==id).\
one()
@query_wrapper
def sms_twilio_list(session, cluster_id, needs_columns=False):
""" All the SMS Twilio connections.
"""
return _sms_twilio(session, cluster_id)
# ################################################################################################################################
| 69,099
|
Python
|
.py
| 1,521
| 38.777778
| 134
| 0.58355
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,649
|
cleanup.py
|
zatosource_zato/code/zato-common/src/zato/common/odb/query/cleanup.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import operator
from logging import getLogger
# SQLAlchemy
from sqlalchemy import and_, delete, func, or_, select
# Zato
from zato.common.odb.model import PubSubEndpoint, PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription, PubSubTopic
# ################################################################################################################################
# ################################################################################################################################
if 0:
from datetime import datetime
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, anylist, strlist
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato_pubsub.sql')
# ################################################################################################################################
# ################################################################################################################################
QueueTable = PubSubEndpointEnqueuedMessage.__table__
MsgTable = PubSubMessage.__table__
# ################################################################################################################################
# ################################################################################################################################
def get_subscriptions(
task_id:'str',
session:'SASession',
topic_id: 'int',
topic_name: 'str',
max_last_interaction_time:'float',
topic_max_last_interaction_time_dt:'datetime',
topic_max_last_interaction_time_source:'str'
) -> 'anylist':
msg = '%s: Getting subscriptions for topic `%s` (id:%s) with max_last_interaction_time `%s` -> %s (s:%s)'
logger.info(msg, task_id, topic_name, topic_id,
max_last_interaction_time, topic_max_last_interaction_time_dt, topic_max_last_interaction_time_source)
result = session.query(
PubSubSubscription.id,
PubSubSubscription.sub_key,
PubSubSubscription.ext_client_id,
PubSubSubscription.last_interaction_time,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.id.label('endpoint_id'),
PubSubTopic.opaque1.label('topic_opaque'),
).\
filter(PubSubSubscription.topic_id == topic_id).\
filter(PubSubSubscription.topic_id == PubSubTopic.id).\
filter(PubSubSubscription.endpoint_id == PubSubEndpoint.id).\
filter(PubSubEndpoint.is_internal.is_(False)).\
filter(or_(
PubSubSubscription.last_interaction_time < max_last_interaction_time,
PubSubSubscription.last_interaction_time.is_(None),
)).\
order_by(PubSubSubscription.last_interaction_time.asc()).\
all()
return result
# ################################################################################################################################
# ################################################################################################################################
def get_topic_messages_with_max_retention_reached(
task_id:'str',
session:'SASession',
topic_id:'int',
topic_name:'str',
max_pub_time_dt:'datetime',
max_pub_time_float:'float',
) -> 'anylist':
logger.info('%s: Looking for messages with max. retention reached for topic `%s` (%s -> %s)',
task_id, topic_name, max_pub_time_float, max_pub_time_dt)
result = session.query(
PubSubMessage.pub_msg_id,
).\
filter(PubSubMessage.topic_id == topic_id).\
filter(PubSubMessage.pub_time < max_pub_time_float).\
all()
return result
# ################################################################################################################################
# ################################################################################################################################
def _get_topic_messages_by_in_how_many_queues(
session:'SASession',
topic_id:'int',
queue_len_operator: 'any_',
queue_len: 'int',
) -> 'Query':
# If it is zero, it means that there are no subscribers for a given message.
# Otherwise, the message is in at least one subscription queue.
in_how_many_queues = func.count(PubSubEndpointEnqueuedMessage.pub_msg_id).label('in_how_many_queues')
#
# This is used to build conditions representing two cases:
#
# 1) having(in_how_many_queues == 0) # No subscribers for message
# 2) having(in_how_many_queues >= 1) # At least one subscriber for message
#
condition = queue_len_operator(in_how_many_queues, queue_len)
query = session.query(
PubSubMessage.pub_msg_id,
).\
group_by(PubSubMessage.pub_msg_id).\
outerjoin(PubSubEndpointEnqueuedMessage, PubSubMessage.id==PubSubEndpointEnqueuedMessage.pub_msg_id).\
having(condition).\
filter(PubSubMessage.topic_id == topic_id)
return query
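# For illustration, the two cases from the comment above map to these
# invocations, using the stdlib operator module imported at the top of
# this file:
#
#     no_subs = _get_topic_messages_by_in_how_many_queues(session, topic_id, operator.eq, 0)
#     has_subs = _get_topic_messages_by_in_how_many_queues(session, topic_id, operator.ge, 1)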
# ################################################################################################################################
# ################################################################################################################################
def get_topic_messages_without_subscribers(
task_id:'str',
session:'SASession',
topic_id:'int',
topic_name:'str',
max_pub_time_dt:'datetime',
max_pub_time_float:'float',
) -> 'anylist':
logger.info('%s: Looking for messages without subscribers for topic `%s` (%s -> %s)',
task_id, topic_name, max_pub_time_float, max_pub_time_dt)
#
# We are building a query condition of this form: having(in_how_many_queues == 0)
#
queue_len_operator = operator.eq # eq = equal
queue_len = 0
# Build a base query ..
query = _get_topic_messages_by_in_how_many_queues(
session,
topic_id,
queue_len_operator,
queue_len
)
# Add our own condition around the max. publication time - we need it because we do not want
# to return messages that have not reached their retention time yet. E.g. if a message was published
# during a cleanup procedure, it may still see its subscriber in the next N seconds,
# which is why we cannot return it. If it never receives a subscriber, or if its max. retention time
# is eventually reached, it will be cleaned up in another pass of the cleanup procedure.
query = query.\
filter(PubSubMessage.pub_time < max_pub_time_float)
# .. obtain the result ..
result = query.all()
# .. and return it to the caller.
return result
# ################################################################################################################################
# ################################################################################################################################
def get_topic_messages_already_expired(
task_id:'str',
session:'SASession',
topic_id:'int',
topic_name:'str',
max_pub_time_dt:'datetime',
max_pub_time_float:'float',
) -> 'anylist':
logger.info('%s: Looking for already expired messages for topic `%s` (%s -> %s)',
task_id, topic_name, max_pub_time_float, max_pub_time_dt)
# Build a query to find all the expired messages for the topic ..
query = select([
MsgTable.c.pub_msg_id,
]).\
where(and_(
MsgTable.c.topic_id == topic_id,
MsgTable.c.expiration_time < max_pub_time_float,
))
# .. obtain the result ..
result = session.execute(query).fetchall()
# .. and return it to the caller.
return result
# ################################################################################################################################
# ################################################################################################################################
def delete_queue_messages(session:'SASession', msg_id_list:'strlist') -> 'None':
logger.info('Deleting %d queue message(s): %s', len(msg_id_list), msg_id_list)
session.execute(
delete(QueueTable).\
where(
QueueTable.c.pub_msg_id.in_(msg_id_list),
)
)
# ################################################################################################################################
# ################################################################################################################################
def delete_topic_messages(session:'SASession', msg_id_list:'strlist') -> 'None':
logger.info('Deleting %d topic message(s): %s', len(msg_id_list), msg_id_list)
session.execute(
delete(MsgTable).\
where(
MsgTable.c.pub_msg_id.in_(msg_id_list),
)
)
# ################################################################################################################################
# ################################################################################################################################
| 9,492
|
Python
|
.py
| 187
| 45.213904
| 130
| 0.460184
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,650
|
common.py
|
zatosource_zato/code/zato-common/src/zato/common/odb/query/common.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.api import CONNECTION
from zato.common.odb.model import HTTPSOAP, PubSubTopic
# SQLAlchemy
from sqlalchemy import and_, select
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy import Column
from sqlalchemy.orm.session import Session as SASession
from zato.common.odb.model.base import Base as BaseTable
from zato.common.typing_ import any_, anylist, anylistnone, strlist
Column = Column
# ################################################################################################################################
# ################################################################################################################################
RESTTable:'any_' = HTTPSOAP.__table__
TopicTable:'any_' = PubSubTopic.__table__
# ################################################################################################################################
# ################################################################################################################################
def get_object_list_by_columns(
session:'SASession',
columns:'anylist',
order_by:'any_'=None
) -> 'anylist':
""" Returns all ODB objects from a given table.
"""
q = select(columns)
if order_by is not None:
q = q.order_by(order_by)
result:'anylist' = session.execute(q).fetchall()
return result
# ################################################################################################################################
# ################################################################################################################################
def get_object_list(
session:'SASession',
table:'BaseTable',
) -> 'anylist':
""" Returns all ODB objects from a given table.
"""
columns = [table.c.id, table.c.name]
order_by = table.c.name.desc()
result = get_object_list_by_columns(session, columns, order_by)
return result
# ################################################################################################################################
# ################################################################################################################################
def get_object_list_by_where_impl(
session:'SASession',
table:'BaseTable',
where_list:'str | anylistnone'
) -> 'anylist':
""" Returns ODB objects matching the input query.
"""
# Local variables
where_list = where_list if where_list is not None else []
where_list = where_list if isinstance(where_list, list) else [where_list]
q = select([
table.c.id,
table.c.name,
]).\
where(and_(*where_list))
result:'anylist' = session.execute(q).fetchall()
return result
# ################################################################################################################################
def get_object_list_by_where(
session:'SASession',
table:'BaseTable',
value:'str | strlist',
extra_where_list:'str | anylistnone',
attr_name:'str',
attr_func:'str',
) -> 'anylist':
""" Returns ODB objects matching the input query.
"""
# Local variables
extra_where_list = extra_where_list or []
extra_where_list = extra_where_list if isinstance(extra_where_list, list) else [extra_where_list]
attr = getattr(table.c, attr_name)
func = getattr(attr, attr_func)
name_where:'any_' = func(value)
where_list = [name_where]
where_list.extend(extra_where_list)
result = get_object_list_by_where_impl(session, table, where_list)
return result
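# A hedged example of the generic helper above - selecting rows from the
# topic table whose name is in a given list (the names are placeholders):
#
#     rows = get_object_list_by_where(session, TopicTable, ['topic1', 'topic2'], None, 'name', 'in_')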
# ################################################################################################################################
def get_object_list_by_id_list(
session:'SASession',
table:'BaseTable',
object_name_list:'str',
extra_where_list:'str | anylistnone'=None
) -> 'anylist':
""" Returns ODB objects whose ID is equal to the ones from input.
"""
# Parameters for the where criteria
attr_name = 'id'
attr_func = 'in_'
result = get_object_list_by_where(session, table, object_name_list, extra_where_list, attr_name, attr_func)
return result
# ################################################################################################################################
def get_object_list_by_name_list(
session:'SASession',
table:'BaseTable',
object_name_list:'str',
extra_where_list:'str | anylistnone'=None
) -> 'anylist':
""" Returns ODB objects whose name is equal to the ones from input.
"""
# Parameters for the where criteria
attr_name = 'name'
attr_func = 'in_'
result = get_object_list_by_where(session, table, object_name_list, extra_where_list, attr_name, attr_func)
return result
# ################################################################################################################################
def get_object_list_by_name_contains(
session:'SASession',
table:'BaseTable',
pattern:'str | strlist',
extra_where_list:'str | anylistnone'=None
) -> 'anylist':
""" Returns ODB objects whose name contains the input pattern.
"""
# Parameters for the where criteria
attr_name = 'name'
attr_func = 'contains'
result = get_object_list_by_where(session, table, pattern, extra_where_list, attr_name, attr_func)
return result
# ################################################################################################################################
def get_rest_list_by_name_pattern(session:'SASession', pattern:'str', is_channel:'bool',) -> 'anylist':
""" Returns REST objects matching the pattern.
"""
# Local variables
table:'BaseTable' = RESTTable
# Find out whether we need channels or outgoing connections ..
if is_channel:
connection = CONNECTION.CHANNEL
else:
connection = CONNECTION.OUTGOING
# .. build an additional where clause depending on what we need ..
connection_where:'any_' = RESTTable.c.connection == connection
result = get_object_list_by_name_contains(session, table, pattern, connection_where)
return result
# ################################################################################################################################
def get_rest_channel_list_by_name_pattern(session:'SASession', pattern:'str') -> 'anylist':
""" Returns REST channels matching the pattern.
"""
result = get_rest_list_by_name_pattern(session, pattern, True)
return result
# ################################################################################################################################
def get_rest_outgoing_list_by_name_pattern(session:'SASession', pattern:'str') -> 'anylist':
""" Returns REST outgoing connections matching the pattern.
"""
result = get_rest_list_by_name_pattern(session, pattern, False)
return result
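# Example calls, with a hypothetical pattern:
#
#     channels = get_rest_channel_list_by_name_pattern(session, 'billing')
#     outconns = get_rest_outgoing_list_by_name_pattern(session, 'billing')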
# ################################################################################################################################
# ################################################################################################################################
| 7,570
|
Python
|
.py
| 159
| 43.396226
| 130
| 0.455483
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,651
|
publish.py
|
zatosource_zato/code/zato-common/src/zato/common/odb/query/pubsub/publish.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from logging import getLogger
# SQLAlchemy
from sqlalchemy.exc import IntegrityError
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpoint, PubSubEndpointEnqueuedMessage, PubSubEndpointTopic, PubSubMessage, PubSubTopic
from zato.common.pubsub import ensure_subs_exist, msg_pub_ignore
from zato.common.util.sql.retry import sql_op_with_deadlock_retry
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, callable_, callnone, strdictlist
from zato.server.pubsub.model import sublist
# ################################################################################################################################
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_pubsub = getLogger('zato_pubsub')
# ################################################################################################################################
# ################################################################################################################################
MsgInsert = PubSubMessage.__table__.insert
EndpointTopicInsert = PubSubEndpointTopic.__table__.insert
EnqueuedMsgInsert = PubSubEndpointEnqueuedMessage.__table__.insert
MsgTable = PubSubMessage.__table__
TopicTable = PubSubTopic.__table__
EndpointTable = PubSubEndpoint.__table__
EndpointTopicTable = PubSubEndpointTopic.__table__
# ################################################################################################################################
# ################################################################################################################################
_float_str = PUBSUB.FLOAT_STRING_CONVERT
sub_only_keys = ('sub_pattern_matched', 'topic_name')
# ################################################################################################################################
# ################################################################################################################################
class PublishOpCtx:
needs_topic_messages:'bool' = True
needs_queue_messages:'bool' = True
is_queue_insert_ok:'bool | str' = 'not-set-yet'
suffix:'str'
len_sub_keys_by_topic: 'int'
def __init__(self, pub_counter:'int') -> 'None':
# This is a server-wide counter of messages published
self.pub_counter = pub_counter
# This is a counter increased after each unsuccessful queue insertion attempt.
self.queue_insert_attempt = 0
# ################################################################################################################################
def on_new_iter(self, subscriptions_by_topic:'sublist') -> 'None':
# This is invoked in each iteration of a publication loop.
# First, increase the publication attempt counter ..
self.queue_insert_attempt += 1
# .. reset the indicator pointing to whether the insertion was successful ..
self.is_queue_insert_ok = 'not-set-yet'
# .. now, find out how many subscriptions for messages there are
# .. and add the relevant suffix for the noun -> sub_key vs. sub_keys.
self.len_sub_keys_by_topic = len(subscriptions_by_topic)
self.suffix = ' ' if self.len_sub_keys_by_topic == 1 else 's '
# ################################################################################################################################
def get_counter_ctx_str(self) -> 'str':
return f'{self.pub_counter}:{self.queue_insert_attempt}'
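# A worked example: for server-wide publication #105, the third insertion
# attempt yields get_counter_ctx_str() == '105:3'.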
# ################################################################################################################################
# ################################################################################################################################
class PublishWithRetryManager:
def __init__(
self,
now, # type: float
cid, # type: str
topic_id, # type: int
topic_name, # type: str
cluster_id, # type: int
pub_counter, # type: int
session, # type: SASession
new_session_func, # type: callable_
before_queue_insert_func, # type: callnone
gd_msg_list, # type: strdictlist
subscriptions_by_topic, # type: sublist
should_collect_ctx # type: bool
) -> 'None':
self.now = now
self.cid = cid
self.topic_id = topic_id
self.topic_name = topic_name
self.cluster_id = cluster_id
self.pub_counter = pub_counter
self.session = session
self.new_session_func = new_session_func
self.before_queue_insert_func = before_queue_insert_func
self.gd_msg_list = gd_msg_list
self.subscriptions_by_topic = subscriptions_by_topic
self.should_collect_ctx = should_collect_ctx
self.ctx_history = []
# ################################################################################################################################
def run(self):
# This is a reusable context object that will be employed
# by all the iterations of the publication loop.
publish_op_ctx = PublishOpCtx(self.pub_counter)
# We make a reference to the subscriptions here because we may want to modify it
# in case queue messages could not be inserted.
subscriptions_by_topic = self.subscriptions_by_topic
while publish_op_ctx.needs_queue_messages:
# We have just entered a new iteration of this loop (possibly it is the first time)
# so we need to carry out a few metadata-related tasks first.
publish_op_ctx.on_new_iter(subscriptions_by_topic)
# This is reusable
counter_ctx_str = publish_op_ctx.get_counter_ctx_str()
# Collect context metadata, if told to.
if self.should_collect_ctx:
self.ctx_history.append(f'Counter -> {counter_ctx_str}')
logger_pubsub.info('SQL publish with retry -> %s -> On new loop iter',
counter_ctx_str,
)
publish_op_ctx = self._sql_publish_with_retry(
publish_op_ctx,
self.cid,
self.cluster_id,
self.topic_id,
self.topic_name,
subscriptions_by_topic,
self.gd_msg_list,
self.now
)
if self.should_collect_ctx:
self.ctx_history.append(f'Result -> {publish_op_ctx.is_queue_insert_ok}')
logger_pubsub.info('SQL publish with retry -> %s -> is_queue_ok:%s',
counter_ctx_str,
publish_op_ctx.is_queue_insert_ok
)
if not publish_op_ctx.is_queue_insert_ok:
if self.should_collect_ctx:
self.ctx_history.append(f'Queue insert OK -> {publish_op_ctx.is_queue_insert_ok}')
# We may need to filter out subscriptions that do not exist anymore - this is needed because
# we took our list of subscribers from self.pubsub but it is possible that between the time
# we got this list and when this transaction started, some of the subscribers
# have already been deleted from the database so, if we did not filter them out, we would be
# potentially trying to insert rows pointing to foreign keys that no longer exist.
with closing(self.new_session_func()) as new_session: # type: ignore
subscriptions_by_topic = ensure_subs_exist(
new_session,
self.topic_name,
self.gd_msg_list,
subscriptions_by_topic,
'_sql_publish_with_retry',
counter_ctx_str
)
if self.should_collect_ctx:
self.ctx_history.append(f'Sub by topic -> {subscriptions_by_topic}')
# ################################################################################################################################
# ################################################################################################################################
def _sql_publish_with_retry(
self,
publish_op_ctx, # type: PublishOpCtx
cid, # type: str
cluster_id, # type: int
topic_id, # type: int
topic_name, # type: str
subscriptions_by_topic, # type: sublist
gd_msg_list, # type: strdictlist
now # type: float
) -> 'PublishOpCtx':
""" A low-level implementation of sql_publish_with_retry.
"""
# Added for type hints
sub_keys_by_topic = 'default-sub-keys-by-topic'
# This is reusable
counter_ctx_str = publish_op_ctx.get_counter_ctx_str()
#
# We need to temporarily remove selected keys from gd_msg_list while we insert the topic
# messages only to bring back these keys later on when inserting the same messages for subscribers.
#
# The reason is that we want to avoid the "sqlalchemy.exc.CompileError: Unconsumed column names"
# condition which is met when insert_topic_messages attempts to use columns that are needed
# only by insert_queue_messages.
#
# An alternative to removing them temporarily would be to have two copies of the messages
# but that could be expensive as they would be otherwise 99% the same, differing only
# by these few, specific short keys.
#
# These message attributes are needed only by queue subscribers
sub_only = {}
# Go through each message and remove the keys that topics do not use
for msg in gd_msg_list: # type: dict
pub_msg_id = msg['pub_msg_id']
sub_attrs = sub_only.setdefault(pub_msg_id, {})
for name in sub_only_keys:
sub_attrs[name] = msg.pop(name, None)
# Publish messages - insert rows, each representing an individual message.
if publish_op_ctx.needs_topic_messages:
# It may be the case that we do not have any message to publish ..
if not gd_msg_list:
# .. in such a situation, store a message in logs ..
logger_pubsub.info('No messages in -> %s -> %s', counter_ctx_str, cid)
# .. now, indicate that the publication went fine (seeing as there was nothing to publish)
# .. and that no queue insertion should be carried out.
publish_op_ctx.needs_topic_messages = False
publish_op_ctx.needs_queue_messages = False
else:
# This is the place where the insert to the topic table statement is executed.
self.insert_topic_messages(cid, gd_msg_list)
# If we are here, it means that the insert above was successful
# and we can set a flag for later use to indicate that.
publish_op_ctx.needs_topic_messages = False
# Log details about the messages inserted.
logger_pubsub.info(
'Topic messages inserted -> %s -> %s -> %s -> %s',
counter_ctx_str, cid, topic_name, gd_msg_list
)
        # We enter here only if it is necessary, i.e. if there has not already been
        # a successful insertion in a previous iteration of the publication loop
        # and if there are any messages to publish at all.
if publish_op_ctx.needs_queue_messages:
# Sort alphabetically all the sub_keys to make it easy to find them in logs.
sub_keys_by_topic = sorted(elem.sub_key for elem in subscriptions_by_topic)
            # .. we may still have an empty list on input - this will happen if all the subscriptions
# .. that we thought would exist have already been deleted ..
if not sub_keys_by_topic:
# .. in such a situation, store a message in logs ..
logger_pubsub.info('No subscribers in -> %s -> %s', counter_ctx_str, cid)
# .. now, indicate to the caller that the insertion went fine (seeing as there was nothing to insert)
# .. and that it should not repeat the call.
publish_op_ctx.is_queue_insert_ok = True
publish_op_ctx.needs_queue_messages = False
else:
try:
# .. now, go through each message and add back the keys that topics did not use
# .. but queues are going to need.
for msg in gd_msg_list: # type: dict
pub_msg_id = msg['pub_msg_id']
for name in sub_only_keys:
msg[name] = sub_only[pub_msg_id][name]
# Log what we are about to do
                        logger_pubsub.info('Inserting queue messages for %s sub_key%s -> %s -> %s -> %s',
publish_op_ctx.len_sub_keys_by_topic, publish_op_ctx.suffix, counter_ctx_str, cid, sub_keys_by_topic)
if self.before_queue_insert_func:
self.before_queue_insert_func(self, sub_keys_by_topic)
                        # This is the call that adds a reference to each GD message for each of the input subscribers.
self.insert_queue_messages(cluster_id, subscriptions_by_topic, gd_msg_list, topic_id, now, cid)
# Log what we did
                        logger_pubsub.info('Inserted queue messages for %s sub_key%s -> %s -> %s -> %s',
publish_op_ctx.len_sub_keys_by_topic, publish_op_ctx.suffix, counter_ctx_str, cid, sub_keys_by_topic)
# No integrity error / no deadlock = all good
is_queue_insert_ok = True
except IntegrityError as e:
err_msg = 'Caught IntegrityError (_sql_publish_with_retry) -> %s -> %s -> `%s`'
logger_zato.info(err_msg, counter_ctx_str, cid, e)
logger_pubsub.info(err_msg, counter_ctx_str, cid, e)
                        # If we have an integrity error here it means that our transaction, the whole of it,
                        # was rolled back - this will happen on MySQL in case of deadlocks, which may
                        # occur because delivery tasks update the table that insert_queue_messages wants to insert into.
                        # We need to return False for our caller to understand that the whole transaction needs
                        # to be repeated.
is_queue_insert_ok = False
# Update publication context based on whether queue messages were inserted or not.
publish_op_ctx.is_queue_insert_ok = is_queue_insert_ok
publish_op_ctx.needs_queue_messages = not is_queue_insert_ok
# This is returned no matter what happened earlier above.
return publish_op_ctx
# ################################################################################################################################
def _insert_topic_messages(self, msg_list:'strdictlist') -> 'None':
""" A low-level implementation for insert_topic_messages.
"""
# Delete keys that cannot be inserted in SQL
for msg in msg_list: # type: dict
for name in msg_pub_ignore:
msg.pop(name, None)
self.session.execute(MsgInsert().values(msg_list))
# ################################################################################################################################
def insert_topic_messages(self, cid:'str', msg_list:'strdictlist') -> 'any_':
""" Publishes messages to a topic, i.e. runs an INSERT that inserts rows, one for each message.
"""
try:
return sql_op_with_deadlock_retry(cid, 'insert_topic_messages', self._insert_topic_messages, msg_list)
# Catch duplicate MsgId values sent by clients
except IntegrityError as e:
err_msg = 'Caught IntegrityError (insert_topic_messages) -> %s -> `%s`'
logger_zato.info(err_msg, cid, e)
logger_pubsub.info(err_msg, cid, e)
raise
# ################################################################################################################################
def _insert_queue_messages(self, queue_msgs:'strdictlist') -> 'None':
""" A low-level call to enqueue messages.
"""
self.session.execute(EnqueuedMsgInsert().values(queue_msgs))
# ################################################################################################################################
def insert_queue_messages(
self,
cluster_id, # type: int
subscriptions_by_topic, # type: sublist
msg_list, # type: strdictlist
topic_id, # type: int
now, # type: float
cid # type: str
    ) -> 'any_':
""" Moves messages to each subscriber's queue, i.e. runs an INSERT that adds relevant references to the topic message.
Also, updates each message's is_in_sub_queue flag to indicate that it is no longer available for other subscribers.
"""
# All the queue messages to be inserted.
queue_msgs = []
# Note that it is possible that there will be messages in msg_list
# for which no subscriber will be found in the outer loop.
# This is possible if one or more subscriptions were removed in between
# the time when the publication was triggered and when the SQL query actually runs.
# In such a case, such message(s) will not be delivered to a sub_key that no longer exists.
for sub in subscriptions_by_topic:
for msg in msg_list:
# Enqueues the message for each subscriber
queue_msgs.append({
'creation_time': _float_str.format(now),
'pub_msg_id': msg['pub_msg_id'],
'endpoint_id': sub.endpoint_id,
'topic_id': topic_id,
'sub_key': sub.sub_key,
'cluster_id': cluster_id,
'sub_pattern_matched': msg['sub_pattern_matched'][sub.sub_key],
})
# Move the message to endpoint queues
return sql_op_with_deadlock_retry(cid, 'insert_queue_messages', self._insert_queue_messages, queue_msgs)
# ################################################################################################################################
def sql_publish_with_retry(
*,
now, # type: float
cid, # type: str
topic_id, # type: int
topic_name, # type: str
cluster_id, # type: int
pub_counter, # type: int
session, # type: SASession
new_session_func, # type: callable_
before_queue_insert_func, # type: callnone
gd_msg_list, # type: strdictlist
subscriptions_by_topic, # type: sublist
should_collect_ctx # type: bool
) -> 'PublishWithRetryManager':
""" Populates SQL structures with new messages for topics and their counterparts in subscriber queues.
In case of a deadlock will retry the whole transaction, per MySQL's requirements, which rolls back
the whole of it rather than a deadlocking statement only.
"""
# Build the manager object responsible for the publication ..
publish_with_retry_manager = PublishWithRetryManager(
now,
cid,
topic_id,
topic_name,
cluster_id,
pub_counter,
session,
new_session_func,
before_queue_insert_func,
gd_msg_list,
subscriptions_by_topic,
should_collect_ctx
)
# .. publish the message(s) ..
publish_with_retry_manager.run()
# .. and return the manager to our caller.
return publish_with_retry_manager
# ################################################################################################################################
# ################################################################################################################################
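# What follows is a commented-out, illustrative sketch of how sql_publish_with_retry may be invoked -
# the session, topic, subscription and message objects below are hypothetical placeholders, not part
# of this module:
#
# manager = sql_publish_with_retry(
#     now=now,                                 # current time as a float timestamp
#     cid=cid,                                 # correlation ID of this publication
#     topic_id=topic.id,
#     topic_name=topic.name,
#     cluster_id=cluster_id,
#     pub_counter=1,
#     session=session,                         # an already opened SQLAlchemy session
#     new_session_func=new_session_func,       # a callable returning a new, independent session
#     before_queue_insert_func=None,
#     gd_msg_list=gd_msg_list,                 # a list of dicts, one per message
#     subscriptions_by_topic=subscriptions,    # a list of subscription objects for this topic
#     should_collect_ctx=False,
# )
# ################################################################################################################################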
| 20,967 | Python | .py | 362 | 47.10221 | 130 | 0.517356 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,652 | queue.py | zatosource_zato/code/zato-common/src/zato/common/odb/query/pubsub/queue.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# SQLAlchemy
from sqlalchemy import func, update
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription
from zato.common.odb.query import count, _pubsub_queue_message
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import anylist, intlist, strlistempty
# ################################################################################################################################
# ################################################################################################################################
PubSubEnqMsg = PubSubEndpointEnqueuedMessage
# ################################################################################################################################
# ################################################################################################################################
_delivered = PUBSUB.DELIVERY_STATUS.DELIVERED
_initialized = PUBSUB.DELIVERY_STATUS.INITIALIZED
_to_delete = PUBSUB.DELIVERY_STATUS.TO_DELETE
_waiting = PUBSUB.DELIVERY_STATUS.WAITING_FOR_CONFIRMATION
# ################################################################################################################################
# ################################################################################################################################
def get_messages(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
batch_size, # type: int
now # type: float
) -> 'anylist':
""" Returns up to batch_size messages for input sub_key and mark them as being delivered.
"""
# First, get all messages but note it is SELECT FOR UPDATE
messages = _pubsub_queue_message(session, cluster_id).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubEnqMsg.delivery_status==_initialized).\
filter(PubSubMessage.expiration_time>=now).\
with_for_update().\
order_by(PubSubMessage.ext_pub_time.desc()).\
limit(batch_size).\
all()
# Now, within the same transaction, update their delivery status to indicate they are being delivered
msg_id_list = [elem.msg_id for elem in messages]
if msg_id_list:
session.execute(
update(PubSubEnqMsg).\
values({
'delivery_status': _waiting,
'delivery_time': now,
'delivery_count': PubSubEnqMsg.__table__.c.delivery_count + 1,
}).\
            where(PubSubEnqMsg.cluster_id==cluster_id).\
where(PubSubEnqMsg.pub_msg_id.in_(msg_id_list))
)
# Return all messages fetched - our caller will commit the transaction thus releasing the FOR UPDATE lock
return messages
# ################################################################################################################################
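# A commented-out sketch of the intended calling pattern for get_messages; the session factory and the
# sub_key value below are hypothetical placeholders. Because get_messages issues SELECT ... FOR UPDATE,
# the lock it takes is held until the caller commits, so fetching a batch and marking it as being
# delivered is atomic with regard to other delivery tasks reading the same queue.
#
# session = session_factory()
# try:
#     messages = get_messages(session, cluster_id=1, sub_key='zpsk.abc', batch_size=100, now=utcnow_as_ms())
#     session.commit() # Commits the status update and releases the FOR UPDATE lock
# finally:
#     session.close()
# ################################################################################################################################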
def _set_delivery_status(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
msg_id_list, # type: intlist
now, # type: float
status # type: int
) -> 'None':
session.execute(
update(PubSubEnqMsg).\
values({
'delivery_status': status,
'delivery_time': now,
}).\
        where(PubSubEnqMsg.cluster_id==cluster_id).\
where(PubSubEnqMsg.sub_key==sub_key).\
where(PubSubEnqMsg.pub_msg_id.in_(msg_id_list))
)
# ################################################################################################################################
def set_to_delete(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
msg_id_list, # type: strlistempty
now, # type: float
status=_to_delete # type: int
) -> 'None':
""" Marks all input messages as to be deleted.
"""
_set_delivery_status(session, cluster_id, sub_key, msg_id_list, now, status)
# ################################################################################################################################
def acknowledge_delivery(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
msg_id_list, # type: intlist
now, # type: float
status=_delivered # type: int
) -> 'None':
""" Confirms delivery of all messages from msg_id_list.
"""
_set_delivery_status(session, cluster_id, sub_key, msg_id_list, now, status)
# ################################################################################################################################
def get_queue_depth_by_sub_key(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
now # type: float
) -> 'int':
""" Returns queue depth for a given sub_key - does not include messages expired, in staging, or already delivered.
"""
current_q = session.query(PubSubEnqMsg.id).\
filter(PubSubSubscription.sub_key==PubSubEnqMsg.sub_key).\
filter(PubSubEnqMsg.is_in_staging != True).\
filter(PubSubEnqMsg.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubMessage.expiration_time>=now).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubEnqMsg.cluster_id==cluster_id) # noqa: E712
return count(session, current_q)
# ################################################################################################################################
def get_queue_depth_by_topic_id_list(
session, # type: SASession
cluster_id, # type: int
topic_id_list # type: intlist
) -> 'anylist':
""" Returns queue depth for a given sub_key - does not include messages expired, in staging, or already delivered.
"""
return session.query(PubSubEnqMsg.topic_id, func.count(PubSubEnqMsg.topic_id)).\
filter(PubSubEnqMsg.topic_id.in_(topic_id_list)).\
filter(PubSubEnqMsg.cluster_id==cluster_id).\
filter(PubSubEnqMsg.delivery_status==_initialized).\
filter(PubSubEnqMsg.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubMessage.expiration_time>=utcnow_as_ms()).\
group_by(PubSubMessage.topic_id).\
all()
# ################################################################################################################################
| 6,827 | Python | .py | 138 | 44.072464 | 130 | 0.479285 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,653 | subscribe.py | zatosource_zato/code/zato-common/src/zato/common/odb/query/pubsub/subscribe.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# SQLAlchemy
from sqlalchemy import and_, exists, insert, update
from sqlalchemy.sql import expression as expr
# Zato
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription, WebSocketSubscription
from zato.common.typing_ import cast_
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy import Column
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, boolnone, intnone, strnone
Column = Column
# ################################################################################################################################
# ################################################################################################################################
MsgTable = PubSubMessage.__table__
# ################################################################################################################################
# ################################################################################################################################
def has_subscription(
session, # type: SASession
cluster_id, # type: int
topic_id, # type: int
endpoint_id # type: intnone
) -> 'bool':
""" Returns a boolean flag indicating whether input endpoint has subscription to a given topic.
"""
return session.query(exists().where(and_(
PubSubSubscription.endpoint_id==endpoint_id,
PubSubSubscription.topic_id==topic_id,
PubSubSubscription.cluster_id==cluster_id,
))).\
scalar()
# ################################################################################################################################
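# A commented-out usage sketch combining the queries below; the session and the ctx object are hypothetical
# placeholders - add_subscription expects a context object carrying all the attributes it assigns.
#
# if not has_subscription(session, cluster_id=1, topic_id=ctx.topic.id, endpoint_id=ctx.endpoint_id):
#     ps_sub = add_subscription(session, 1, 'zpsk.abc', ctx)
#     session.commit()
# ################################################################################################################################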
def add_wsx_subscription(
session, # type: SASession
cluster_id, # type: int
is_internal, # type: boolnone
sub_key, # type: str
ext_client_id, # type: str
ws_channel_id, # type: intnone
sub_id # type: int
) -> 'WebSocketSubscription':
""" Adds an object representing a subscription of a WebSockets client.
"""
wsx_sub = WebSocketSubscription()
wsx_sub.is_internal = is_internal or False
wsx_sub.sub_key = sub_key
wsx_sub.ext_client_id = ext_client_id
wsx_sub.channel_id = ws_channel_id
wsx_sub.cluster_id = cluster_id
wsx_sub.subscription_id = sub_id
session.add(wsx_sub)
return wsx_sub
# ################################################################################################################################
def add_subscription(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
ctx # type: any_
) -> 'PubSubSubscription':
""" Adds an object representing a subscription regardless of the underlying protocol.
"""
# Common
ps_sub = PubSubSubscription()
ps_sub.cluster_id = ctx.cluster_id
ps_sub.server_id = ctx.server_id
ps_sub.topic_id = ctx.topic.id
ps_sub.is_internal = ctx.is_internal
ps_sub.is_staging_enabled = ctx.is_staging_enabled
ps_sub.creation_time = ctx.creation_time
ps_sub.sub_key = sub_key
ps_sub.sub_pattern_matched = ctx.sub_pattern_matched
ps_sub.has_gd = ctx.has_gd
ps_sub.active_status = ctx.active_status
ps_sub.endpoint_type = ctx.endpoint_type
ps_sub.endpoint_id = ctx.endpoint_id
ps_sub.delivery_method = ctx.delivery_method
ps_sub.delivery_data_format = ctx.delivery_data_format
ps_sub.delivery_batch_size = ctx.delivery_batch_size
ps_sub.wrap_one_msg_in_list = ctx.wrap_one_msg_in_list if ctx.wrap_one_msg_in_list is not None else True
ps_sub.delivery_max_retry = ctx.delivery_max_retry
ps_sub.delivery_err_should_block = ctx.delivery_err_should_block if ctx.delivery_err_should_block is not None else True
ps_sub.wait_sock_err = ctx.wait_sock_err
ps_sub.wait_non_sock_err = ctx.wait_non_sock_err
ps_sub.ext_client_id = ctx.ext_client_id
# AMQP
ps_sub.amqp_exchange = ctx.amqp_exchange
ps_sub.amqp_routing_key = ctx.amqp_routing_key
ps_sub.out_amqp_id = ctx.out_amqp_id
# Local files
ps_sub.files_directory_list = ctx.files_directory_list
# FTP
ps_sub.ftp_directory_list = ctx.ftp_directory_list
# REST/SOAP
ps_sub.security_id = ctx.security_id
ps_sub.out_http_soap_id = ctx.out_http_soap_id
ps_sub.out_http_method = ctx.out_http_method
# Services
ps_sub.service_id = ctx.service_id
# SMS - Twilio
ps_sub.sms_twilio_from = ctx.sms_twilio_from
ps_sub.sms_twilio_to_list = ctx.sms_twilio_to_list
ps_sub.smtp_is_html = ctx.smtp_is_html
ps_sub.smtp_subject = ctx.smtp_subject
ps_sub.smtp_from = ctx.smtp_from
ps_sub.smtp_to_list = ctx.smtp_to_list
ps_sub.smtp_body = ctx.smtp_body
# WebSockets
ps_sub.ws_channel_id = ctx.ws_channel_id
session.add(ps_sub)
return ps_sub
# ################################################################################################################################
def move_messages_to_sub_queue(
session, # type: SASession
cluster_id, # type: int
topic_id, # type: int
endpoint_id, # type: intnone
sub_pattern_matched, # type: strnone
sub_key, # type: str
pub_time_max # type: float
) -> 'None':
""" Move all unexpired messages from topic to a given subscriber's queue. This method must be called with a global lock
held for topic because it carries out its job through a couple of non-atomic queries.
"""
enqueued_id_subquery = session.query(
PubSubEndpointEnqueuedMessage.pub_msg_id
).\
filter(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
now = utcnow_as_ms()
# SELECT statement used by the INSERT below finds all messages for that topic
# that haven't expired yet.
select_messages = session.query(
PubSubMessage.pub_msg_id,
PubSubMessage.topic_id,
expr.bindparam('creation_time', now),
expr.bindparam('endpoint_id', endpoint_id),
expr.bindparam('sub_pattern_matched', sub_pattern_matched),
expr.bindparam('sub_key', sub_key),
expr.bindparam('is_in_staging', False),
expr.bindparam('cluster_id', cluster_id),
).\
filter(PubSubMessage.topic_id==topic_id).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(~PubSubMessage.is_in_sub_queue).\
filter(cast_('Column', PubSubMessage.pub_msg_id).notin_(enqueued_id_subquery)).\
filter(PubSubMessage.expiration_time > pub_time_max) # type: ignore
    # All message IDs that are available in the topic for that subscriber, if there are any at all.
    # In theory, it is not required to pull all the messages to build the list in Python, but this is a relatively
    # efficient operation because there will not be much data returned, and it allows us to make sure
    # the INSERT and UPDATE below are issued only if truly needed.
msg_ids = [elem.pub_msg_id for elem in select_messages.all()]
if msg_ids:
# INSERT references to topic's messages in the subscriber's queue.
insert_messages = insert(PubSubEndpointEnqueuedMessage).\
from_select((
PubSubEndpointEnqueuedMessage.pub_msg_id,
PubSubEndpointEnqueuedMessage.topic_id,
expr.column('creation_time'),
expr.column('endpoint_id'),
expr.column('sub_pattern_matched'),
expr.column('sub_key'),
expr.column('is_in_staging'),
expr.column('cluster_id'),
), select_messages) # type: ignore
# Move messages to subscriber's queue
session.execute(insert_messages)
# Indicate that all the messages are being delivered to the subscriber which means that no other
# subscriber will ever receive them. Note that we are changing the status only for the messages pertaining
        # to the current subscriber without ever touching messages received by any other one.
session.execute(
update(MsgTable).\
values({
'is_in_sub_queue': True,
}).\
where(and_(
MsgTable.c.pub_msg_id.in_(msg_ids),
~MsgTable.c.is_in_sub_queue
))
)
# ################################################################################################################################
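# For reference, the INSERT ... FROM SELECT executed above compiles to SQL of roughly the following shape.
# This is a simplified sketch with illustrative table names - the actual identifiers and dialect-specific
# details are dictated by the SQLAlchemy models:
#
# INSERT INTO <enqueued_message_table> (pub_msg_id, topic_id, creation_time, endpoint_id,
#     sub_pattern_matched, sub_key, is_in_staging, cluster_id)
# SELECT pub_msg_id, topic_id, :creation_time, :endpoint_id, :sub_pattern_matched, :sub_key,
#     :is_in_staging, :cluster_id
# FROM <message_table>
# WHERE topic_id = :topic_id AND cluster_id = :cluster_id AND NOT is_in_sub_queue AND ...
# ################################################################################################################################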
| 8,854 | Python | .py | 186 | 41.397849 | 130 | 0.572057 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,654 | topic.py | zatosource_zato/code/zato-common/src/zato/common/odb/query/pubsub/topic.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# SQLAlchemy
from sqlalchemy import and_, func, select
from sqlalchemy.sql.expression import false as sa_false
# Zato
from zato.common.odb.model import PubSubMessage, PubSubTopic, PubSubSubscription
from zato.common.odb.query import count
from zato.common.typing_ import cast_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy import Column
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import anylist, intlist, strlist
Column = Column
# ################################################################################################################################
# ################################################################################################################################
MsgTable = PubSubMessage.__table__
SubTable = PubSubSubscription.__table__
TopicTable = PubSubTopic.__table__
# ################################################################################################################################
# ################################################################################################################################
def get_topics_basic_data(session:'SASession') -> 'anylist':
""" Return basic information about a topic, its ID, name and opaque data. The result is sorted by name.
"""
return session.query(
PubSubTopic.id,
PubSubTopic.name,
PubSubTopic.opaque1,
).\
order_by(PubSubTopic.name).\
all()
# ################################################################################################################################
def get_topics_by_sub_keys(session:'SASession', cluster_id:'int', sub_keys:'strlist') -> 'anylist':
""" Returns (topic_id, sub_key) for each input sub_key.
"""
return session.query(
PubSubTopic.id,
PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(cast_('Column', PubSubSubscription.sub_key).in_(sub_keys)).\
all()
# ################################################################################################################################
def get_gd_depth_topic(session:'SASession', cluster_id:'int', topic_id:'int') -> 'int':
""" Returns current depth of input topic by its ID.
"""
q = session.query(MsgTable.c.id).\
filter(MsgTable.c.topic_id==topic_id).\
filter(MsgTable.c.cluster_id==cluster_id).\
filter(~MsgTable.c.is_in_sub_queue)
return count(session, q)
# ################################################################################################################################
def get_gd_depth_topic_list(session:'SASession', cluster_id:'int', topic_id_list:'intlist') -> 'anylist':
""" Returns topics matching the input list as long as they have any messages undelivered to their queues.
"""
q = select([
MsgTable.c.topic_id,
func.count(MsgTable.c.topic_id).label('depth')
]).\
where(and_(
MsgTable.c.cluster_id == cluster_id,
MsgTable.c.is_in_sub_queue == sa_false(),
MsgTable.c.topic_id.in_(topic_id_list),
)).\
group_by('topic_id')
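    # Note that the result is a list of (topic_id, depth) rows and that, because of the GROUP BY,
    # topics from the input list that have no undelivered GD messages are simply absent from the output.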
return session.execute(q).fetchall()
# ################################################################################################################################
def get_topic_sub_count_list(session:'SASession', cluster_id:'int', topic_id_list:'intlist') -> 'anylist':
""" Returns the number of subscriptions for each topic from the input list.
"""
q = select([
SubTable.c.topic_id,
func.count(SubTable.c.topic_id).label('sub_count')
]).\
where(and_(
SubTable.c.cluster_id == cluster_id,
SubTable.c.topic_id.in_(topic_id_list),
)).\
group_by('topic_id')
return session.execute(q).fetchall()
# ################################################################################################################################
| 4,416 | Python | .py | 85 | 46.482353 | 130 | 0.441375 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,655 | endpoint.py | zatosource_zato/code/zato-common/src/zato/common/odb/query/pubsub/endpoint.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# SQLAlchemy
from sqlalchemy import func
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import Cluster, PubSubEndpoint, PubSubSubscription
from zato.common.odb.query import query_wrapper
from zato.common.typing_ import cast_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy import Column
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, intnone
Column = Column
# ################################################################################################################################
# ################################################################################################################################
_subscriber_role = (PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id, PUBSUB.ROLE.SUBSCRIBER.id)
# ################################################################################################################################
# ################################################################################################################################
def _pubsub_endpoint_summary(
session, # type: SASession
cluster_id, # type: int
topic_id # type: intnone
) -> 'Query':
q = session.query(
PubSubEndpoint.id,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
cast_('Column', PubSubEndpoint.name).label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.last_seen,
PubSubEndpoint.last_deliv_time,
func.count(PubSubSubscription.id).label('subscription_count'),
).\
group_by(PubSubEndpoint.id).\
outerjoin(PubSubSubscription, PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubEndpoint.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(cast_('Column', PubSubEndpoint.role).in_(_subscriber_role))
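    # Note that, thanks to the outer join above, endpoints with no subscriptions at all are still returned,
    # with their subscription_count reported as 0.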
if topic_id:
q = q.\
filter(PubSubSubscription.topic_id==topic_id)
return q
# ################################################################################################################################
@query_wrapper
def pubsub_endpoint_summary_list(
session, # type: SASession
cluster_id, # type: int
topic_id=None, # type: intnone
needs_columns=False # type: bool
) -> 'Query':
return _pubsub_endpoint_summary(session, cluster_id, topic_id).\
order_by(PubSubEndpoint.id)
# ################################################################################################################################
def pubsub_endpoint_summary(
session, # type: SASession
cluster_id, # type: int
endpoint_id, # type: int
topic_id=None, # type: intnone
) -> 'any_':
return _pubsub_endpoint_summary(session, cluster_id, topic_id).\
filter(PubSubEndpoint.id==endpoint_id).\
one()
# ################################################################################################################################
| 3,464 | Python | .py | 71 | 44.056338 | 130 | 0.45231 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,656 | subscription.py | zatosource_zato/code/zato-common/src/zato/common/odb/query/pubsub/subscription.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# SQLAlchemy
from sqlalchemy import func
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import Cluster, HTTPSOAP, PubSubTopic, PubSubEndpoint, PubSubSubscription, Server
from zato.common.odb.query import query_wrapper
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.sql.selectable import Select
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, intlist
# ################################################################################################################################
# ################################################################################################################################
_subscriber_role = (PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id, PUBSUB.ROLE.SUBSCRIBER.id)
# ################################################################################################################################
# ################################################################################################################################
def _pubsub_subscription(
session, # type: SASession
cluster_id # type: int
) -> 'Select':
return session.query(
PubSubSubscription.id,
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.id.label('name'), # A unique 'name' attribute is needed by ConfigDict
PubSubSubscription.active_status,
PubSubSubscription.server_id,
PubSubSubscription.is_internal,
PubSubSubscription.is_staging_enabled,
PubSubSubscription.creation_time,
PubSubSubscription.last_interaction_time,
PubSubSubscription.last_interaction_type,
PubSubSubscription.last_interaction_details,
PubSubSubscription.sub_key,
PubSubSubscription.is_durable,
PubSubSubscription.has_gd,
PubSubSubscription.topic_id,
PubSubSubscription.endpoint_id,
PubSubSubscription.delivery_method,
PubSubSubscription.delivery_data_format,
PubSubSubscription.delivery_batch_size,
PubSubSubscription.wrap_one_msg_in_list,
PubSubSubscription.delivery_max_retry,
PubSubSubscription.ext_client_id,
PubSubSubscription.delivery_err_should_block,
PubSubSubscription.wait_sock_err,
PubSubSubscription.wait_non_sock_err,
PubSubSubscription.sub_pattern_matched,
PubSubSubscription.out_amqp_id,
PubSubSubscription.amqp_exchange,
PubSubSubscription.amqp_routing_key,
PubSubSubscription.files_directory_list,
PubSubSubscription.ftp_directory_list,
PubSubSubscription.sms_twilio_from,
PubSubSubscription.sms_twilio_to_list,
PubSubSubscription.smtp_is_html,
PubSubSubscription.smtp_subject,
PubSubSubscription.smtp_from,
PubSubSubscription.smtp_to_list,
PubSubSubscription.smtp_body,
PubSubSubscription.out_http_soap_id,
PubSubSubscription.out_http_soap_id.label('out_rest_http_soap_id'),
PubSubSubscription.out_http_soap_id.label('out_soap_http_soap_id'),
PubSubSubscription.out_http_method,
PubSubSubscription.delivery_endpoint,
PubSubSubscription.ws_channel_id,
PubSubSubscription.cluster_id,
PubSubTopic.name.label('topic_name'),
PubSubTopic.task_delivery_interval,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.service_id,
Server.name.label('server_name'),
HTTPSOAP.name.label('rest_connection'),
).\
outerjoin(PubSubTopic, PubSubTopic.id==PubSubSubscription.topic_id).\
outerjoin(Server, PubSubSubscription.server_id==Server.id).\
outerjoin(HTTPSOAP, PubSubSubscription.out_http_soap_id==HTTPSOAP.id).\
filter(PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubSubscription.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(PubSubSubscription.id.desc())
# ################################################################################################################################
def pubsub_subscription(
session, # type: SASession
cluster_id, # type: int
id # type: int
) -> 'any_':
""" A pub/sub subscription.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.id==id).\
one()
# ################################################################################################################################
@query_wrapper
def pubsub_subscription_list(
session, # type: SASession
cluster_id, # type: int
needs_columns=False # type: bool
) -> 'any_':
""" All pub/sub subscriptions.
"""
return _pubsub_subscription(session, cluster_id)
# ################################################################################################################################
def pubsub_sub_key_list(session:'SASession') -> 'Query': # type: ignore
""" Returns a list of sub_keys and IDs in the database.
"""
return session.query(
PubSubSubscription.id,
PubSubSubscription.sub_key,
)
# ################################################################################################################################
@query_wrapper
def pubsub_subscription_list_by_endpoint_id(
session, # type: SASession
cluster_id, # type: int
endpoint_id, # type: int
needs_columns=False # type: bool
) -> 'any_':
""" A list of all pub/sub subscriptions for a given endpoint with a search results wrapper.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id)
# ################################################################################################################################
def pubsub_subscription_list_by_endpoint_id_no_search(
session, # type: SASession
cluster_id, # type: int
endpoint_id # type: int
) -> 'any_':
""" A list of all pub/sub subscriptions for a given endpoint without a search results wrapper.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id)
# ################################################################################################################################
def pubsub_subscription_list_by_endpoint_id_list_no_search(
session, # type: SASession
cluster_id, # type: int
endpoint_id_list # type: intlist
) -> 'any_':
""" A list of all pub/sub subscriptions for a list of endpoints without a search results wrapper.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.endpoint_id.in_(endpoint_id_list))
# ################################################################################################################################
def pubsub_subscription_list_no_search(
session, # type: SASession
cluster_id, # type: int
) -> 'any_':
""" A list of all pub/sub subscriptions in existence, without a search results wrapper.
"""
return _pubsub_subscription(session, cluster_id)
# ################################################################################################################################
@query_wrapper
def pubsub_endpoint_summary_list(
session, # type: SASession
cluster_id, # type: int
needs_columns=False # type: bool
) -> 'any_':
return session.query(
PubSubEndpoint.id,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.last_seen,
PubSubEndpoint.last_deliv_time,
func.count(PubSubSubscription.id).label('subscription_count'),
).\
group_by(PubSubEndpoint.id).\
outerjoin(PubSubSubscription, PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubEndpoint.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(PubSubEndpoint.role.in_(_subscriber_role)).\
order_by(PubSubEndpoint.id)
# ################################################################################################################################
| 8,780 | Python | .py | 182 | 42.258242 | 130 | 0.541866 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,657 | delivery.py | zatosource_zato/code/zato-common/src/zato/common/odb/query/pubsub/delivery.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# SQLAlchemy
from sqlalchemy import update
from sqlalchemy.exc import IntegrityError
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpoint, PubSubMessage, PubSubEndpointEnqueuedMessage, PubSubSubscription, Server, \
WebSocketClient, WebSocketClientPubSubKeys
from zato.common.util.sql.retry import sql_op_with_deadlock_retry, sql_query_with_retry
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, anytuple, intnone, intset, listnone, strlist
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_pubsub = getLogger('zato_pubsub')
# ################################################################################################################################
_initialized = PUBSUB.DELIVERY_STATUS.INITIALIZED
_delivered = PUBSUB.DELIVERY_STATUS.DELIVERED
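# Timestamps are passed through this converter before being compared in SQL - message times are kept
# as fixed-precision strings rather than raw floats, presumably to sidestep float rounding differences
# across database engines.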
_float_str=PUBSUB.FLOAT_STRING_CONVERT
# ################################################################################################################################
sql_messages_columns = (
PubSubMessage.pub_msg_id,
PubSubMessage.pub_correl_id,
PubSubMessage.in_reply_to,
PubSubMessage.published_by_id,
PubSubMessage.ext_client_id,
PubSubMessage.group_id,
PubSubMessage.position_in_group,
PubSubMessage.pub_time,
PubSubMessage.ext_pub_time,
PubSubMessage.data,
PubSubMessage.mime_type,
PubSubMessage.priority,
PubSubMessage.expiration,
PubSubMessage.expiration_time,
PubSubMessage.size,
PubSubMessage.user_ctx,
PubSubMessage.zato_ctx,
PubSubMessage.opaque1,
PubSubEndpointEnqueuedMessage.id.label('endp_msg_queue_id'),
PubSubEndpointEnqueuedMessage.sub_key,
PubSubEndpointEnqueuedMessage.sub_pattern_matched,
)
sql_msg_id_columns = (
PubSubMessage.pub_msg_id,
)
# ################################################################################################################################
def _get_base_sql_msg_query(
session, # type: SASession
columns, # type: anytuple
sub_key_list, # type: strlist
pub_time_max, # type: float
cluster_id, # type: int
include_unexpired_only # type: bool
):
query = session.query(*columns).\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubEndpointEnqueuedMessage.sub_key.in_(sub_key_list)).\
filter(PubSubEndpointEnqueuedMessage.delivery_status==_initialized)
# If this flag is True, it means that we are returning only messages that have not expired yet.
# It will be True when we use this query to deliver messages to subscribers as we want to deliver
    # only those messages that have not expired yet. However, during cleanup, when we delete all messages
    # that belong to a subscriber, this flag will be False because we really need to delete them all,
# regardless of whether they are already expired or not.
if include_unexpired_only:
query = query.\
filter(PubSubMessage.expiration_time > _float_str.format(pub_time_max))
if cluster_id:
query = query.\
filter(PubSubMessage.cluster_id==cluster_id)
return query
# ################################################################################################################################
def _get_sql_msg_data_by_sub_key(
session, # type: SASession
cluster_id, # type: int
sub_key_list, # type: strlist
last_sql_run, # type: float
pub_time_max, # type: float
columns, # type: anytuple
include_unexpired_only, # type: bool
ignore_list=None, # type: listnone
needs_result=True # type: bool
):
""" Returns all SQL messages queued up for a given sub_key that are not being delivered
or have not been delivered already.
"""
logger_pubsub.info('Getting GD messages for `%s` last_run:%r pub_time_max:%r needs_result:%d unexp:%d', sub_key_list, last_sql_run,
pub_time_max, int(needs_result), int(include_unexpired_only))
query = _get_base_sql_msg_query(session, columns, sub_key_list, pub_time_max, cluster_id, include_unexpired_only)
# If there is the last SQL run time given, it means that we have to fetch all messages
# enqueued for that subscriber since that time ..
if last_sql_run:
query = query.\
filter(PubSubEndpointEnqueuedMessage.creation_time > _float_str.format(last_sql_run))
query = query.\
filter(PubSubEndpointEnqueuedMessage.creation_time <= _float_str.format(pub_time_max))
if ignore_list:
query = query.\
filter(PubSubEndpointEnqueuedMessage.id.notin_(ignore_list))
query = query.\
order_by(PubSubMessage.priority.desc()).\
order_by(PubSubMessage.ext_pub_time).\
order_by(PubSubMessage.pub_time)
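    # The triple ORDER BY above means that messages are returned highest-priority first and, within
    # the same priority, in the order of their external publication time and then their publication time.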
out = query.all() if needs_result else query
return out
# ################################################################################################################################
def get_sql_messages_by_sub_key(
session, # type: SASession
cluster_id, # type: int
sub_key_list, # type: strlist
last_sql_run, # type: float
pub_time_max, # type: float
ignore_list, # type: intset
include_unexpired_only=True # type: bool
) -> 'any_':
return _get_sql_msg_data_by_sub_key(session, cluster_id, sub_key_list, last_sql_run, pub_time_max,
sql_messages_columns, include_unexpired_only, ignore_list)
# ################################################################################################################################
def get_sql_messages_by_msg_id_list(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
pub_time_max, # type: float
msg_id_list, # type: strlist
include_unexpired_only=True # type: bool
) -> 'any_':
query = _get_base_sql_msg_query(session, sql_messages_columns, [sub_key], pub_time_max, cluster_id, include_unexpired_only)
return query.\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id.in_(msg_id_list))
# ################################################################################################################################
def get_sql_msg_ids_by_sub_key(
session, # type: SASession
cluster_id, # type: intnone
sub_key, # type: str
last_sql_run, # type: float
pub_time_max, # type: float
include_unexpired_only=True, # type: bool
needs_result=False # type: bool
) -> 'any_':
return _get_sql_msg_data_by_sub_key(session, cluster_id, [sub_key], last_sql_run, pub_time_max, sql_msg_id_columns,
include_unexpired_only, needs_result=needs_result)
# ################################################################################################################################
def _confirm_pubsub_msg_delivered_query(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
delivered_pub_msg_id_list, # type: strlist
now # type: float
) -> 'None':
""" Returns all SQL messages queued up for a given sub_key.
"""
session.execute(
update(PubSubEndpointEnqueuedMessage).\
values({
'delivery_status': _delivered,
'delivery_time': now
}).\
where(PubSubEndpointEnqueuedMessage.pub_msg_id.in_(delivered_pub_msg_id_list)).\
where(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
)
# ################################################################################################################################
def _confirm_pubsub_msg_delivered(*args:'any_') -> 'bool':
try:
return sql_op_with_deadlock_retry(
None,
'_confirm_pubsub_msg_delivered_query',
_confirm_pubsub_msg_delivered_query,
*args
)
except IntegrityError:
logger_zato.info('Caught IntegrityError (_confirm_pubsub_msg_delivered) `%s` -> `%s`', args, format_exc())
return False
# ################################################################################################################################
def confirm_pubsub_msg_delivered(*args:'any_') -> 'None':
sql_query_with_retry(_confirm_pubsub_msg_delivered, '_confirm_pubsub_msg_delivered', *args)
# ################################################################################################################################
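# A note on the layering above: confirm_pubsub_msg_delivered retries the whole logical operation through
# sql_query_with_retry, while _confirm_pubsub_msg_delivered additionally retries individual deadlocked
# statements through sql_op_with_deadlock_retry. An IntegrityError makes the inner attempt return False,
# signalling to the outer retry wrapper that the operation did not complete and may need to run again.
# ################################################################################################################################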
def get_delivery_server_for_sub_key(
session, # type: SASession
cluster_id, # type: int
sub_key, # type: str
is_wsx # type: bool
) -> 'any_':
""" Returns information about which server handles delivery tasks for input sub_key, the latter must exist in DB.
Assumes that sub_key belongs to a non-WSX endpoint and then checks WebSockets in case the former query founds
no matching server.
"""
# Sub key belongs to a WebSockets client ..
if is_wsx:
return session.query(
Server.id.label('server_id'),
Server.name.label('server_name'),
Server.cluster_id,
).\
filter(WebSocketClient.server_id==Server.id).\
filter(WebSocketClient.cluster_id==cluster_id).\
filter(WebSocketClient.id==WebSocketClientPubSubKeys.client_id).\
filter(WebSocketClientPubSubKeys.sub_key==sub_key).\
first()
# .. otherwise, it is a REST, SOAP or another kind of client, but for sure it's not WebSockets.
else:
return session.query(
Server.id.label('server_id'),
Server.name.label('server_name'),
Server.cluster_id,
PubSubEndpoint.endpoint_type,
).\
filter(Server.id==PubSubSubscription.server_id).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id).\
filter(PubSubSubscription.cluster_id==cluster_id).\
first()
# ################################################################################################################################
| 10,610 | Python | .py | 220 | 42.368182 | 135 | 0.56128 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
10,658 | sso.py | zatosource_zato/code/zato-common/src/zato/common/odb/model/sso.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import Boolean, Column, DateTime, false as sa_false, ForeignKey, Index, Integer, Sequence, String, Text, \
UniqueConstraint
from sqlalchemy.ext.declarative import declared_attr
# Zato
from zato.common.odb.model.base import Base, _JSON
# ################################################################################################################################
class _SSOGroup(Base):
__tablename__ = 'zato_sso_group'
__table_args__ = (
UniqueConstraint('name', 'source', name='zato_g_name_uq'),
UniqueConstraint('group_id', name='zato_g_gid_uq'),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_group_id_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
# Publicly visible
group_id = Column(String(191), nullable=False)
name = Column(String(191), nullable=False)
source = Column(String(191), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Groups may be optionally nested
parent_id = Column(Integer, ForeignKey('zato_sso_group.id', ondelete='CASCADE'), nullable=True)
# ################################################################################################################################
class _SSOUser(Base):
__tablename__ = 'zato_sso_user'
__table_args__ = (
UniqueConstraint('username', name='zato_u_usrn_uq'),
UniqueConstraint('user_id', name='zato_user_id_uq'),
Index('zato_u_email_idx', 'email', unique=False, mysql_length={'email':767}),
Index('zato_u_appr_stat_idx', 'approval_status', unique=False),
Index('zato_u_dspn_idx', 'display_name_upper', unique=False),
Index('zato_u_alln_idx', 'first_name_upper', 'middle_name_upper', 'last_name_upper', unique=False),
Index('zato_u_lastn_idx', 'last_name_upper', unique=False),
Index('zato_u_sigst_idx', 'sign_up_status', unique=False),
Index('zato_u_sigctok_idx', 'sign_up_confirm_token', unique=True),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_user_id_seq'), primary_key=True)
# Publicly visible
user_id = Column(String(191), nullable=False)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
is_super_user = Column(Boolean(), nullable=False, default=False)
is_locked = Column(Boolean(), nullable=False, default=False)
locked_time = Column(DateTime(), nullable=True)
# Creation metadata, e.g. what this user's remote IP was
creation_ctx = Column(Text(), nullable=False)
# Note that this is not an FK - this is on purpose to keep this information around
# even if parent row is deleted.
locked_by = Column(String(191), nullable=True)
approval_status = Column(String(191), nullable=False)
approval_status_mod_time = Column(DateTime(), nullable=False) # When user was approved or rejected
approval_status_mod_by = Column(String(191), nullable=False) # Same comment as in locked_by
# Basic information, always required
username = Column(String(191), nullable=False)
password = Column(Text(), nullable=False)
password_is_set = Column(Boolean(), nullable=False)
password_must_change = Column(Boolean(), nullable=False)
password_last_set = Column(DateTime(), nullable=False)
password_expiry = Column(DateTime(), nullable=False)
# Sign-up information, possibly used in API workflows
sign_up_status = Column(String(191), nullable=False)
sign_up_time = Column(DateTime(), nullable=False)
sign_up_confirm_time = Column(DateTime(), nullable=True)
sign_up_confirm_token = Column(String(191), nullable=False)
# Won't be always needed
email = Column(Text(), nullable=True)
# Various cultures don't have a notion of first or last name and display_name is the one that can be used in that case.
display_name = Column(String(191), nullable=True)
first_name = Column(String(191), nullable=True)
middle_name = Column(String(191), nullable=True)
last_name = Column(String(191), nullable=True)
# Same as above but upper-cased for look-up / indexing purposes
display_name_upper = Column(String(191), nullable=True)
first_name_upper = Column(String(191), nullable=True)
middle_name_upper = Column(String(191), nullable=True)
last_name_upper = Column(String(191), nullable=True)
# Rate limiting
is_rate_limit_active = Column(Boolean(), nullable=True)
rate_limit_type = Column(String(40), nullable=True)
rate_limit_def = Column(Text(), nullable=True)
rate_limit_check_parent_def = Column(Boolean(), nullable=True)
# TOTP
is_totp_enabled = Column(Boolean(), nullable=False, server_default=sa_false())
totp_key = Column(Text(), nullable=True)
totp_label = Column(Text(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# ################################################################################################################################
class _SSOUserGroup(Base):
""" An N:N mapping of users to their groups.
"""
__tablename__ = 'zato_sso_user_group'
__table_args__ = (
UniqueConstraint('user_id', 'group_id', name='zato_ug_id_uq'),
{})
# Not exposed publicly, used only to have a natural FK
id = Column(Integer, Sequence('zato_sso_ug_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
user_id = Column(Integer, ForeignKey('zato_sso_user.id', ondelete='CASCADE'), nullable=False)
group_id = Column(Integer, ForeignKey('zato_sso_group.id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
class _SSOSession(Base):
__tablename__ = 'zato_sso_session'
__table_args__ = (
Index('zato_sso_sust_idx', 'ust', unique=True),
Index('zato_sso_extsi_idx', 'ext_session_id', unique=False, mysql_length={'ext_session_id':767}),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_sid_seq'), primary_key=True)
# Publicly visible session identifier (user session token)
ust = Column(String(191), nullable=False)
creation_time = Column(DateTime(), nullable=False)
expiration_time = Column(DateTime(), nullable=False)
remote_addr = Column(Text(), nullable=False)
user_agent = Column(Text(), nullable=False)
auth_type = Column(Text(), nullable=False)
auth_principal = Column(Text(), nullable=False)
# ID of a session external to SSO that is linked to this one,
# where external may still mean JWT or Basic Auth,
# but it is not a built-in SSO one.
ext_session_id = Column(Text(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
@declared_attr
def user_id(cls):
return Column(Integer, ForeignKey('zato_sso_user.id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
class _SSOAttr(Base):
__tablename__ = 'zato_sso_attr'
__table_args__ = (
UniqueConstraint('name', 'is_session_attr', 'user_id', '_ust_string', name='zato_attr_name_uq'),
Index('zato_attr_usr', 'user_id', unique=False),
Index('zato_attr_usr_ust', 'user_id', 'ust', unique=False),
Index('zato_attr_usr_name', 'user_id', 'name', unique=False),
Index('zato_attr_usr_ust_name', 'user_id', 'ust', 'name', unique=True),
{})
# Not exposed publicly, used only because SQLAlchemy requires an FK
id = Column(Integer, Sequence('zato_sso_attr_seq'), primary_key=True)
creation_time = Column(DateTime(), nullable=False)
last_modified = Column(DateTime(), nullable=True)
expiration_time = Column(DateTime(), nullable=True)
is_session_attr = Column(Boolean(), nullable=False)
is_encrypted = Column(Boolean(), nullable=False, default=False)
serial_method = Column(String(20), nullable=False, default='json')
name = Column(String(191), nullable=False)
value = Column(Text(), nullable=True)
# Unlike ust, this column cannot be NULL, so it can be used for practical purposes in the unique
# constraint 'zato_attr_name_uq' - otherwise, all NULL values would be considered distinct (or at least
# incomparable) and, API-wise, it would not be possible to construct a sensible unique constraint.
# A sketch illustrating this follows the class.
_ust_string = Column(String(191), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
ust = Column(String(191), ForeignKey('zato_sso_session.ust', ondelete='CASCADE'), nullable=True)
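# A hypothetical sketch (not part of the original model) of why _ust_string is a
# NOT NULL string instead of a nullable one - in SQL, NULL is never equal to NULL,
# so a unique constraint over a nullable column cannot reject two rows that both
# store NULL in it.
if 0:
    # If _ust_string were nullable, both of these would succeed, defeating zato_attr_name_uq ..
    #
    #   INSERT INTO zato_sso_attr (name, is_session_attr, user_id, _ust_string) VALUES ('a', false, 'u1', NULL)
    #   INSERT INTO zato_sso_attr (name, is_session_attr, user_id, _ust_string) VALUES ('a', false, 'u1', NULL)
    #
    # .. whereas with an empty string standing in for 'no session', the second
    # insert violates the constraint, which is the desired outcome.
    pass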
# ################################################################################################################################
class _SSOLinkedAuth(Base):
__tablename__ = 'zato_sso_linked_auth'
__table_args__ = (
Index('auth_idx', 'auth_type', 'user_id', 'auth_id', 'auth_principal', unique=True,
mysql_length={'auth_type':191, 'user_id':191, 'auth_principal':191}),
{})
# Not exposed publicly, used only because SQLAlchemy requires a primary key
id = Column(Integer, Sequence('zato_sso_linked_auth_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
creation_time = Column(DateTime(), nullable=False)
last_modified = Column(DateTime(), nullable=True)
# If True, auth_principal will point to an account/user defined externally to Zato,
# e.g. in a system that Zato has no direct authentication support for.
# Otherwise, if False, auth_id will be filled in.
has_ext_principal = Column(Boolean(), nullable=False)
# A label describing authentication type
auth_type = Column(Text(191), nullable=False)
#
# Will be provided if has_ext_principal is False, in which case it will point to one of sec_base.id definitions.
#
# Note that if the SSO ODB is installed in a standalone database, this column will not be an FK
# because there will be no parent sec_base.id column to point to. The Alembic logic to add
# the FK after the table is created is implemented in cli/create_odb.py:Create.
auth_id = Column(Integer, nullable=True)
# Will be given if auth_id is not provided.
auth_principal = Column(Text(191), nullable=True)
# E.g. name of an environment this link is valid in - useful in cases when the same user
# has multiple linked accounts, different in different auth sources (environments).
auth_source = Column(Text(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# SSO user this entry links to
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
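# A hypothetical sketch (all values illustrative) of the either/or contract between
# auth_id and auth_principal described above.
if 0:
    link = _SSOLinkedAuth()

    # An internal principal - auth_id points to one of sec_base.id ..
    link.has_ext_principal = False
    link.auth_id = 123

    # .. whereas an external one carries no auth_id, only a principal name.
    link.has_ext_principal = True
    link.auth_id = None
    link.auth_principal = 'external.user.1'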
# ################################################################################################################################
class _SSOPasswordReset(Base):
__tablename__ = 'zato_sso_password_reset'
__table_args__ = (
Index('zato_prt_value_type', 'token', 'type_', unique=True),
{})
# Not exposed publicly, used only because SQLAlchemy requires a primary key
id = Column(Integer, Sequence('zato_sso_flow_prt_seq'), primary_key=True)
creation_time = Column(DateTime(), nullable=False)
expiration_time = Column(DateTime(), nullable=False)
# Creation metadata in JSON
creation_ctx = Column(_JSON(), nullable=False)
# The actual PRT (password reset token)
token = Column(String(191), nullable=False)
# PRT type - what kind it is, e.g. a Zato built-in one or an external one
type_ = Column(String(191), nullable=False)
# This key is used to reset the password after the PRT has been accessed
reset_key = Column(String(191), nullable=False)
# This is set when the PRT is accessed in order to set a time limit
# for the password reset procedure (using prt.password_change_session_duration from sso.conf)
reset_key_exp_time = Column(DateTime(), nullable=False)
# Will be set to True when the PRT has been accessed in any way,
# e.g. a user clicks on a link.
has_been_accessed = Column(Boolean(), nullable=False, default=False)
# When was the PRT accessed
access_time = Column(DateTime(), nullable=True)
# Access metadata in JSON
access_ctx = Column(_JSON(), nullable=True)
# Will be set to True when a password is reset using this PRT and reset_key
is_password_reset = Column(Boolean(), nullable=False, default=False)
# When was the password reset
password_reset_time = Column(DateTime(), nullable=True)
# Password reset metadata in JSON
password_reset_ctx = Column(_JSON(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# SSO user this entry links to
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
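# A hypothetical sketch (not an actual API) of the PRT lifecycle that the flags above capture.
if 0:
    prt = _SSOPasswordReset()

    # 1) The token is created along with its reset_key and both expiration times.
    # 2) The user accesses the link carrying the PRT ..
    prt.has_been_accessed = True

    # 3) .. and then changes the password using reset_key before reset_key_exp_time.
    prt.is_password_reset = True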
# ################################################################################################################################
size: 13,838 | language: Python | extension: .py | total_lines: 239 | avg_line_length: 52.577406 | max_line_length: 130 | alphanum_fraction: 0.641604 | repo_name: zatosource/zato | repo_stars: 1,096 | repo_forks: 239 | repo_open_issues: 0 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:54 PM (Europe/Amsterdam)

id: 10,659 | file_name: __init__.py | file_path: zatosource_zato/code/zato-common/src/zato/common/odb/model/__init__.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from ftplib import FTP_PORT
# SQLAlchemy
from sqlalchemy import BigInteger, Boolean, Column, DateTime, Enum, false as sa_false, ForeignKey, Index, Integer, \
LargeBinary, Numeric, Sequence, SmallInteger, String, Text, true as sa_true, UniqueConstraint
from sqlalchemy.orm import backref, relationship
# Zato
from zato.common.api import AMQP, CASSANDRA, CLOUD, DATA_FORMAT, HTTP_SOAP_SERIALIZATION_TYPE, MISC, NOTIF, ODOO, SAP, PUBSUB, \
SCHEDULER, STOMP, PARAMS_PRIORITY, URL_PARAMS_PRIORITY
from zato.common.json_internal import json_dumps
from zato.common.odb.const import WMQ_DEFAULT_PRIORITY
from zato.common.odb.model.base import Base, _JSON
from zato.common.odb.model.sso import _SSOAttr, _SSOPasswordReset, _SSOGroup, _SSOLinkedAuth, _SSOSession, _SSOUser
from zato.common.typing_ import cast_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import boolnone, floatnone, intnone, strnone
boolnone = boolnone
floatnone = floatnone
intnone = intnone
strnone = strnone
# ################################################################################################################################
# ################################################################################################################################
def to_json(model, return_as_dict=False):
""" Returns a JSON representation of an SQLAlchemy-backed object.
"""
out = {}
out['fields'] = {}
out['pk'] = getattr(model, 'id', None)
for col in model._sa_class_manager.mapper.mapped_table.columns:
out['fields'][col.name] = getattr(model, col.name)
if return_as_dict:
return out
else:
return json_dumps([out])
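# A minimal, hypothetical usage sketch of to_json - the session setup below is
# illustrative only and not part of this module.
if 0:
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    session = sessionmaker(bind=create_engine('sqlite://'))()
    cluster = session.query(Cluster).filter_by(name='My Cluster').one()

    print(to_json(cluster, return_as_dict=True)) # A dict with 'fields' and 'pk' keys
    print(cluster.to_json())                     # The same data as a JSON string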
# ################################################################################################################################
class SSOGroup(_SSOGroup):
pass
# ################################################################################################################################
class SSOUser(_SSOUser):
pass
# ################################################################################################################################
class SSOSession(_SSOSession):
pass
# ################################################################################################################################
class SSOAttr(_SSOAttr):
pass
# ################################################################################################################################
class SSOLinkedAuth(_SSOLinkedAuth):
pass
# ################################################################################################################################
class SSOPasswordReset(_SSOPasswordReset):
pass
# ################################################################################################################################
class AlembicRevision(Base):
""" A table for Alembic to store its revision IDs for SQL migrations.
Note that Alembic, as of version 0.6.0, the latest one right now (Sun, Jun 8 2014),
doesn't declare 'version_num' to be a primary key, but we need to because SQLAlchemy always requires one.
"""
__tablename__ = 'alembic_version'
version_num = Column(String(32), primary_key=True)
def __init__(self, version_num=None):
self.version_num = version_num
# ################################################################################################################################
class ZatoInstallState(Base):
""" Contains a row for each Zato installation belonging to that particular
ODB. For instance, installing Zato 1.0 will add a new row, and installing 1.1 will add another one.
"""
__tablename__ = 'install_state'
id = Column(Integer, Sequence('install_state_seq'), primary_key=True)
version = Column(Integer, unique=True, nullable=False)
install_time = Column(DateTime(), nullable=False)
source_host = Column(String(200), nullable=False)
source_user = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def __init__(self, id=None, version=None, install_time=None, source_host=None, source_user=None):
self.id = id
self.version = version
self.install_time = install_time
self.source_host = source_host
self.source_user = source_user
# ################################################################################################################################
class Cluster(Base):
""" Represents a Zato cluster.
"""
__tablename__ = 'cluster'
id = Column(Integer, Sequence('cluster_id_seq'), primary_key=True)
name = Column(String(200), unique=True, nullable=False)
description = Column(String(1000), nullable=True)
odb_type = Column(String(30), nullable=False)
odb_host = Column(String(200), nullable=True)
odb_port = Column(Integer(), nullable=True)
odb_user = Column(String(200), nullable=True)
odb_db_name = Column(String(200), nullable=True)
odb_schema = Column(String(200), nullable=True)
broker_host = Column(String(200), nullable=False, default='broker-host-unused')
broker_port = Column(Integer(), nullable=False, default=998877)
lb_host = Column(String(200), nullable=False)
lb_port = Column(Integer(), nullable=False)
lb_agent_port = Column(Integer(), nullable=False)
cw_srv_id = Column(Integer(), nullable=True)
cw_srv_keep_alive_dt = Column(DateTime(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def __init__(self, id=None, name=None, description=None, odb_type=None, odb_host=None, odb_port=None, odb_user=None,
odb_db_name=None, odb_schema=None, broker_host=None, broker_port=None, lb_host=None, lb_port=None,
lb_agent_port=None, cw_srv_id=None, cw_srv_keep_alive_dt=None):
self.id = id
self.name = name
self.description = description
self.odb_type = odb_type
self.odb_host = odb_host
self.odb_port = odb_port
self.odb_user = odb_user
self.odb_db_name = odb_db_name
self.odb_schema = odb_schema
self.broker_host = broker_host
self.broker_port = broker_port
self.lb_host = lb_host
self.lb_agent_port = lb_agent_port
self.lb_port = lb_port
self.cw_srv_id = cw_srv_id
self.cw_srv_keep_alive_dt = cw_srv_keep_alive_dt
def to_json(self):
return to_json(self)
# ################################################################################################################################
class Server(Base):
""" Represents a Zato server.
"""
__tablename__ = 'server'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('server_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
host = Column(String(400), nullable=True)
bind_host = Column(String(400), nullable=True)
bind_port = Column(Integer(), nullable=True)
preferred_address = Column(String(400), nullable=True)
crypto_use_tls = Column(Boolean(), nullable=True)
# Whether the server's request to join a cluster has been accepted - for now,
# it always will be.
last_join_status = Column(String(40), nullable=True)
last_join_mod_date = Column(DateTime(), nullable=True)
last_join_mod_by = Column(String(200), nullable=True)
# Whether the server's up or not
up_status = Column(String(40), nullable=True)
up_mod_date = Column(DateTime(), nullable=True)
token = Column(String(32), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('servers', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, cluster=None, token=None, last_join_status=None, last_join_mod_date=None,
last_join_mod_by=None):
self.id = id
self.name = name
self.cluster = cluster
self.token = token
self.last_join_status = last_join_status
self.last_join_mod_date = last_join_mod_date
self.last_join_mod_by = last_join_mod_by
self.has_lb_config = False # Not used by the database
self.in_lb = False # Not used by the database
self.lb_state = None # Not used by the database
self.lb_address = None # Not used by the database
self.may_be_deleted = None # Not used by the database
self.up_mod_date_user = None # Not used by the database
# ################################################################################################################################
class SecurityBase(Base):
""" A base class for all the security definitions.
"""
__tablename__ = 'sec_base'
__table_args__ = (UniqueConstraint('cluster_id', 'name'),
UniqueConstraint('cluster_id', 'username', 'sec_type'), {})
__mapper_args__ = {'polymorphic_on': 'sec_type'}
id = Column(Integer, Sequence('sec_base_seq'), primary_key=True)
name = Column(String(200), nullable=False)
# It's nullable because some child classes do not use usernames
username = Column(String(200), nullable=True)
password = Column(String(1000), nullable=True)
password_type = Column(String(45), nullable=True)
is_active = Column(Boolean(), nullable=False)
sec_type = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('security_list', order_by=name, cascade='all, delete, delete-orphan'))
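# A minimal, hypothetical sketch of the polymorphic mapping above - querying the base
# class yields instances of the concrete subclasses, discriminated by the sec_type column.
if 0:
    session = None # Stands in for a live SQLAlchemy session - illustrative only
    for sec_def in session.query(SecurityBase).all():
        # Each element is e.g. an HTTPBasicAuth or JWT instance,
        # depending on the row's sec_type value.
        print(sec_def.sec_type, sec_def.name)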
# ################################################################################################################################
class MultiSecurity(Base):
""" An N:N mapping between security definitions and objects making use of them.
"""
__tablename__ = 'sec_multi'
__table_args__ = (UniqueConstraint('cluster_id', 'conn_id', 'conn_type', 'security_id', 'is_channel', 'is_outconn'), {})
id = Column(Integer, Sequence('sec_multi_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
priority = Column(Integer(), nullable=False)
conn_id = Column(String(100), nullable=False)
conn_type = Column(String(100), nullable=False)
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
security = relationship(SecurityBase, backref=backref('sec_multi_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sec_multi_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class HTTPBasicAuth(SecurityBase):
""" An HTTP Basic Auth definition.
"""
__tablename__ = 'sec_basic_auth'
__mapper_args__ = {'polymorphic_identity': 'basic_auth'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
realm = Column(String(200), nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, realm=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.realm = realm
self.password = password
self.cluster = cluster
# ################################################################################################################################
class JWT(SecurityBase):
""" A set of JavaScript Web Token (JWT) credentials.
"""
__tablename__ = 'sec_jwt'
__mapper_args__ = {'polymorphic_identity': 'jwt'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
ttl = Column(Integer, nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, ttl=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.ttl = ttl
self.cluster = cluster
# ################################################################################################################################
class WSSDefinition(SecurityBase):
""" A WS-Security definition.
"""
__tablename__ = 'sec_wss_def'
__mapper_args__ = {'polymorphic_identity':'wss'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
reject_empty_nonce_creat = Column(Boolean(), nullable=False)
reject_stale_tokens = Column(Boolean(), nullable=True)
reject_expiry_limit = Column(Integer(), nullable=False)
nonce_freshness_time = Column(Integer(), nullable=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, password_type=None,
reject_empty_nonce_creat=None, reject_stale_tokens=None, reject_expiry_limit=None, nonce_freshness_time=None,
cluster=None, password_type_raw=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.password_type = password_type
self.reject_empty_nonce_creat = reject_empty_nonce_creat
self.reject_stale_tokens = reject_stale_tokens
self.reject_expiry_limit = reject_expiry_limit
self.nonce_freshness_time = nonce_freshness_time
self.cluster = cluster
self.password_type_raw = password_type_raw
# ################################################################################################################################
class OAuth(SecurityBase):
""" Stores OAuth credentials.
"""
__tablename__ = 'sec_oauth'
__mapper_args__ = {'polymorphic_identity':'oauth'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
proto_version = Column(String(32), nullable=False)
sig_method = Column(String(32), nullable=False) # HMAC-SHA1 or PLAINTEXT
max_nonce_log = Column(Integer(), nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, proto_version=None, sig_method=None,
max_nonce_log=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.proto_version = proto_version
self.sig_method = sig_method
self.max_nonce_log = max_nonce_log
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class NTLM(SecurityBase):
""" Stores NTLM definitions.
"""
__tablename__ = 'sec_ntlm'
__mapper_args__ = {'polymorphic_identity': 'ntlm'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class AWSSecurity(SecurityBase):
""" Stores Amazon credentials.
"""
__tablename__ = 'sec_aws'
__mapper_args__ = {'polymorphic_identity': 'aws'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class OpenStackSecurity(SecurityBase):
""" Stores OpenStack credentials.
"""
__tablename__ = 'sec_openstack'
__mapper_args__ = {'polymorphic_identity': 'openstack'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class APIKeySecurity(SecurityBase):
""" Stores API keys.
"""
__tablename__ = 'sec_apikey'
__mapper_args__ = {'polymorphic_identity': 'apikey'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.cluster = cluster
self.header = None # Not used by the DB
def to_json(self):
return to_json(self)
# ################################################################################################################################
class XPathSecurity(SecurityBase):
""" Stores XPath-based credentials.
"""
__tablename__ = 'sec_xpath'
__mapper_args__ = {'polymorphic_identity':'xpath_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
username_expr = Column(String(200), nullable=False)
password_expr = Column(String(200), nullable=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, username_expr=None, password_expr=None,
cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.username_expr = username_expr
self.password_expr = password_expr
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class TLSKeyCertSecurity(SecurityBase):
""" Stores information regarding TLS key/cert pairs used in outgoing connections.
"""
__tablename__ = 'sec_tls_key_cert'
__mapper_args__ = {'polymorphic_identity':'tls_key_cert'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
info = Column(LargeBinary(200000), nullable=False)
auth_data = Column(LargeBinary(200000), nullable=False)
# ################################################################################################################################
class TLSChannelSecurity(SecurityBase):
""" Stores information regarding TLS client certificate-based security definitions.
"""
__tablename__ = 'sec_tls_channel'
__mapper_args__ = {'polymorphic_identity':'tls_channel_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
value = Column(LargeBinary(200000), nullable=False)
# ################################################################################################################################
class VaultConnection(SecurityBase):
""" Stores information on how to connect to Vault and how to authenticate against it by default.
"""
__tablename__ = 'sec_vault_conn'
__mapper_args__ = {'polymorphic_identity':'vault_conn_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
url = Column(String(200), nullable=False)
token = Column(String(200), nullable=True)
default_auth_method = Column(String(200), nullable=True)
timeout = Column(Integer, nullable=False)
allow_redirects = Column(Boolean(), nullable=False)
tls_verify = Column(Boolean(), nullable=False)
tls_key_cert_id = Column(Integer, ForeignKey('sec_tls_key_cert.id', ondelete='CASCADE'), nullable=True)
tls_ca_cert_id = Column(Integer, ForeignKey('sec_tls_ca_cert.id', ondelete='CASCADE'), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
service = relationship('Service', backref=backref('vault_conn_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class TLSCACert(Base):
""" Stores information regarding CA certs.
"""
__tablename__ = 'sec_tls_ca_cert'
id = Column(Integer, Sequence('sec_tls_ca_cert_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(LargeBinary(200000), nullable=False)
info = Column(LargeBinary(200000), nullable=False)
is_active = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('ca_cert_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class HTTPSOAP(Base):
""" An incoming or outgoing HTTP/SOAP connection.
"""
__tablename__ = 'http_soap'
__table_args__ = (
UniqueConstraint('name', 'connection', 'transport', 'cluster_id'),
Index('path_host_conn_act_clus_idx', 'url_path', 'host', 'connection', 'soap_action', 'cluster_id', unique=False), {})
id = Column(Integer, Sequence('http_soap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
connection = Column(String(20), nullable=False)
transport = Column(String(200), nullable=False)
host = Column(String(200), nullable=True)
url_path = Column(String(200), nullable=False)
method = Column(String(200), nullable=True)
content_encoding = Column(String(200), nullable=True)
soap_action = Column(String(200), nullable=False)
soap_version = Column(String(20), nullable=True)
data_format = Column(String(20), nullable=True)
content_type = Column(String(200), nullable=True)
ping_method = Column(String(60), nullable=True)
pool_size = Column(Integer, nullable=True)
serialization_type = Column(String(200), nullable=False, default=HTTP_SOAP_SERIALIZATION_TYPE.SUDS.id)
timeout = Column(Integer(), nullable=False, default=MISC.DEFAULT_HTTP_TIMEOUT)
merge_url_params_req = Column(Boolean, nullable=True, default=True)
url_params_pri = Column(String(200), nullable=True, default=URL_PARAMS_PRIORITY.DEFAULT)
params_pri = Column(String(200), nullable=True, default=PARAMS_PRIORITY.DEFAULT)
has_rbac = Column(Boolean, nullable=False, default=False)
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True)
security = relationship(SecurityBase, backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
sec_tls_ca_cert_id = Column(Integer, ForeignKey('sec_tls_ca_cert.id', ondelete='CASCADE'), nullable=True)
sec_tls_ca_cert = relationship('TLSCACert', backref=backref('http_soap', order_by=name, cascade='all, delete, delete-orphan'))
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
service = relationship('Service', backref=backref('http_soap', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, is_internal=None, connection=None, transport=None, host=None,
url_path=None, method=None, soap_action=None, soap_version=None, data_format=None, ping_method=None,
pool_size=None, merge_url_params_req=None, url_params_pri=None, params_pri=None, serialization_type=None,
timeout=None, sec_tls_ca_cert_id=None, service_id=None, service=None, security=None, cluster_id=None,
cluster=None, service_name=None, security_id=None, has_rbac=None, security_name=None, content_type=None,
cache_id=None, cache_type=None, cache_expiry=None, cache_name=None, content_encoding=None, match_slash=None,
http_accept=None, opaque=None, **kwargs):
super(HTTPSOAP, self).__init__(**kwargs)
self.id = id
self.name = name
self.is_active = is_active
self.is_internal = is_internal
self.connection = connection
self.transport = transport
self.host = host
self.url_path = url_path
self.method = method
self.soap_action = soap_action
self.soap_version = soap_version
self.data_format = data_format
self.ping_method = ping_method
self.pool_size = pool_size
self.merge_url_params_req = merge_url_params_req
self.url_params_pri = url_params_pri
self.params_pri = params_pri
self.serialization_type = serialization_type
self.timeout = timeout
self.sec_tls_ca_cert_id = sec_tls_ca_cert_id
self.service_id = service_id
self.service = service
self.security = security
self.cluster_id = cluster_id
self.cluster = cluster
self.service_name = service_name # Not used by the DB
self.security_id = security_id
self.has_rbac = has_rbac
self.security_name = security_name
self.content_type = content_type
self.cache_id = cache_id
self.cache_type = cache_type
self.cache_expiry = cache_expiry
self.cache_name = cache_name # Not used by the DB
self.content_encoding = content_encoding
self.match_slash = match_slash # Not used by the DB
self.http_accept = http_accept # Not used by the DB
self.opaque1 = opaque
self.is_rate_limit_active = None
self.rate_limit_type = None
self.rate_limit_def = None
self.rate_limit_check_parent_def = None
self.is_wrapper = None
self.wrapper_type = None
self.password = None
self.security_groups_count = None
self.security_groups_member_count = None
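# A hypothetical sketch (all values illustrative) of creating an incoming REST channel
# with this model - the 'channel' and 'plain_http' literals mirror how connection and
# transport are used elsewhere in Zato.
if 0:
    channel = HTTPSOAP(name='api.my.channel', is_active=True, is_internal=False,
        connection='channel', transport='plain_http', url_path='/api/my/channel',
        soap_action='', data_format=DATA_FORMAT.JSON)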
# ################################################################################################################################
class SQLConnectionPool(Base):
""" An SQL connection pool.
"""
__tablename__ = 'sql_pool'
__table_args__ = (UniqueConstraint('cluster_id', 'name'), {})
id = Column(Integer, Sequence('sql_pool_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=False)
password = Column(String(200), nullable=False)
db_name = Column(String(200), nullable=False)
engine = Column(String(200), nullable=False)
extra = Column(LargeBinary(20000), nullable=True)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False)
pool_size = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sql_pools', order_by=name, cascade='all, delete, delete-orphan'))
engine_display_name = None # For auto-completion, not used by DB
def __init__(self, id=None, name=None, is_active=None, db_name=None, username=None, engine=None, extra=None, host=None,
port=None, pool_size=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.db_name = db_name
self.username = username
self.engine = engine
self.extra = extra
self.host = host
self.port = port
self.pool_size = pool_size
self.cluster = cluster
# ################################################################################################################################
class Service(Base):
""" A set of basic informations about a service available in a given cluster.
"""
__tablename__ = 'service'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('service_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
impl_name = Column(String(2000), nullable=False)
is_internal = Column(Boolean(), nullable=False)
wsdl = Column(LargeBinary(5000000), nullable=True)
wsdl_name = Column(String(200), nullable=True)
slow_threshold = Column(Integer, nullable=False, default=99999)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('services', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, impl_name=None, is_internal=None, cluster=None, wsdl=None,
wsdl_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.impl_name = impl_name
self.is_internal = is_internal
self.cluster = cluster
self.wsdl = wsdl
self.wsdl_name = wsdl_name
self.plain_http_channels = [] # Not used by the database
self.soap_channels = [] # Not used by the database
self.amqp_channels = [] # Not used by the database
self.wmq_channels = [] # Not used by the database
self.zmq_channels = [] # Not used by the database
self.scheduler_jobs = [] # Not used by the database
self.deployment_info = [] # Not used by the database
self.source_info = None # Not used by the database
self.may_be_deleted = False # Not used by the database
self.sample_cid = None # Not used by the database
self.sample_req_timestamp = None # Not used by the database
self.sample_resp_timestamp = None # Not used by the database
self.sample_req = None # Not used by the database
self.sample_resp = None # Not used by the database
self.sample_req_resp_freq = None # Not used by the database
self.sample_req_html = None # Not used by the database
self.sample_resp_html = None # Not used by the database
self.usage = None # Not used by the database
self.time_last = None # Not used by the database
self.time_min_all_time = None # Not used by the database
self.time_max_all_time = None # Not used by the database
self.time_mean_all_time = None # Not used by the database
self.time_usage_1h = None # Not used by the database
self.time_min_1h = None # Not used by the database
self.time_max_1h = None # Not used by the database
self.time_trend_mean_1h = None # Not used by the database
self.time_trend_rate_1h = None # Not used by the database
self.docs_summary = None # Not used by the database
self.docs_description = None # Not used by the database
self.invokes = None # Not used by the database
self.invoked_by = None # Not used by the database
self.last_timestamp = None # Not used by the database
self.last_timestamp_utc = None # Not used by the database
# ################################################################################################################################
class DeployedService(Base):
""" A service living on a given server.
"""
__tablename__ = 'deployed_service'
__table_args__ = (UniqueConstraint('server_id', 'service_id'), {})
deployment_time = Column(DateTime(), nullable=False)
details = Column(String(2000), nullable=False)
source = Column(LargeBinary(500000), nullable=True)
source_path = Column(String(2000), nullable=True)
source_hash = Column(String(512), nullable=True)
source_hash_method = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=True)
server = relationship(Server, backref=backref('deployed_services', order_by=deployment_time, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False, primary_key=True)
service = relationship(Service, backref=backref('deployment_data', order_by=deployment_time, cascade='all, delete, delete-orphan'))
def __init__(self, deployment_time, details, server_id, service_id, source, source_path, source_hash, source_hash_method):
self.deployment_time = deployment_time
self.details = details
self.server_id = server_id
self.service_id = service_id
self.source = source
self.source_path = source_path
self.source_hash = source_hash
self.source_hash_method = source_hash_method
# ################################################################################################################################
class Job(Base):
""" A scheduler's job. Stores all the information needed to execute a job
if it's a one-time job, otherwise the information is kept in related tables.
"""
__tablename__ = 'job'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('job_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
job_type = Column(Enum(SCHEDULER.JOB_TYPE.ONE_TIME, SCHEDULER.JOB_TYPE.INTERVAL_BASED,
SCHEDULER.JOB_TYPE.CRON_STYLE, name='job_type'), nullable=False)
start_date = Column(DateTime(), nullable=False)
extra = Column(LargeBinary(500000), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('jobs', order_by=name, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('jobs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, job_type=None, start_date=None, extra=None, cluster=None,
cluster_id=None, service=None, service_id=None, service_name=None, interval_based=None, cron_style=None,
definition_text=None, job_type_friendly=None):
self.id = id
self.name = name
self.is_active = is_active
self.job_type = job_type
self.start_date = start_date
self.extra = extra
self.cluster = cluster
self.cluster_id = cluster_id
self.service = service
self.service_id = service_id
self.service_name = service_name # Not used by the database
self.interval_based = interval_based
self.cron_style = cron_style
self.definition_text = definition_text # Not used by the database
self.job_type_friendly = job_type_friendly # Not used by the database
# ################################################################################################################################
class IntervalBasedJob(Base):
""" A Cron-style scheduler's job.
"""
__tablename__ = 'job_interval_based'
__table_args__ = (UniqueConstraint('job_id'), {})
id = Column(Integer, Sequence('job_intrvl_seq'), primary_key=True)
weeks = Column(Integer, nullable=True)
days = Column(Integer, nullable=True)
hours = Column(Integer, nullable=True)
minutes = Column(Integer, nullable=True)
seconds = Column(Integer, nullable=True)
repeats = Column(Integer, nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
job_id = Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=False)
job = relationship(Job, backref=backref('interval_based', uselist=False, cascade='all, delete, delete-orphan', single_parent=True))
def __init__(self, id=None, job=None, weeks=None, days=None, hours=None, minutes=None, seconds=None, repeats=None,
definition_text=None):
self.id = id
self.job = job
self.weeks = weeks
self.days = days
self.hours = hours
self.minutes = minutes
self.seconds = seconds
self.repeats = repeats
self.definition_text = definition_text # Not used by the database
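# A hypothetical sketch (names are illustrative) of the one-to-one Job <-> IntervalBasedJob
# mapping above - assigning the job in __init__ also populates the 'interval_based' backref.
if 0:
    job = Job(name='my.job', is_active=True, job_type=SCHEDULER.JOB_TYPE.INTERVAL_BASED,
        start_date=datetime.utcnow())
    interval = IntervalBasedJob(job=job, minutes=10)
    assert job.interval_based is interval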
# ################################################################################################################################
class CronStyleJob(Base):
""" A Cron-style scheduler's job.
"""
__tablename__ = 'job_cron_style'
__table_args__ = (UniqueConstraint('job_id'), {})
id = Column(Integer, Sequence('job_cron_seq'), primary_key=True)
cron_definition = Column(String(4000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
job_id = Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=False)
job = relationship(
Job, backref=backref('cron_style', uselist=False, cascade='all, delete, delete-orphan', single_parent=True))
def __init__(self, id=None, job=None, cron_definition=None):
self.id = id
self.job = job
self.cron_definition = cron_definition
# ################################################################################################################################
class Cache(Base):
""" Base class for all cache definitions.
"""
__tablename__ = 'cache'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
__mapper_args__ = {'polymorphic_on': 'cache_type'}
id = Column(Integer, Sequence('cache_builtin_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_default = Column(Boolean(), nullable=False)
cache_type = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cache_list', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self):
self.current_size = 0 # Not used by the DB
# ################################################################################################################################
class CacheBuiltin(Cache):
""" Cache definitions using mechanisms built into Zato.
"""
__tablename__ = 'cache_builtin'
__mapper_args__ = {'polymorphic_identity':'builtin'}
cache_id = Column(Integer, ForeignKey('cache.id'), primary_key=True)
max_size = Column(Integer(), nullable=False)
max_item_size = Column(Integer(), nullable=False)
extend_expiry_on_get = Column(Boolean(), nullable=False)
extend_expiry_on_set = Column(Boolean(), nullable=False)
sync_method = Column(String(20), nullable=False)
persistent_storage = Column(String(40), nullable=False)
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class CacheMemcached(Cache):
""" Cache definitions using Memcached.
"""
__tablename__ = 'cache_memcached'
__mapper_args__ = {'polymorphic_identity':'memcached'}
cache_id = Column(Integer, ForeignKey('cache.id'), primary_key=True)
servers = Column(Text, nullable=False)
is_debug = Column(Boolean(), nullable=False)
extra = Column(LargeBinary(20000), nullable=True)
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class ConnDefAMQP(Base):
""" An AMQP connection definition.
"""
__tablename__ = 'conn_def_amqp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False)
vhost = Column(String(200), nullable=False)
username = Column(String(200), nullable=False)
password = Column(String(200), nullable=False)
frame_max = Column(Integer(), nullable=False)
heartbeat = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('amqp_conn_defs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, host=None, port=None, vhost=None, username=None, password=None, frame_max=None,
heartbeat=None, cluster_id=None, cluster=None):
self.id = id
self.name = name
self.host = host
self.port = port
self.vhost = vhost
self.username = username
self.password = password
self.frame_max = frame_max
self.heartbeat = heartbeat
self.cluster_id = cluster_id
self.cluster = cluster
# ################################################################################################################################
class ConnDefWMQ(Base):
""" A IBM MQ connection definition.
"""
__tablename__ = 'conn_def_wmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
# TODO is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer, nullable=False)
queue_manager = Column(String(200), nullable=True)
channel = Column(String(200), nullable=False)
cache_open_send_queues = Column(Boolean(), nullable=False)
cache_open_receive_queues = Column(Boolean(), nullable=False)
use_shared_connections = Column(Boolean(), nullable=False)
dynamic_queue_template = Column(String(200), nullable=False, server_default='SYSTEM.DEFAULT.MODEL.QUEUE') # We're not actually using it yet
ssl = Column(Boolean(), nullable=False)
ssl_cipher_spec = Column(String(200))
ssl_key_repository = Column(String(200))
needs_mcd = Column(Boolean(), nullable=False)
use_jms = Column(Boolean(), nullable=False)
max_chars_printed = Column(Integer, nullable=False)
username = Column(String(100), nullable=True)
password = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('wmq_conn_defs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, host=None, port=None, queue_manager=None, channel=None, cache_open_send_queues=None,
cache_open_receive_queues=None, use_shared_connections=None, ssl=None, ssl_cipher_spec=None, ssl_key_repository=None,
needs_mcd=None, max_chars_printed=None, cluster_id=None, cluster=None, username=None, password=None, use_jms=None):
self.id = id
self.name = name
self.host = host
self.queue_manager = queue_manager
self.channel = channel
self.port = port
self.cache_open_receive_queues = cache_open_receive_queues
self.cache_open_send_queues = cache_open_send_queues
self.use_shared_connections = use_shared_connections
self.ssl = ssl
self.ssl_cipher_spec = ssl_cipher_spec
self.ssl_key_repository = ssl_key_repository
self.needs_mcd = needs_mcd
self.max_chars_printed = max_chars_printed
self.cluster_id = cluster_id
self.cluster = cluster
self.username = username
self.password = password
self.use_jms = use_jms
# ################################################################################################################################
class OutgoingAMQP(Base):
""" An outgoing AMQP connection.
"""
__tablename__ = 'out_amqp'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('out_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
delivery_mode = Column(SmallInteger(), nullable=False)
priority = Column(SmallInteger(), server_default=str(AMQP.DEFAULT.PRIORITY), nullable=False)
content_type = Column(String(200), nullable=True)
content_encoding = Column(String(200), nullable=True)
expiration = Column(Integer(), nullable=True)
user_id = Column(String(200), nullable=True)
app_id = Column(String(200), nullable=True)
pool_size = Column(SmallInteger(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def_id = Column(Integer, ForeignKey('conn_def_amqp.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefAMQP, backref=backref('out_conns_amqp', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, delivery_mode=None, priority=None, content_type=None,
content_encoding=None, expiration=None, user_id=None, app_id=None, def_id=None, delivery_mode_text=None,
def_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.delivery_mode = delivery_mode
self.priority = priority
self.content_type = content_type
self.content_encoding = content_encoding
self.expiration = expiration
self.user_id = user_id
self.app_id = app_id
self.def_id = def_id
self.delivery_mode_text = delivery_mode_text # Not used by the DB
self.def_name = def_name # Not used by the DB
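# A hypothetical sketch (all values illustrative) of pairing an outgoing AMQP connection
# with its connection definition via the def_ relationship declared above.
if 0:
    conn_def = ConnDefAMQP(name='amqp.def.1', host='localhost', port=5672, vhost='/',
        username='zato', password='<password>', frame_max=131072, heartbeat=30)
    out_conn = OutgoingAMQP(name='out.amqp.1', is_active=True, delivery_mode=2,
        priority=AMQP.DEFAULT.PRIORITY)
    out_conn.def_ = conn_def # Links the connection to its definition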
# ################################################################################################################################
class OutgoingFTP(Base):
""" An outgoing FTP connection.
"""
__tablename__ = 'out_ftp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_ftp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
user = Column(String(200), nullable=True)
password = Column(String(200), nullable=True)
acct = Column(String(200), nullable=True)
timeout = Column(Integer, nullable=True)
port = Column(Integer, server_default=str(FTP_PORT), nullable=False)
dircache = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_ftp', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, host=None, user=None, password=None, acct=None, timeout=None,
port=None, dircache=None, cluster_id=None):
self.id = id
self.name = name
self.is_active = is_active
self.host = host
self.user = user
self.password = password
self.acct = acct
self.timeout = timeout
self.port = port
self.dircache = dircache
self.cluster_id = cluster_id
# ################################################################################################################################
class OutgoingOdoo(Base):
""" An outgoing Odoo connection.
"""
__tablename__ = 'out_odoo'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_odoo_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False, server_default=str(ODOO.DEFAULT.PORT))
user = Column(String(200), nullable=False)
database = Column(String(200), nullable=False)
protocol = Column(String(200), nullable=False)
pool_size = Column(Integer(), nullable=False, server_default=str(ODOO.DEFAULT.POOL_SIZE))
password = Column(String(400), nullable=False)
client_type = Column(String(40), nullable=False, server_default=str(ODOO.CLIENT_TYPE.OPENERP_CLIENT_LIB))
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_odoo', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
self.protocol_name = None # Not used by the DB
# ################################################################################################################################
class OutgoingSAP(Base):
""" An outgoing SAP RFC connection.
"""
__tablename__ = 'out_sap'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_sap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
sysnr = Column(String(3), nullable=True, server_default=str(SAP.DEFAULT.INSTANCE))
user = Column(String(200), nullable=False)
client = Column(String(4), nullable=False)
sysid = Column(String(4), nullable=False)
password = Column(String(400), nullable=False)
pool_size = Column(Integer(), nullable=False, server_default=str(SAP.DEFAULT.POOL_SIZE))
router = Column(String(400), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_sap', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class OutgoingSTOMP(Base):
""" An outgoing STOMP connection.
"""
__tablename__ = 'out_stomp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_stomp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=True, server_default=STOMP.DEFAULT.USERNAME)
password = Column(String(200), nullable=True)
address = Column(String(200), nullable=False, server_default=STOMP.DEFAULT.ADDRESS)
proto_version = Column(String(20), nullable=False, server_default=STOMP.DEFAULT.PROTOCOL)
timeout = Column(Integer(), nullable=False, server_default=str(STOMP.DEFAULT.TIMEOUT))
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_stomp', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class OutgoingWMQ(Base):
""" An outgoing IBM MQ connection.
"""
__tablename__ = 'out_wmq'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('out_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
delivery_mode = Column(SmallInteger(), nullable=False)
priority = Column(SmallInteger(), server_default=str(WMQ_DEFAULT_PRIORITY), nullable=False)
expiration = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def_id = Column(Integer, ForeignKey('conn_def_wmq.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefWMQ, backref=backref('out_conns_wmq', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, delivery_mode=None, priority=None, expiration=None, def_id=None,
cluster=None, delivery_mode_text=None, def_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.delivery_mode = delivery_mode
self.priority = priority
self.expiration = expiration
self.def_id = def_id
self.cluster = cluster
self.delivery_mode_text = delivery_mode_text # Not used by the DB
self.def_name = def_name # Not used by DB
self.def_name_full_text = None # Not used by DB
# ################################################################################################################################
class OutgoingZMQ(Base):
""" An outgoing Zero MQ connection.
"""
__tablename__ = 'out_zmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_zmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
address = Column(String(200), nullable=False)
socket_type = Column(String(20), nullable=False)
socket_method = Column(String(20), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_zmq', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, address=None, socket_type=None, cluster_id=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.socket_type = socket_type
self.address = address
self.cluster_id = cluster_id
self.cluster = cluster
# ################################################################################################################################
class ChannelAMQP(Base):
""" An incoming AMQP connection.
"""
__tablename__ = 'channel_amqp'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('channel_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
queue = Column(String(200), nullable=False)
consumer_tag_prefix = Column(String(200), nullable=False)
pool_size = Column(Integer, nullable=False)
ack_mode = Column(String(20), nullable=False)
prefetch_count = Column(Integer, nullable=False)
data_format = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_amqp', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_amqp.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefAMQP, backref=backref('channels_amqp', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, queue=None, consumer_tag_prefix=None, def_id=None, def_name=None,
service_name=None, data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.queue = queue
self.consumer_tag_prefix = consumer_tag_prefix
self.def_id = def_id
self.def_name = def_name # Not used by the DB
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class ChannelSTOMP(Base):
""" An incoming STOMP connection.
"""
__tablename__ = 'channel_stomp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('channel_stomp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=True, server_default=STOMP.DEFAULT.USERNAME)
password = Column(String(200), nullable=True)
address = Column(String(200), nullable=False, server_default=STOMP.DEFAULT.ADDRESS)
proto_version = Column(String(20), nullable=False, server_default=STOMP.DEFAULT.PROTOCOL)
timeout = Column(Integer(), nullable=False, server_default=str(STOMP.DEFAULT.TIMEOUT))
sub_to = Column(Text, nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_stomp', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('channels_stomp', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class ChannelWMQ(Base):
""" An incoming IBM MQ connection.
"""
__tablename__ = 'channel_wmq'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('channel_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
queue = Column(String(200), nullable=False)
data_format = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_wmq', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_wmq.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefWMQ, backref=backref('channels_wmq', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, queue=None, def_id=None, def_name=None, service_name=None,
data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.queue = queue
self.def_id = def_id
self.def_name = def_name # Not used by the DB
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class ChannelZMQ(Base):
""" An incoming Zero MQ connection.
"""
__tablename__ = 'channel_zmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('channel_zmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
address = Column(String(200), nullable=False)
socket_type = Column(String(20), nullable=False)
sub_key = Column(String(200), nullable=True)
data_format = Column(String(20), nullable=True)
socket_method = Column(String(20), nullable=False)
pool_strategy = Column(String(20), nullable=False)
service_source = Column(String(20), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_zmq', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('channels_zmq', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, address=None, socket_type=None, socket_type_text=None, sub_key=None,
service_name=None, data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.address = address
self.socket_type = socket_type
self.socket_type_text = socket_type_text # Not used by the DB
self.sub_key = sub_key
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class DeploymentPackage(Base):
""" A package to be deployed onto a server, either a plain .py/.pyw or
a Distutils2 archive.
"""
__tablename__ = 'deployment_package'
id = Column(Integer, Sequence('depl_package_seq'), primary_key=True)
deployment_time = Column(DateTime(), nullable=False)
details = Column(String(2000), nullable=False)
payload_name = Column(String(200), nullable=False)
payload = Column(LargeBinary(5000000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=False)
server = relationship(
Server, backref=backref('originating_deployment_packages',
order_by=deployment_time, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, deployment_time=None, details=None, payload_name=None, payload=None):
self.id = id
self.deployment_time = deployment_time
self.details = details
self.payload_name = payload_name
self.payload = payload
# ################################################################################################################################
class DeploymentStatus(Base):
""" Whether a server has already deployed a given package.
"""
__tablename__ = 'deployment_status'
__table_args__ = (UniqueConstraint('package_id', 'server_id'), {})
id = Column(Integer, Sequence('depl_status_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
package_id = Column(
Integer, ForeignKey('deployment_package.id', ondelete='CASCADE'), nullable=False, primary_key=False)
package = relationship(
DeploymentPackage, backref=backref('deployment_status_list', order_by=package_id, cascade='all, delete, delete-orphan'))
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=False)
server = relationship(
Server, backref=backref('deployment_status_list', order_by=server_id, cascade='all, delete, delete-orphan'))
# See zato.common.DEPLOYMENT_STATUS
status = Column(String(20), nullable=False)
status_change_time = Column(DateTime(), nullable=False)
def __init__(self, package_id=None, server_id=None, status=None, status_change_time=None):
self.package_id = package_id
self.server_id = server_id
self.status = status
self.status_change_time = status_change_time
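# What follows is an illustrative sketch only, not part of the original model
# definitions - given an already-configured SQLAlchemy session, it finds packages
# for which a particular server has not recorded any deployment status row yet.
# The function name and the `session` parameter are hypothetical.
def _example_packages_without_status(session, server_id):
    # A sub-select of package IDs the server already has a status row for ..
    already_seen = session.query(DeploymentStatus.package_id).\
        filter(DeploymentStatus.server_id==server_id)
    # .. and all remaining packages, i.e. ones still awaiting deployment on that server.
    return session.query(DeploymentPackage).\
        filter(~DeploymentPackage.id.in_(already_seen)).\
        all()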
# ################################################################################################################################
class MsgNamespace(Base):
""" A message namespace, used in XPath, for instance.
"""
__tablename__ = 'msg_ns'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_ns_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('namespaces', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
# ################################################################################################################################
class XPath(Base):
""" An XPath expression to run against XML messages.
"""
__tablename__ = 'msg_xpath'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_xpath_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(1500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('xpaths', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
# ################################################################################################################################
class JSONPointer(Base):
""" An XPath-list expression to run against JSON messages.
"""
__tablename__ = 'msg_json_pointer'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_json_pointer_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(1500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('json_pointers', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
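# For reference only - `value` above holds a JSON Pointer expression (RFC 6901),
# e.g. '/order/items/0/price', in the same way that XPath.value holds an XPath
# expression such as '//order/item[1]/price'. Both sample expressions are illustrative.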
# ################################################################################################################################
class OpenStackSwift(Base):
""" A connection to OpenStack's Swift (no longer used, to be removed).
"""
__tablename__ = 'os_swift'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('os_swift_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
pool_size = Column(Integer, nullable=False)
auth_url = Column(String(200), nullable=False)
auth_version = Column(String(200), nullable=False)
user = Column(String(200), nullable=True)
secret_key = Column(String(200), nullable=True)
retries = Column(Integer, nullable=False)
is_snet = Column(Boolean(), nullable=False)
starting_backoff = Column(Integer, nullable=False)
max_backoff = Column(Integer, nullable=False)
tenant_name = Column(String(200), nullable=True)
should_validate_cert = Column(Boolean(), nullable=False)
cacert = Column(String(200), nullable=True)
should_retr_ratelimit = Column(Boolean(), nullable=False)
needs_tls_compr = Column(Boolean(), nullable=False)
custom_options = Column(String(2000), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('openstack_swift_conns', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, auth_url=None, auth_version=None, user=None, key=None, retries=None,
is_snet=None, starting_backoff=None, max_backoff=None, tenant_name=None, should_validate_cert=None,
cacert=None, should_retr_ratelimit=None, needs_tls_compr=None, custom_options=None):
self.id = id
self.name = name
self.is_active = is_active
self.auth_url = auth_url
self.auth_version = auth_version
self.user = user
self.secret_key = key # The corresponding column is secret_key; the keyword remains 'key'
self.retries = retries
self.is_snet = is_snet
self.starting_backoff = starting_backoff
self.max_backoff = max_backoff
self.tenant_name = tenant_name
self.should_validate_cert = should_validate_cert
self.cacert = cacert
self.should_retr_ratelimit = should_retr_ratelimit
self.needs_tls_compr = needs_tls_compr
self.custom_options = custom_options
# ################################################################################################################################
class AWSS3(Base):
""" An outgoing connection to AWS S3.
"""
__tablename__ = 'aws_s3'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('aws_s3_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
pool_size = Column(Integer, nullable=False, default=CLOUD.AWS.S3.DEFAULTS.POOL_SIZE)
address = Column(String(200), nullable=False, default=CLOUD.AWS.S3.DEFAULTS.ADDRESS)
debug_level = Column(Integer, nullable=False, default=CLOUD.AWS.S3.DEFAULTS.DEBUG_LEVEL)
suppr_cons_slashes = Column(Boolean(), nullable=False, default=True)
content_type = Column(String(200), nullable=False, default=CLOUD.AWS.S3.DEFAULTS.CONTENT_TYPE)
metadata_ = Column(String(2000), nullable=True) # Can't be 'metadata' because this is reserved to SQLAlchemy
bucket = Column(String(2000), nullable=True)
encrypt_at_rest = Column(Boolean(), nullable=False, default=False)
storage_class = Column(String(200), nullable=False, default=CLOUD.AWS.S3.STORAGE_CLASS.DEFAULT)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
security = relationship(SecurityBase, backref=backref('aws_s3_conns', order_by=is_active, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('aws_s3_conns', order_by=name, cascade='all, delete, delete-orphan'))
def to_json(self):
return to_json(self)
# ################################################################################################################################
class Notification(Base):
""" A base class for all notifications, be it cloud, FTP-based or others.
"""
__tablename__ = 'notif'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
__mapper_args__ = {'polymorphic_on': 'notif_type'}
id = Column(Integer, Sequence('sec_base_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
notif_type = Column(String(45), nullable=False)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
interval = Column(Integer, nullable=False, default=NOTIF.DEFAULT.CHECK_INTERVAL)
name_pattern = Column(String(2000), nullable=True, default=NOTIF.DEFAULT.NAME_PATTERN)
name_pattern_neg = Column(Boolean(), nullable=True, default=False)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
get_data = Column(Boolean(), nullable=True, default=False)
get_data_patt = Column(String(2000), nullable=True, default=NOTIF.DEFAULT.GET_DATA_PATTERN)
get_data_patt_neg = Column(Boolean(), nullable=True, default=False)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('notification_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('notification_list', order_by=name, cascade='all, delete, delete-orphan'))
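# An illustrative sketch, not part of the original models - because the base class
# declares `polymorphic_on` above and each subclass declares its own
# `polymorphic_identity`, querying the base class returns instances of the concrete
# subclasses. The function name and the `session` parameter are hypothetical.
def _example_notifications_by_type(session, cluster_id):
    out = {}
    query = session.query(Notification).\
        filter(Notification.cluster_id==cluster_id)
    for item in query.all():
        # E.g. rows whose notif_type is 'sql' come back as NotificationSQL instances
        out.setdefault(item.notif_type, []).append(item)
    return out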
# ################################################################################################################################
class NotificationOpenStackSwift(Notification):
""" Stores OpenStack Swift notifications (no longer used).
"""
__tablename__ = 'notif_os_swift'
__mapper_args__ = {'polymorphic_identity': 'openstack_swift'}
id = Column(Integer, ForeignKey('notif.id'), primary_key=True)
containers = Column(String(16380), nullable=False)
def_id = Column(Integer, ForeignKey('os_swift.id'), primary_key=True)
definition = relationship(
OpenStackSwift, backref=backref('notif_oss_list', order_by=id, cascade='all, delete, delete-orphan'))
def to_json(self):
return to_json(self)
# ################################################################################################################################
class NotificationSQL(Notification):
""" Stores SQL notifications.
"""
__tablename__ = 'notif_sql'
__mapper_args__ = {'polymorphic_identity': 'sql'}
id = Column(Integer, ForeignKey('notif.id'), primary_key=True)
query = Column(Text, nullable=False)
def_id = Column(Integer, ForeignKey('sql_pool.id'), primary_key=True)
definition = relationship(
SQLConnectionPool, backref=backref('notif_sql_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class CassandraConn(Base):
""" Connections to Cassandra.
"""
__tablename__ = 'conn_def_cassandra'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_cassandra_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
contact_points = Column(String(400), nullable=False, default=CASSANDRA.DEFAULT.CONTACT_POINTS)
port = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.PORT)
exec_size = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.EXEC_SIZE)
proto_version = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.PROTOCOL_VERSION)
cql_version = Column(Integer, nullable=True)
default_keyspace = Column(String(400), nullable=False)
username = Column(String(200), nullable=True)
password = Column(String(200), nullable=True)
tls_ca_certs = Column(String(200), nullable=True)
tls_client_cert = Column(String(200), nullable=True)
tls_client_priv_key = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cassandra_conn_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class ElasticSearch(Base):
__tablename__ = 'search_es'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('search_es_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
hosts = Column(String(400), nullable=False)
timeout = Column(Integer(), nullable=False)
body_as = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('search_es_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class Solr(Base):
__tablename__ = 'search_solr'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('search_solr_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
address = Column(String(400), nullable=False)
timeout = Column(Integer(), nullable=False)
ping_path = Column(String(40), nullable=False)
options = Column(String(800), nullable=True)
pool_size = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('search_solr_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class CassandraQuery(Base):
""" Cassandra query templates.
"""
__tablename__ = 'query_cassandra'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('query_cassandra_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
value = Column(LargeBinary(40000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cassandra_queries', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_cassandra.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(CassandraConn, backref=backref('cassandra_queries', cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class SMTP(Base):
__tablename__ = 'email_smtp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('email_smtp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(400), nullable=False)
port = Column(Integer(), nullable=False)
timeout = Column(Integer(), nullable=False)
is_debug = Column(Boolean(), nullable=False)
username = Column(String(400), nullable=True)
password = Column(String(400), nullable=True)
mode = Column(String(20), nullable=False)
ping_address = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('smtp_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class IMAP(Base):
__tablename__ = 'email_imap'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('email_imap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(400), nullable=False)
port = Column(Integer(), nullable=False)
timeout = Column(Integer(), nullable=False)
debug_level = Column(Integer(), nullable=False)
username = Column(String(400), nullable=True)
password = Column(String(400), nullable=True)
mode = Column(String(20), nullable=False)
get_criteria = Column(String(2000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('imap_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACRole(Base):
""" All the roles known within a particular cluster.
"""
__tablename__ = 'rbac_role'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_role_seq'), primary_key=True)
name = Column(String(200), nullable=False)
parent_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=True)
parent = relationship('RBACRole', backref=backref('children'), remote_side=[id])
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_roles', order_by=name, cascade='all, delete, delete-orphan'))
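# An illustrative sketch, not part of the original models - the self-referential
# `parent` relationship above (note remote_side=[id]) forms a role tree that can
# be walked upwards like this. The function name is hypothetical.
def _example_role_path(role):
    # Collects the names of the role and all its ancestors, root first
    out = []
    while role:
        out.append(role.name)
        role = role.parent
    return list(reversed(out))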
# ################################################################################################################################
class RBACPermission(Base):
""" Permissions defined in a given cluster.
"""
__tablename__ = 'rbac_perm'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_perm_seq'), primary_key=True)
name = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_permissions', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACClientRole(Base):
""" Mappings between clients and roles they have.
"""
__tablename__ = 'rbac_client_role'
__table_args__ = (UniqueConstraint('client_def', 'role_id', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_cli_rol_seq'), primary_key=True)
name = Column(String(400), nullable=False)
client_def = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
role_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=False)
role = relationship(RBACRole, backref=backref('rbac_client_roles', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_client_roles', order_by=client_def, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACRolePermission(Base):
""" Mappings between roles and permissions they have on given services.
"""
__tablename__ = 'rbac_role_perm'
__table_args__ = (UniqueConstraint('role_id', 'perm_id', 'service_id', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_role_perm_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
role_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=False)
role = relationship(RBACRole, backref=backref('rbac_role_perms', order_by=id, cascade='all, delete, delete-orphan'))
perm_id = Column(Integer, ForeignKey('rbac_perm.id', ondelete='CASCADE'), nullable=False)
perm = relationship(RBACPermission, backref=backref('rbac_role_perms', order_by=id, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship('Service', backref=backref('role_perm', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_role_permissions', order_by=id, cascade='all, delete, delete-orphan'))
def get_name(self):
return '{}/{}/{}/{}'.format(self.id, self.role_id, self.perm_id, self.service_id)
# ################################################################################################################################
class KVData(Base):
""" Key/value data table.
"""
__tablename__ = 'kv_data'
__table_args__ = (Index('key_clust_id_idx', 'key', 'cluster_id', unique=True, mysql_length={'key':767}),)
id = Column(Integer, Sequence('kv_data_id_seq'), primary_key=True)
key = Column(LargeBinary(), nullable=False)
value = Column(LargeBinary(), nullable=True)
data_type = Column(String(200), nullable=False, default='text')
creation_time = Column(DateTime(), nullable=False)
expiry_time = Column(DateTime(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=True)
cluster = relationship(Cluster, backref=backref('kv_data', order_by=key, cascade='all, delete, delete-orphan'))
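# An illustrative sketch, not part of the original models - stores a key/value
# pair with an optional TTL, matching the creation_time/expiry_time columns above.
# Keys and values are binary (cf. LargeBinary above). The function name and the
# `session` parameter are hypothetical.
def _example_kv_set(session, cluster_id, key, value, ttl=None):
    from datetime import datetime, timedelta
    now = datetime.utcnow()
    item = KVData()
    item.key = key
    item.value = value
    item.creation_time = now
    item.expiry_time = now + timedelta(seconds=ttl) if ttl else None
    item.cluster_id = cluster_id
    session.add(item)
    session.commit()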
# ################################################################################################################################
class ChannelWebSocket(Base):
""" A WebSocket connection definition.
"""
__tablename__ = 'channel_web_socket'
__table_args__ = (UniqueConstraint('name', 'cluster_id'),
UniqueConstraint('address', 'cluster_id'), {})
id = cast_('int', Column(Integer, Sequence('web_socket_chan_seq'), primary_key=True))
name = cast_('str', Column(String(200), nullable=False))
is_active = cast_('bool', Column(Boolean(), nullable=False))
is_internal = cast_('bool', Column(Boolean(), nullable=False))
is_out = cast_('bool', Column(Boolean(), nullable=False, default=sa_false()))
address = cast_('str', Column(String(200), nullable=False))
data_format = cast_('str', Column(String(20), nullable=False))
new_token_wait_time = cast_('int', Column(Integer(), nullable=False))
token_ttl = cast_('int', Column(Integer(), nullable=False))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
service_id = cast_('intnone', Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True))
service = relationship('Service', backref=backref('web_socket', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref('web_socket_list', order_by=name, cascade='all, delete, delete-orphan'))
security_id = cast_('intnone', Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True))
def __init__(self, id=None, name=None, is_active=None, is_internal=None, address=None, data_format=None,
new_token_wait_time=None, token_ttl=None, service_id=None, service=None, cluster_id=None, cluster=None,
security_id=None, security=None):
self.id = id
self.name = name
self.is_active = is_active
self.is_internal = is_internal
self.address = address
self.data_format = data_format
self.new_token_wait_time = new_token_wait_time
self.token_ttl = token_ttl
self.service_id = service_id
self.service = service
self.cluster_id = cluster_id
self.cluster = cluster
self.security_id = security_id
self.security = security
self.service_name = None # Not used by DB
self.sec_type = None # Not used by DB
# ################################################################################################################################
class WebSocketClient(Base):
""" An active WebSocket client - currently connected to a Zato server process.
"""
__tablename__ = 'web_socket_client'
__table_args__ = (
Index('wscl_pub_client_idx', 'cluster_id', 'pub_client_id', unique=True),
Index('wscl_cli_ext_n_idx', 'cluster_id', 'ext_client_name', unique=False),
Index('wscl_cli_ext_i_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('wscl_pr_addr_idx', 'cluster_id', 'peer_address', unique=False),
Index('wscl_pr_fqdn_idx', 'cluster_id', 'peer_fqdn', unique=False),
{})
# This ID is for SQL
id = cast_('int', Column(Integer, Sequence('web_socket_cli_seq'), primary_key=True))
is_internal = cast_('bool', Column(Boolean(), nullable=False))
# This one is assigned by Zato
pub_client_id = cast_('str', Column(String(200), nullable=False))
# These are assigned by clients themselves
ext_client_id = cast_('str', Column(String(200), nullable=False))
ext_client_name = cast_('strnone', Column(String(200), nullable=True))
local_address = cast_('str', Column(String(400), nullable=False))
peer_address = cast_('str', Column(String(400), nullable=False))
peer_fqdn = cast_('str', Column(String(400), nullable=False))
connection_time = cast_('datetime', Column(DateTime, nullable=False))
last_seen = cast_('datetime', Column(DateTime, nullable=False))
server_proc_pid = cast_('int', Column(Integer, nullable=False))
server_name = cast_('str', Column(String(200), nullable=False)) # References server.name
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
channel_id = cast_('int', Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=False))
channel = relationship(
ChannelWebSocket, backref=backref('clients', order_by=local_address, cascade='all, delete, delete-orphan'))
server_id = cast_('int', Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False))
server = relationship(
Server, backref=backref('server_web_socket_clients', order_by=local_address, cascade='all, delete, delete-orphan'))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(
Cluster, backref=backref('web_socket_client_list', order_by=last_seen, cascade='all, delete, delete-orphan'))
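# An illustrative sketch, not part of the original models - looks up currently
# connected clients by their self-assigned external ID, making use of the
# wscl_cli_ext_i_idx index above. More than one row may match because
# ext_client_id is chosen by remote clients and need not be unique.
# The function name and the `session` parameter are hypothetical.
def _example_find_ws_clients(session, cluster_id, ext_client_id):
    return session.query(WebSocketClient).\
        filter(WebSocketClient.cluster_id==cluster_id).\
        filter(WebSocketClient.ext_client_id==ext_client_id).\
        all()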
# ################################################################################################################################
class WebSocketClientPubSubKeys(Base):
""" Associates currently active WebSocket clients with subscription keys.
"""
__tablename__ = 'web_socket_cli_ps_keys'
__table_args__ = (
Index('wscl_psk_cli', 'cluster_id', 'client_id', unique=False),
Index('wscl_psk_sk', 'cluster_id', 'sub_key', unique=False),
{})
id = cast_('int', Column(Integer, Sequence('web_socket_cli_ps_seq'), primary_key=True))
# The same as in web_socket_sub.sub_key
sub_key = cast_('str', Column(String(200), nullable=False))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
client_id = cast_('int', Column(Integer, ForeignKey('web_socket_client.id', ondelete='CASCADE'), nullable=False))
client = relationship(
WebSocketClient, backref=backref('web_socket_cli_ps_keys', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref(
'web_socket_cli_ps_keys', order_by=id, cascade='all, delete, delete-orphan'))
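# An illustrative sketch, not part of the original models - given a subscription
# key, finds the WebSocket clients currently holding it by joining through the
# association table above. The function name and `session` are hypothetical.
def _example_clients_for_sub_key(session, cluster_id, sub_key):
    return session.query(WebSocketClient).\
        filter(WebSocketClient.id==WebSocketClientPubSubKeys.client_id).\
        filter(WebSocketClientPubSubKeys.sub_key==sub_key).\
        filter(WebSocketClientPubSubKeys.cluster_id==cluster_id).\
        all()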
# ################################################################################################################################
class WebSocketSubscription(Base):
""" Persistent subscriptions pertaining to a given long-running, possibly restartable, WebSocket connection.
"""
__tablename__ = 'web_socket_sub'
__table_args__ = (
Index('wssub_channel_idx', 'cluster_id', 'channel_id', unique=False),
Index('wssub_subkey_idx', 'cluster_id', 'sub_key', unique=True),
Index('wssub_extcli_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('wssub_subkey_chan_idx', 'cluster_id', 'sub_key', 'channel_id', unique=True),
{})
id = cast_('int', Column(Integer, Sequence('web_socket_sub_seq'), primary_key=True))
is_internal = cast_('bool', Column(Boolean(), nullable=False))
ext_client_id = cast_('str', Column(String(200), nullable=False))
# Each transient, per-connection, web_socket_cli_ps_keys.sub_key will refer to this column
sub_key = cast_('str', Column(String(200), nullable=False))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
channel_id = cast_('intnone', Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True))
channel = relationship(
ChannelWebSocket, backref=backref('web_socket_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
subscription_id = cast_('int', Column(Integer, ForeignKey('pubsub_sub.id', ondelete='CASCADE'), nullable=False))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref('web_socket_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class PubSubEndpoint(Base):
""" An individual endpoint participating in publish/subscribe scenarios.
"""
__tablename__ = 'pubsub_endpoint'
__table_args__ = (
Index('pubsb_endp_clust_idx', 'cluster_id', unique=False),
Index('pubsb_endp_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_endp_name_idx', 'cluster_id', 'name', unique=True),
UniqueConstraint('cluster_id', 'security_id'),
UniqueConstraint('cluster_id', 'service_id'),
UniqueConstraint('cluster_id', 'ws_channel_id'),
{})
id = cast_('int', Column(Integer, Sequence('pubsub_endp_seq'), primary_key=True))
name = cast_('str', Column(String(200), nullable=False))
is_internal = cast_('bool', Column(Boolean(), nullable=False, server_default=sa_false()))
is_active = cast_('bool', Column(Boolean(), nullable=False, server_default=sa_true())) # Unused for now
endpoint_type = cast_('str', Column(String(40), nullable=False)) # WSX, REST and other types
last_seen = cast_('intnone', Column(BigInteger(), nullable=True))
last_pub_time = cast_('intnone', Column(BigInteger(), nullable=True))
last_sub_time = cast_('intnone', Column(BigInteger(), nullable=True))
last_deliv_time = cast_('intnone', Column(BigInteger(), nullable=True))
# Endpoint's role, e.g. publisher, subscriber or both
role = cast_('str', Column(String(40), nullable=False))
# Tags describing this endpoint
tags = cast_('strnone', Column(Text, nullable=True)) # Unused for now
# Patterns for topics that this endpoint may subscribe to
topic_patterns = cast_('strnone', Column(Text, nullable=True))
# Patterns for tags of publishers
pub_tag_patterns = cast_('strnone', Column(Text, nullable=True)) # Unused for now
# Patterns for tags of messages
message_tag_patterns = cast_('strnone', Column(Text, nullable=True)) # Unused for now
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
# Endpoint is a service
service_id = cast_('intnone', Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True))
service = relationship('Service', backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
# Identifies the endpoint through its security definition, e.g. a username/password combination.
security_id = cast_('intnone', Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True))
security = relationship(SecurityBase, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
# Identifies the endpoint through a reference to a generic connection
gen_conn_id = cast_('intnone', Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True))
gen_conn = relationship('GenericConn', backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
# Identifies the endpoint through a long-running WebSockets channel
ws_channel_id = cast_('intnone', Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True))
ws_channel = relationship(
ChannelWebSocket, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
sec_type = None # Not used by DB
sec_name = None # Not used by DB
ws_channel_name = None # Not used by DB
service_name = None # Not used by DB
endpoint_type_human = None # Not used by DB
# ################################################################################################################################
class PubSubTopic(Base):
""" A topic in pub/sub.
"""
__tablename__ = 'pubsub_topic'
__table_args__ = (
Index('pubsb_tp_clust_idx', 'cluster_id', unique=False),
Index('pubsb_tp_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_tp_name_idx', 'cluster_id', 'name', unique=True),
{})
id = cast_('int', Column(Integer, Sequence('pubsub_topic_seq'), primary_key=True))
name = cast_('str', Column(String(200), nullable=False))
is_active = cast_('bool', Column(Boolean(), nullable=False))
is_internal = cast_('bool', Column(Boolean(), nullable=False, default=False))
max_depth_gd = cast_('int', Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.TOPIC_MAX_DEPTH_GD))
max_depth_non_gd = cast_('int', Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.TOPIC_MAX_DEPTH_NON_GD))
depth_check_freq = cast_('int', Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DEPTH_CHECK_FREQ))
has_gd = cast_('bool', Column(Boolean(), nullable=False)) # Guaranteed delivery
is_api_sub_allowed = cast_('bool', Column(Boolean(), nullable=False))
# How many messages to buffer in RAM before they are actually saved in SQL / pushed to tasks
pub_buffer_size_gd = cast_('int', Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.PUB_BUFFER_SIZE_GD)))
task_sync_interval = cast_('int', Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.TASK_SYNC_INTERVAL)))
task_delivery_interval = cast_('int',
Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.TASK_DELIVERY_INTERVAL)))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
# A hook service invoked during publications to this specific topic
hook_service_id = cast_('intnone', Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref('pubsub_topics', order_by=name, cascade='all, delete, delete-orphan'))
# Not used by DB
ext_client_id = None
last_pub_time = None
pub_time = None
ext_pub_time = None
last_pub_msg_id = None
last_endpoint_id = None
last_endpoint_name = None
last_pub_has_gd = None
last_pub_server_pid = None
last_pub_server_name = None
# ################################################################################################################################
class PubSubEndpointTopic(Base):
""" A list of topics to which a given endpoint has ever published along with metadata about the latest publication.
There is one row for each existing publisher and topic ever in use.
"""
__tablename__ = 'pubsub_endp_topic'
__table_args__ = (
Index('pubsb_endpt_clust_idx', 'cluster_id', unique=False),
Index('pubsb_endpt_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_endpt_msgid_idx', 'cluster_id', 'pub_msg_id', unique=True),
Index('pubsb_endpt_clsendtp_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=True),
{})
id = cast_('int', Column(Integer, Sequence('pubsub_endpt_seq'), primary_key=True))
pub_pattern_matched = cast_('str', Column(Text, nullable=False))
last_pub_time = cast_('float', Column(Numeric(20, 7, asdecimal=False), nullable=False))
pub_msg_id = cast_('str', Column(String(200), nullable=False))
pub_correl_id = cast_('strnone', Column(String(200), nullable=True))
in_reply_to = cast_('strnone', Column(String(200), nullable=True))
ext_client_id = cast_('strnone', Column(Text(), nullable=True))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
endpoint_id = cast_('intnone', Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=True))
endpoint = relationship(
PubSubEndpoint, backref=backref('pubsub_endpoint_topics', order_by=endpoint_id, cascade='all, delete, delete-orphan'))
topic_id = cast_('int', Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False))
topic = relationship(
PubSubTopic, backref=backref('pubsub_endpoint_topics', order_by=topic_id, cascade='all, delete, delete-orphan'))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref('pubsub_endpoint_topics', order_by=cluster_id,
cascade='all, delete, delete-orphan'))
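# An illustrative sketch, not part of the original models - because there is at
# most one row per endpoint/topic pair (cf. pubsb_endpt_clsendtp_idx above),
# recording a publication is an insert-or-update. All names are hypothetical.
def _example_update_last_pub(session, cluster_id, endpoint_id, topic_id, pub_time, pub_msg_id, pattern):
    row = session.query(PubSubEndpointTopic).\
        filter(PubSubEndpointTopic.cluster_id==cluster_id).\
        filter(PubSubEndpointTopic.endpoint_id==endpoint_id).\
        filter(PubSubEndpointTopic.topic_id==topic_id).\
        first()
    if not row:
        row = PubSubEndpointTopic()
        row.cluster_id = cluster_id
        row.endpoint_id = endpoint_id
        row.topic_id = topic_id
        session.add(row)
    row.last_pub_time = pub_time
    row.pub_msg_id = pub_msg_id
    row.pub_pattern_matched = pattern
    session.commit()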
# ################################################################################################################################
class PubSubMessage(Base):
""" An individual message published to a topic.
"""
__tablename__ = 'pubsub_message'
__table_args__ = (
# This index is needed for FKs from other tables,
# otherwise with MySQL we get error 1215 'Cannot add foreign key constraint'
Index('pubsb_msg_pubmsg_id_idx', 'pub_msg_id', unique=True),
Index('pubsb_msg_pubmsg_clu_id_idx', 'cluster_id', 'pub_msg_id', unique=True),
Index('pubsb_msg_inreplyto_id_idx', 'cluster_id', 'in_reply_to', unique=False),
Index('pubsb_msg_correl_id_idx', 'cluster_id', 'pub_correl_id', unique=False),
{})
# For SQL joins
id = cast_('int', Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True))
# Publicly visible message identifier
pub_msg_id = cast_('str', Column(String(200), nullable=False))
# Publicly visible correlation ID
pub_correl_id = cast_('strnone', Column(String(200), nullable=True))
# Publicly visible ID of the message that the current message is a response to
in_reply_to = cast_('strnone', Column(String(200), nullable=True))
# ID of an external client on whose behalf the endpoint published the message
ext_client_id = cast_('strnone', Column(Text(), nullable=True))
# Groups messages that logically belong together, which is useful if multiple
# messages are published with the same timestamp by the same client
# but still need to be ordered correctly.
group_id = cast_('strnone', Column(Text(), nullable=True))
position_in_group = cast_('intnone', Column(Integer, nullable=True))
# What matching pattern allowed an endpoint to publish this message
pub_pattern_matched = cast_('str', Column(Text, nullable=False))
# When the row was created
pub_time = cast_('float', Column(Numeric(20, 7, asdecimal=False), nullable=False))
# When the message was created by the endpoint publishing it
ext_pub_time = cast_('floatnone', Column(Numeric(20, 7, asdecimal=False), nullable=True))
expiration_time = cast_('floatnone', Column(Numeric(20, 7, asdecimal=False), nullable=True))
last_updated = cast_('floatnone', Column(Numeric(20, 7, asdecimal=False), nullable=True))
data = cast_('str', Column(Text(2 * 10 ** 9), nullable=False)) # 2 GB to prompt a promotion to LONGTEXT under MySQL
data_prefix = cast_('str', Column(Text(), nullable=False))
data_prefix_short = cast_('str', Column(String(200), nullable=False))
data_format = cast_('str', Column(String(200), nullable=False, server_default=PUBSUB.DEFAULT.DATA_FORMAT))
mime_type = cast_('str', Column(String(200), nullable=False, server_default=PUBSUB.DEFAULT.MIME_TYPE))
size = cast_('int', Column(Integer, nullable=False))
priority = cast_('int', Column(Integer, nullable=False, server_default=str(PUBSUB.PRIORITY.DEFAULT)))
expiration = cast_('int', Column(BigInteger, nullable=False, server_default='0'))
has_gd = cast_('bool', Column(Boolean(), nullable=False, server_default=sa_true())) # Guaranteed delivery
# Is the message in at least one delivery queue, meaning that there is at least one
# subscriber to whom this message will be sent, so the message is no longer considered
# to be available in the topic for other subscribers to receive it,
# i.e. it can be said that it has already been transported to all subscriber queues (possibly to one only).
is_in_sub_queue = cast_('bool', Column(Boolean(), nullable=False, server_default=sa_false()))
# User-defined arbitrary context data
user_ctx = cast_('strnone', Column(_JSON(), nullable=True))
# Zato-defined arbitrary context data
zato_ctx = cast_('strnone', Column(_JSON(), nullable=True))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
published_by_id = cast_('int', Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=False))
published_by = relationship(
PubSubEndpoint, backref=backref('pubsub_msg_list', order_by=id, cascade='all, delete, delete-orphan'))
topic_id = cast_('intnone', Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=True))
topic = relationship(
PubSubTopic, backref=backref('pubsub_msg_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref('pubsub_messages', order_by=id, cascade='all, delete, delete-orphan'))
pub_time_utc = None # Not used by DB
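# An illustrative sketch, not part of the original models - data_prefix and
# data_prefix_short above hold truncated copies of the payload for quick previews.
# The 200-character limit mirrors the String(200) of data_prefix_short, while the
# longer prefix length is an assumption made for this example only.
def _example_data_prefixes(data, prefix_len=2000, prefix_short_len=200):
    return data[:prefix_len], data[:prefix_short_len]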
# ################################################################################################################################
class PubSubSubscription(Base):
""" Stores high-level information about topics an endpoint subscribes to.
"""
__tablename__ = 'pubsub_sub'
__table_args__ = (
Index('pubsb_sub_clust_idx', 'cluster_id', unique=False),
Index('pubsb_sub_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_sub_clust_endpt_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=False),
Index('pubsb_sub_clust_subk', 'sub_key', unique=True),
{})
id = cast_('int', Column(Integer, Sequence('pubsub_sub_seq'), primary_key=True))
is_internal = cast_('bool', Column(Boolean(), nullable=False, default=False))
creation_time = cast_('float', Column(Numeric(20, 7, asdecimal=False), nullable=False))
sub_key = cast_('str', Column(String(200), nullable=False)) # Externally visible ID of this subscription
sub_pattern_matched = cast_('str', Column(Text, nullable=False))
deliver_by = cast_('strnone', Column(Text, nullable=True)) # Delivery order, e.g. by priority, date etc.
ext_client_id = cast_('strnone', Column(Text, nullable=True)) # Subscriber's ID as it is stored by that external system
is_durable = cast_('bool', Column(Boolean(), nullable=False, default=True)) # For now always True, i.e. survives cluster restarts
has_gd = cast_('bool', Column(Boolean(), nullable=False)) # Guaranteed delivery
active_status = cast_('str', Column(String(200), nullable=False, default=PUBSUB.QUEUE_ACTIVE_STATUS.FULLY_ENABLED.id))
is_staging_enabled = cast_('bool', Column(Boolean(), nullable=False, default=False))
delivery_method = cast_('str', Column(String(200), nullable=False, default=PUBSUB.DELIVERY_METHOD.NOTIFY.id))
delivery_data_format = cast_('str', Column(String(200), nullable=False, default=DATA_FORMAT.JSON))
delivery_endpoint = cast_('strnone', Column(Text, nullable=True))
# This is updated only periodically, e.g. once an hour, rather than each time the subscriber is seen,
# so the value is not the exact time of the last interaction with the subscriber but rather a time,
# accurate to within a certain range (default: 60 minutes), at which an action involving the subscriber
# was last carried out. For WSX subscribers, this value will never be less than their ping timeout.
last_interaction_time = cast_('floatnone', Column(Numeric(20, 7, asdecimal=False), nullable=True))
last_interaction_type = cast_('strnone', Column(String(200), nullable=True))
last_interaction_details = cast_('strnone', Column(Text, nullable=True))
# How many messages to deliver in a single batch for that endpoint
delivery_batch_size = cast_('int', Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_BATCH_SIZE))
# If delivery_batch_size is 1, whether such a single message delivered to the endpoint
# should be sent as-is or wrapped in a single-element list.
wrap_one_msg_in_list = cast_('bool', Column(Boolean(), nullable=False))
# How many bytes to send at most in a single delivery
delivery_max_size = cast_('int', Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_MAX_SIZE)) # Unused for now
# How many times to retry delivery for a single message
delivery_max_retry = cast_('int', Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_MAX_RETRY))
# Whether a failed delivery of a single message should block the entire delivery queue
# until that particular message has been successfully delivered.
delivery_err_should_block = cast_('bool', Column(Boolean(), nullable=False))
# How many seconds to wait on a TCP socket error
wait_sock_err = cast_('int', Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.WAIT_TIME_SOCKET_ERROR))
# How many seconds to wait on an error other than a TCP socket one
wait_non_sock_err = cast_('int', Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.WAIT_TIME_NON_SOCKET_ERROR))
# A hook service invoked before messages are delivered for this specific subscription
hook_service_id = cast_('intnone', Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True))
# REST/POST
out_http_method = cast_('strnone', Column(Text, nullable=True, default='POST')) # E.g. POST or PATCH
# AMQP
amqp_exchange = cast_('strnone', Column(Text, nullable=True))
amqp_routing_key = cast_('strnone', Column(Text, nullable=True))
# Flat files
files_directory_list = cast_('strnone', Column(Text, nullable=True))
# FTP
ftp_directory_list = cast_('strnone', Column(Text, nullable=True))
# SMS - Twilio
sms_twilio_from = cast_('strnone', Column(Text, nullable=True))
sms_twilio_to_list = cast_('strnone', Column(Text, nullable=True))
# SMTP
smtp_subject = cast_('strnone', Column(Text, nullable=True))
smtp_from = cast_('strnone', Column(Text, nullable=True))
smtp_to_list = cast_('strnone', Column(Text, nullable=True))
smtp_body = cast_('strnone', Column(Text, nullable=True))
smtp_is_html = cast_('boolnone', Column(Boolean(), nullable=True))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
topic_id = cast_('int', Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False))
topic = relationship(
PubSubTopic, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
endpoint_id = cast_('intnone', Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=True))
endpoint = relationship(
PubSubEndpoint, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_job_id = cast_('intnone', Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=True))
out_job = relationship(
Job, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_http_soap_id = cast_('intnone', Column(Integer, ForeignKey('http_soap.id', ondelete='CASCADE'), nullable=True))
out_http_soap = relationship(
HTTPSOAP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_smtp_id = cast_('intnone', Column(Integer, ForeignKey('email_smtp.id', ondelete='CASCADE'), nullable=True))
out_smtp = relationship(
SMTP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_amqp_id = cast_('intnone', Column(Integer, ForeignKey('out_amqp.id', ondelete='CASCADE'), nullable=True))
out_amqp = relationship(
OutgoingAMQP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_gen_conn_id = cast_('intnone', Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True))
out_gen_conn = relationship(
'GenericConn', backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
ws_channel_id = cast_('intnone', Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True))
ws_channel = relationship(
ChannelWebSocket, backref=backref('pubsub_ws_subs', order_by=id, cascade='all, delete, delete-orphan'))
# Server that will run the delivery task for this subscription
server_id = cast_('intnone', Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=True))
server = relationship(
Server, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = cast_('intnone', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=True))
cluster = relationship(
Cluster, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
name = None # Not used by DB
topic_name = None # Not used by DB
total_depth = None # Not used by DB
current_depth_gd = None # Not used by DB
current_depth_non_gd = None # Not used by DB
sub_count = None # Not used by DB
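# An illustrative sketch, not part of the original models - creates a minimal GD
# subscription row, relying on the column defaults above for delivery settings.
# Timestamps are numeric (cf. creation_time), hence time.time(). All names,
# including the sample pattern, are hypothetical.
def _example_create_sub(session, cluster_id, topic_id, endpoint_id, sub_key):
    import time
    sub = PubSubSubscription()
    sub.creation_time = time.time()
    sub.sub_key = sub_key
    sub.sub_pattern_matched = 'sub=/example/*'
    sub.has_gd = True
    sub.wrap_one_msg_in_list = True
    sub.delivery_err_should_block = False
    sub.topic_id = topic_id
    sub.endpoint_id = endpoint_id
    sub.cluster_id = cluster_id
    session.add(sub)
    session.commit()
    return sub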
# ################################################################################################################################
class PubSubEndpointEnqueuedMessage(Base):
""" A queue of messages for an individual endpoint subscribed to a topic.
"""
__tablename__ = 'pubsub_endp_msg_queue'
__table_args__ = (
Index('pubsb_enms_q_pubmid_idx', 'cluster_id', 'pub_msg_id', unique=False),
Index('pubsb_enms_q_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_enms_q_endp_idx', 'cluster_id', 'endpoint_id', unique=False),
Index('pubsb_enms_q_subs_idx', 'cluster_id', 'sub_key', unique=False),
Index('pubsb_enms_q_endptp_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=False),
{})
__mapper_args__ = {
'confirm_deleted_rows': False
}
id = cast_('int', Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True))
creation_time = cast_('floatnone', Column(Numeric(20, 7, asdecimal=False), nullable=False)) # When was the message enqueued
delivery_count = cast_('int', Column(Integer, nullable=False, server_default='0'))
last_delivery_time = cast_('floatnone', Column(Numeric(20, 7, asdecimal=False), nullable=True))
is_in_staging = cast_('bool', Column(Boolean(), nullable=False, server_default=sa_false()))
sub_pattern_matched = cast_('str', Column(Text, nullable=False))
# A flag indicating whether this message is deliverable at all - will be set to False
    # after delivery_count reaches the maximum number of retries for the subscription or if a hook service decides so.
is_deliverable = cast_('bool', Column(Boolean(), nullable=False, server_default=sa_true()))
delivery_status = cast_('int', Column(Integer, nullable=False, server_default=str(PUBSUB.DELIVERY_STATUS.INITIALIZED)))
delivery_time = cast_('floatnone', Column(Numeric(20, 7, asdecimal=False), nullable=True))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
pub_msg_id = cast_('str', Column(String(200), ForeignKey('pubsub_message.pub_msg_id', ondelete='CASCADE'), nullable=False))
endpoint_id = cast_('int', Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=False))
endpoint = relationship(PubSubEndpoint,
backref=backref('pubsub_endp_q_list', order_by=id, cascade='all, delete, delete-orphan'))
topic_id = cast_('int', Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False))
topic = relationship(PubSubTopic, backref=backref('pubsub_endp_q_list', order_by=id, cascade='all, delete, delete-orphan'))
sub_key = cast_('str', Column(String(200), ForeignKey('pubsub_sub.sub_key', ondelete='CASCADE'), nullable=False))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref('pubsub_endpoint_queues', order_by=id, cascade='all, delete, delete-orphan'))
queue_name = None # Not used by DB
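# A minimal sketch of how the is_deliverable flag above might be flipped once
# delivery_count crosses a limit; `session` is a SQLAlchemy session provided by
# the caller and `max_retries` is a hypothetical setting, both illustrative only.
def _demo_mark_undeliverable(session, pub_msg_id, max_retries=5):
    msg = session.query(PubSubEndpointEnqueuedMessage).\
        filter(PubSubEndpointEnqueuedMessage.pub_msg_id==pub_msg_id).\
        first()
    # Once the message has been tried too many times, it is no longer deliverable
    if msg and msg.delivery_count >= max_retries:
        msg.is_deliverable = False
        session.commit()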
# ################################################################################################################################
class PubSubEndpointQueueInteraction(Base):
""" A series of interactions with a message queue's endpoint.
"""
__tablename__ = 'pubsub_endp_msg_q_inter'
__table_args__ = (
Index('pubsb_enms_qi_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_enms_qi_endptp_idx', 'cluster_id', 'queue_id', unique=False),
{})
id = cast_('int', Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True))
entry_timestamp = cast_('float', Column(Numeric(20, 7, asdecimal=False), nullable=False)) # When the row was created
inter_type = cast_('str', Column(String(200), nullable=False))
inter_details = cast_('strnone', Column(Text, nullable=True))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
queue_id = cast_('int', Column(Integer, ForeignKey('pubsub_endp_msg_queue.id', ondelete='CASCADE'), nullable=False))
queue = relationship(
PubSubEndpointEnqueuedMessage, backref=backref(
'pubsub_endpoint_queue_interactions', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(
Cluster, backref=backref('pubsub_endpoint_queue_interactions', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class PubSubChannel(Base):
""" An N:N mapping between arbitrary channels and topics to which their messages should be sent.
"""
__tablename__ = 'pubsub_channel'
__table_args__ = (UniqueConstraint('cluster_id', 'conn_id', 'conn_type', 'topic_id'), {})
id = cast_('int', Column(Integer, Sequence('pubsub_channel_seq'), primary_key=True))
is_active = cast_('bool', Column(Boolean(), nullable=False))
is_internal = cast_('bool', Column(Boolean(), nullable=False))
conn_id = cast_('str', Column(String(100), nullable=False))
conn_type = cast_('str', Column(String(100), nullable=False))
# JSON data is here
opaque1 = cast_('strnone', Column(_JSON(), nullable=True))
topic_id = cast_('int', Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False))
topic = relationship(
PubSubTopic, backref=backref('pubsub_channel_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = cast_('int', Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False))
cluster = relationship(Cluster, backref=backref('pubsub_channel_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class SMSTwilio(Base):
""" Outgoing SMS connections with Twilio.
"""
__tablename__ = 'sms_twilio'
__table_args__ = (
UniqueConstraint('name', 'cluster_id'),
{})
id = Column(Integer, Sequence('sms_twilio_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
account_sid = Column(String(200), nullable=False)
auth_token = Column(String(200), nullable=False)
default_from = Column(String(200), nullable=True)
default_to = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sms_twilio_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericObject(Base):
""" A generic data object.
"""
__tablename__ = 'generic_object'
__table_args__ = (
Index('gen_obj_uq_name_type', 'name', 'type_', 'cluster_id', unique=True,
mysql_length={'name':191, 'type_':191}),
Index('gen_obj_par_id', 'cluster_id', 'parent_id', 'parent_type', unique=False,
mysql_length={'parent_id':191, 'parent_type':191}),
Index('gen_obj_cat_id', 'cluster_id', 'category_id', unique=False,
mysql_length={'category_id':191}),
Index('gen_obj_cat_subcat_id', 'cluster_id', 'category_id', 'subcategory_id', unique=False,
mysql_length={'category_id':191, 'subcategory_id':191}),
Index('gen_obj_cat_name', 'cluster_id', 'category_name', unique=False,
mysql_length={'category_name':191}),
Index('gen_obj_cat_subc_name', 'cluster_id', 'category_name', 'subcategory_name', unique=False,
mysql_length={'category_name':191, 'subcategory_name':191}),
Index('gen_obj_par_obj_id', 'cluster_id', 'parent_object_id', unique=False),
{})
id = Column(Integer, Sequence('generic_object_seq'), primary_key=True)
name = Column(Text(191), nullable=False)
type_ = Column(Text(191), nullable=False)
subtype = Column(Text(191), nullable=True)
category_id = Column(Text(191), nullable=True)
subcategory_id = Column(Text(191), nullable=True)
creation_time = Column(DateTime, nullable=False, default=datetime.utcnow)
last_modified = Column(DateTime, nullable=False, default=datetime.utcnow)
category_name = Column(Text(191), nullable=True)
subcategory_name = Column(Text(191), nullable=True)
# This references back to generic objects
parent_object_id = Column(Integer, nullable=True)
# This may reference objects other than the current model
parent_id = Column(Text(191), nullable=True)
parent_type = Column(Text(191), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
generic_conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=True)
generic_conn_def_sec_id = Column(Integer, ForeignKey('generic_conn_def_sec.id', ondelete='CASCADE'), nullable=True)
generic_conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True)
generic_conn_sec_id = Column(Integer, ForeignKey('generic_conn_sec.id', ondelete='CASCADE'), nullable=True)
generic_conn_client_id = Column(Integer, ForeignKey('generic_conn_client.id', ondelete='CASCADE'), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_object_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnDef(Base):
""" Generic connection definitions - with details kept in JSON.
"""
__tablename__ = 'generic_conn_def'
__table_args__ = (
UniqueConstraint('name', 'type_', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_seq'), primary_key=True)
name = Column(String(200), nullable=False)
type_ = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
address = Column(Text(), nullable=True)
port = Column(Integer, nullable=True)
timeout = Column(Integer, nullable=True)
data_format = Column(String(60), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Both are needed because some connections can be duplex
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
version = Column(String(200), nullable=True)
extra = Column(Text(), nullable=True)
pool_size = Column(Integer(), nullable=False)
# This can be used if only one security definition should be assigned to the object
username = Column(String(1000), nullable=True)
username_type = Column(String(45), nullable=True)
secret = Column(String(1000), nullable=True)
secret_type = Column(String(45), nullable=True)
# Is RBAC enabled for the object
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('generic_conn_def_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_def_list', order_by=id, cascade='all, delete, delete-orphan'))
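# A short sketch of the duplex flags above: a definition that both receives and
# sends messages sets is_channel and is_outconn together. All values here are
# illustrative only.
def _demo_duplex_conn_def():
    conn_def = GenericConnDef()
    conn_def.name = 'example.duplex'  # Hypothetical name
    conn_def.type_ = 'example-type'   # Hypothetical type
    conn_def.is_active = True
    conn_def.is_internal = False
    conn_def.is_channel = True        # Accepts incoming messages ..
    conn_def.is_outconn = True        # .. and establishes outgoing ones too.
    conn_def.pool_size = 1
    return conn_def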
# ################################################################################################################################
class GenericConnDefSec(Base):
""" N:N security mappings for generic connection definitions.
"""
__tablename__ = 'generic_conn_def_sec'
__table_args__ = (
UniqueConstraint('conn_def_id', 'sec_base_id', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_sec_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=False)
conn_def = relationship(GenericConnDef, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
sec_base_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
sec_base = relationship(SecurityBase, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConn(Base):
""" Generic connections - with details kept in JSON.
"""
__tablename__ = 'generic_conn'
__table_args__ = (
UniqueConstraint('name', 'type_', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_seq'), primary_key=True)
name = Column(String(200), nullable=False)
type_ = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
address = Column(Text(), nullable=True)
port = Column(Integer, nullable=True)
timeout = Column(Integer, nullable=True)
data_format = Column(String(60), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Both are needed because some connections can be duplex
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
version = Column(String(200), nullable=True)
extra = Column(Text(), nullable=True)
pool_size = Column(Integer(), nullable=False)
# This can be used if only one security definition should be assigned to the object
username = Column(String(1000), nullable=True)
username_type = Column(String(45), nullable=True)
secret = Column(String(1000), nullable=True)
secret_type = Column(String(45), nullable=True)
# Is RBAC enabled for the object
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
# Some connections will have a connection definition assigned
conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=True)
conn_def = relationship(GenericConnDef, backref=backref('generic_conn_def_list',
order_by=id, cascade='all, delete, delete-orphan'))
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('generic_conn_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnSec(Base):
""" N:N security mappings for generic connections.
"""
__tablename__ = 'generic_conn_sec'
__table_args__ = (
UniqueConstraint('conn_id', 'sec_base_id', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_sec_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=False)
conn = relationship(GenericConn, backref=backref('generic_conn_list', order_by=id,
cascade='all, delete, delete-orphan'))
sec_base_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
sec_base = relationship(SecurityBase, backref=backref('generic_conn_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnClient(Base):
""" A live client connection.
"""
__tablename__ = 'generic_conn_client'
__table_args__ = (
Index('gen_conn_cli_idx', 'cluster_id', 'pub_client_id', unique=False),
Index('gen_conn_cli_ext_n_idx', 'cluster_id', 'ext_client_name', unique=False),
Index('gen_conn_cli_ext_i_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('gen_conn_cli_pr_addr_idx', 'cluster_id', 'peer_address', unique=False),
Index('gen_conn_cli_pr_fqdn_idx', 'cluster_id', 'peer_fqdn', unique=False),
{})
# This ID is for SQL
id = Column(Integer, Sequence('generic_conn_client_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False)
# This one is assigned by Zato
pub_client_id = Column(String(200), nullable=False)
# These are assigned by clients themselves
ext_client_id = Column(String(200), nullable=False)
ext_client_name = Column(String(200), nullable=True)
local_address = Column(String(400), nullable=False)
peer_address = Column(String(400), nullable=False)
peer_fqdn = Column(String(400), nullable=False)
connection_time = Column(DateTime, nullable=False)
last_seen = Column(DateTime, nullable=False)
server_proc_pid = Column(Integer, nullable=True)
server_name = Column(String(200), nullable=True) # References server.name
conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=False)
conn = relationship(
GenericConn, backref=backref('clients', order_by=local_address, cascade='all, delete, delete-orphan'))
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=True)
server = relationship(
Server, backref=backref('gen_conn_clients', order_by=local_address, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(
Cluster, backref=backref('gen_conn_clients', order_by=last_seen, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RateLimitState(Base):
""" Rate limiting persistent storage for exact definitions.
"""
__tablename__ = 'rate_limit_state'
__table_args__ = (
Index('rate_lim_obj_idx', 'object_type', 'object_id', 'period', 'last_network', unique=True,
mysql_length={'object_type':191, 'object_id':191, 'period':191, 'last_network':191}),
{})
id = Column(Integer(), Sequence('rate_limit_state_seq'), primary_key=True)
object_type = Column(Text(191), nullable=False)
object_id = Column(Text(191), nullable=False)
period = Column(Text(), nullable=False)
requests = Column(Integer(), nullable=False, server_default='0')
last_cid = Column(Text(), nullable=False)
last_request_time_utc = Column(DateTime(), nullable=False)
last_from = Column(Text(), nullable=False)
last_network = Column(Text(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rate_limit_state_list', order_by=id, cascade='all, delete, delete-orphan'))
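# A minimal sketch of reading the persisted state above, assuming a SQLAlchemy
# session named `session`; the object type and period values below are illustrative.
def _demo_get_rate_limit_state(session, cluster_id=1):
    return session.query(RateLimitState).\
        filter(RateLimitState.cluster_id==cluster_id).\
        filter(RateLimitState.object_type=='http_soap').\
        filter(RateLimitState.period=='m.2023-05-01T12:30').\
        all()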
# ################################################################################################################################
| 138,510 | Python | .py | 2,311 | 53.942449 | 143 | 0.621579 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,660 | base.py | zatosource_zato/code/zato-common/src/zato/common/odb/model/base.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# dictalchemy
from dictalchemy import make_class_dictable
# SQLAlchemy
from sqlalchemy import Text, TypeDecorator
from sqlalchemy.ext.declarative import declarative_base
# Zato
from zato.common.json_internal import json_dumps, json_loads
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
Base:'any_' = declarative_base()
make_class_dictable(Base)
# ################################################################################################################################
# ################################################################################################################################
class _JSON(TypeDecorator):
""" Python 2.7 ships with SQLite 3.8 whereas it was 3.9 that introduced the JSON datatype.
Because of it, we need our own wrapper around JSON data.
"""
@property
def python_type(self):
return object
impl = Text
def process_bind_param(self, value, dialect):
return json_dumps(value)
def process_literal_param(self, value, dialect):
return value
def process_result_value(self, value, dialect):
if value is not None and value != 'null':
try:
return json_loads(value)
            except (ValueError, TypeError):
return None
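# A minimal round-trip sketch of the wrapper above; the dialect argument is unused
# by these methods, so None is passed, and the sample value is illustrative only.
def _demo_json_roundtrip():
    json_type = _JSON()
    as_text = json_type.process_bind_param({'key': 'value'}, None) # -> '{"key": "value"}'
    return json_type.process_result_value(as_text, None)           # -> {'key': 'value'}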
# ################################################################################################################################
# ################################################################################################################################
| 2,303 | Python | .py | 43 | 49.186047 | 130 | 0.377619 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,661 | client.py | zatosource_zato/code/zato-common/src/zato/common/events/client.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import socket
from datetime import datetime
from logging import getLogger
# gevent
from gevent import sleep
from gevent.lock import RLock
# orjson
from orjson import dumps
# Zato
from zato.common.events.common import Action
from zato.common.typing_ import asdict
from zato.common.util.api import new_cid
from zato.common.util.json_ import json_loads
from zato.common.util.tcp import read_from_socket, SocketReaderCtx, wait_until_port_taken
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.events.common import PushCtx
PushCtx = PushCtx
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
class Client:
def __init__(self, host, port):
# type: (str, int) -> None
self.host = host
self.port = port
self.remote_addr_str = '{}:{}'.format(self.host, self.port)
self.socket = None # type: socket.socket
self.peer_name = '<Client-peer_name-default>'
self.peer_name_str = '<Client-peer_name_str-default>'
self.conn_id = 'zstrcl' + new_cid(bytes=4)
self.max_wait_time = 30
self.max_msg_size = 30_000_000
self.read_buffer_size = 30_000_000
self.recv_timeout = 30
self.should_log_messages = False
self.is_connected = False
self.lock = RLock()
# ################################################################################################################################
def connect(self):
# For later use
start = utcnow()
with self.lock:
if self.is_connected:
return
self.socket = socket.socket(type=socket.SOCK_STREAM)
while not self.is_connected:
logger.info('Connecting to %s', self.remote_addr_str)
try:
self.socket.connect((self.host, self.port))
self.peer_name = self.socket.getpeername()
self.peer_name_str = '{}:{}'.format(*self.peer_name)
except Exception as e:
logger.info('Connection error `%s` (%s) -> %s', e.args, utcnow() - start, self.remote_addr_str)
sleep(1)
else:
logger.info('Connected to %s after %s', self.remote_addr_str, utcnow() - start)
self.is_connected = True
# ################################################################################################################################
def send(self, action, data=b''):
        # type: (bytes, bytes) -> None
with self.lock:
try:
self.socket.sendall(action + data + b'\n')
except Exception as e:
self.is_connected = False
logger.info('Socket send error `%s` -> %s', e.args, self.remote_addr_str)
self.close()
self.connect()
# ################################################################################################################################
def read(self):
# type: () -> bytes
with self.lock:
# Build a receive context ..
ctx = SocketReaderCtx(
self.conn_id,
self.socket,
self.max_wait_time,
self.max_msg_size,
self.read_buffer_size,
self.recv_timeout,
self.should_log_messages
)
# .. wait for the reply and return it.
return read_from_socket(ctx)
# ################################################################################################################################
def ping(self):
logger.info('Pinging %s (%s)', self.peer_name_str, self.conn_id)
# Send the ping message ..
self.send(Action.Ping)
# .. wait for the reply ..
response = self.read()
# .. and raise an exception in case of any error.
if response and response != Action.PingReply:
raise ValueError('Unexpected response received from `{}` -> `{}`'.format(self.peer_name, response))
# ################################################################################################################################
def push(self, ctx):
# type: (PushCtx) -> None
# Serialise the context to dict ..
data = asdict(ctx)
# .. now to JSON ..
data = dumps(data)
# .. and send it across (there will be no response).
self.send(Action.Push, data)
# ################################################################################################################################
def get_table(self):
# Request the tabulated data ..
self.send(Action.GetTable)
# .. wait for the reply ..
response = self.read()
# .. and raise an exception in case of any error.
if response and (not response.startswith(Action.GetTableReply)):
raise ValueError('Unexpected response received from `{}` -> `{}`'.format(self.peer_name, response))
table = response[Action.LenAction:]
return json_loads(table) if table else None
# ################################################################################################################################
def sync_state(self):
# Request that the database sync its state with persistent storage ..
self.send(Action.SyncState)
# .. wait for the reply
self.read()
# ################################################################################################################################
def close(self):
self.socket.close()
# ################################################################################################################################
def run(self):
# Make sure that we have a port to connect to ..
wait_until_port_taken(self.port, 5)
# .. do connect now ..
self.connect()
# .. and ping the remote end to confirm that we have connectivity.
self.ping()
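# A minimal usage sketch, assuming an events server is already listening on the
# given address; the host and port below are illustrative only.
#
#   client = Client('127.0.0.1', 34567)
#   client.run()                 # Waits for the port, connects and pings
#   table = client.get_table()   # Tabulated usage data as a Python object
#   client.close()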
# ################################################################################################################################
# ################################################################################################################################
| 7,486 | Python | .py | 146 | 42.753425 | 130 | 0.385808 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,662 | __init__.py | zatosource_zato/code/zato-common/src/zato/common/events/__init__.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154 | Python | .py | 5 | 29.4 | 64 | 0.687075 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,663 | common.py | zatosource_zato/code/zato-common/src/zato/common/events/common.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from typing import Optional as optional
# Zato
from zato.common.typing_ import dataclass
# ################################################################################################################################
# ################################################################################################################################
class Default:
# This is relative to server.conf's main.work_dir
fs_data_path = 'events'
# Sync database to disk once in that many events ..
sync_threshold = 30_000
# .. or once in that many seconds.
sync_interval = 30
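# A minimal sketch of how the two thresholds above can be combined by a caller;
# the counter names are hypothetical and tracked elsewhere.
def _demo_should_sync(events_since_sync, seconds_since_sync):
    return events_since_sync >= Default.sync_threshold or \
        seconds_since_sync >= Default.sync_interval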
# ################################################################################################################################
# ################################################################################################################################
class EventInfo:
class EventType:
service_request = 1_000_000
service_response = 1_000_001
class CommonObject:
service = 2_000_000
# ################################################################################################################################
# ################################################################################################################################
# All event actions possible
class Action:
Ping = b'01'
PingReply = b'02'
Push = b'03'
GetTable = b'04'
GetTableReply = b'05'
SyncState = b'06'
LenAction = len(Ping)
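# A short sketch of the framing these codes imply, assuming the newline-terminated
# protocol used by the events client: a two-byte action, an optional payload, then b'\n'.
def _demo_frame(data=b''):
    frame = Action.GetTable + data + b'\n'
    action, payload = frame[:Action.LenAction], frame[Action.LenAction:-1]
    return action, payload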
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class PushCtx:
id: str
cid: str
timestamp: str
event_type: int
source_type: optional[str] = None
source_id: optional[str] = None
object_type: int
object_id: str
recipient_type: optional[str] = None
recipient_id: optional[str] = None
total_time_ms: int
def __hash__(self):
return hash(self.id)
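# Because the dataclass above uses init=False, instances are built by attribute
# assignment rather than through a generated __init__. A minimal sketch, with
# illustrative values only:
def _demo_build_push_ctx():
    ctx = PushCtx()
    ctx.id = 'abc123'
    ctx.cid = 'cid-1'
    ctx.timestamp = '2021-01-01T00:00:00'
    ctx.event_type = EventInfo.EventType.service_request
    ctx.object_type = EventInfo.CommonObject.service
    ctx.object_id = 'service-1'
    ctx.total_time_ms = 0
    return ctx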
# ################################################################################################################################
# ################################################################################################################################
| 2,594 | Python | .py | 56 | 42.375 | 130 | 0.313569 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,664 | limiter.py | zatosource_zato/code/zato-common/src/zato/common/rate_limiting/limiter.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from copy import deepcopy
from datetime import datetime
# gevent
from gevent.lock import RLock
# netaddr
from netaddr import IPAddress
# Zato
from zato.common.odb.model import RateLimitState
from zato.common.odb.query.rate_limiting import current_period_list, current_state as current_state_query
from zato.common.rate_limiting.common import Const, AddressNotAllowed, RateLimitReached
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.rate_limiting import Approximate as RateLimiterApproximate, RateLimiting
from zato.common.rate_limiting.common import DefinitionItem, ObjectInfo
from zato.common.typing_ import any_, callable_, commondict, strcalldict, strdict, strlist
# For pyflakes
DefinitionItem = DefinitionItem
ObjectInfo = ObjectInfo
RateLimiterApproximate = RateLimiterApproximate
RateLimiting = RateLimiting
# ################################################################################################################################
# ################################################################################################################################
RateLimitStateTable = RateLimitState.__table__
RateLimitStateDelete = RateLimitStateTable.delete
# ################################################################################################################################
# ################################################################################################################################
class BaseLimiter:
""" A per-server, approximate, rate limiter object. It is approximate because it does not keep track
of what current rate limits in other servers are.
"""
__slots__ = 'current_idx', 'lock', 'api', 'object_info', 'definition', 'has_from_any', 'from_any_rate', 'from_any_unit', \
'is_limit_reached', 'ip_address_cache', 'current_period_func', 'by_period', 'parent_type', 'parent_name', \
'is_exact', 'from_any_object_id', 'from_any_object_type', 'from_any_object_name', 'cluster_id', 'is_active', \
'invocation_no'
api:'RateLimiting'
object_info:'ObjectInfo'
definition:'strlist'
has_from_any:'bool'
from_any_rate:'int'
from_any_unit:'str'
parent_type:'str'
parent_name:'str'
is_exact:'bool'
invocation_no:'int'
ip_address_cache:'strdict'
by_period:'strdict'
from_any_object_id:'int'
from_any_object_type:'str'
from_any_object_name:'str'
initial_state:'commondict' = {
'requests': 0,
'last_cid': None,
'last_request_time_utc': None,
'last_from': None,
'last_network': None,
}
def __init__(self, cluster_id:'int') -> 'None':
self.cluster_id = cluster_id
self.is_active = False
self.current_idx = 0
self.lock = RLock()
self.ip_address_cache = {}
self.by_period = {}
self.is_exact = False
self.invocation_no = 0
self.current_period_func:'strcalldict' = {
Const.Unit.day: self._get_current_day,
Const.Unit.hour: self._get_current_hour,
Const.Unit.minute: self._get_current_minute,
}
# ################################################################################################################################
@property
def has_parent(self) -> 'bool':
        return bool(self.parent_type and self.parent_name)
# ################################################################################################################################
def cleanup(self) -> 'None':
""" Cleans up time periods that are no longer needed.
"""
with self.lock:
# First, periodically clear out the IP cache to limit its size to 1,000 items
if len(self.ip_address_cache) >= 1000:
self.ip_address_cache.clear()
now = datetime.utcnow()
current_minute = self._get_current_minute(now)
current_hour = self._get_current_hour(now)
current_day = self._get_current_day(now)
# We need a copy so as not to modify the dict in place
periods = self._get_current_periods()
to_delete = set()
current_periods_map = {
Const.Unit.minute: current_minute,
Const.Unit.hour: current_hour,
Const.Unit.day: current_day
}
for period in periods: # type: str
period_unit = period[0] # type: str # One of Const.Unit instances
current_period = current_periods_map[period_unit]
# If this period is in the past, add it to the ones to be deleted
if period < current_period:
to_delete.add(period)
if to_delete:
self._delete_periods(to_delete)
# ################################################################################################################################
def rewrite_rate_data(self, old_config) -> 'None':
""" Writes rate limiting information from old configuration to our own. Used by RateLimiting.edit action.
"""
# type: (RateLimiterApproximate)
# Already collected rate limits
self.by_period.clear()
self.by_period.update(old_config.by_period)
# ################################################################################################################################
def get_config_key(self) -> 'str':
# type: () -> str
return '{}:{}'.format(self.object_info.type_, self.object_info.name)
# ################################################################################################################################
def _get_rate_config_by_from(self, orig_from, _from_any=Const.from_any) -> 'DefinitionItem':
# type: (str, str) -> DefinitionItem
from_ = self.ip_address_cache.setdefault(orig_from, IPAddress(orig_from)) # type: IPAddress
found = None
for line in self.definition: # type: DefinitionItem
# A catch-all * pattern
if line.from_ == _from_any:
found = line
break
# A network match
elif from_ in line.from_:
found = line
break
# We did not match any line from configuration
if not found:
raise AddressNotAllowed('Address not allowed `{}`'.format(orig_from))
# We found a matching piece of from IP configuration
return found
# ################################################################################################################################
def _get_current_day(self, now, _prefix=Const.Unit.day, _format='%Y-%m-%d') -> 'str':
# type: (datetime, str, str) -> str
return '{}.{}'.format(_prefix, now.strftime(_format))
def _get_current_hour(self, now, _prefix=Const.Unit.hour, _format='%Y-%m-%dT%H') -> 'str':
# type: (datetime, str, str) -> str
return '{}.{}'.format(_prefix, now.strftime(_format))
def _get_current_minute(self, now, _prefix=Const.Unit.minute, _format='%Y-%m-%dT%H:%M') -> 'str':
# type: (datetime, str, str) -> str
return '{}.{}'.format(_prefix, now.strftime(_format))
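# The period keys built above embed the unit prefix and a timestamp that sorts
# lexicographically, which is what lets cleanup() compare periods with `<`.
# With an illustrative datetime of 2023-05-01 12:30 the keys look like this:
#
#   _get_current_day    -> 'd.2023-05-01'
#   _get_current_hour   -> 'h.2023-05-01T12'
#   _get_current_minute -> 'm.2023-05-01T12:30'
#
#   'm.2023-05-01T12:29' < 'm.2023-05-01T12:30' # True, so the older key is deleted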
# ################################################################################################################################
def _format_last_info(self, current_state) -> 'str':
# type: (dict) -> str
return 'last_from:`{last_from}; last_request_time_utc:`{last_request_time_utc}; last_cid:`{last_cid}`;'.format(
**current_state)
# ################################################################################################################################
def _raise_rate_limit_exceeded(self, rate, unit, orig_from, network_found, current_state, cid,
def_object_id, def_object_name, def_object_type) -> 'None':
raise RateLimitReached('Max. rate limit of {}/{} reached; from:`{}`, network:`{}`; {} (cid:{}) (def:{} {} {})'.format(
rate, unit, orig_from, network_found, self._format_last_info(current_state), cid, def_object_id, def_object_type,
def_object_name))
# ################################################################################################################################
def _check_limit(self, cid, orig_from, network_found, rate, unit, def_object_id, def_object_name, def_object_type,
_rate_any=Const.rate_any, _utcnow=datetime.utcnow) -> 'None':
        # type: (str, str, str, int, str, object, str, str) -> None
# Increase invocation counter
self.invocation_no += 1
# Local aliases
now = _utcnow()
# Get current period, e.g. current day, hour or minute
current_period_func = self.current_period_func[unit]
current_period = current_period_func(now)
current_state = self._get_current_state(current_period, network_found)
# Unless we are allowed to have any rate ..
if rate != _rate_any:
# We may have reached the limit already ..
if current_state['requests'] >= rate:
self._raise_rate_limit_exceeded(rate, unit, orig_from, network_found, current_state, cid,
def_object_id, def_object_name, def_object_type)
# Update current metadata state
self._set_new_state(current_state, cid, orig_from, network_found, now, current_period)
# Above, we checked our own rate limit but it is still possible that we have a parent
# that also wants to check it.
if self.has_parent:
self.api.check_limit(cid, self.parent_type, self.parent_name, orig_from)
# Clean up old entries periodically
if self.invocation_no % 1000 == 0:
self.cleanup()
# ################################################################################################################################
def check_limit(self, cid, orig_from) -> 'None':
# type: (str, str)
with self.lock:
if self.has_from_any:
rate = self.from_any_rate
unit = self.from_any_unit
network_found = Const.from_any
def_object_id = None
def_object_type = None
def_object_name = None
else:
found = self._get_rate_config_by_from(orig_from)
rate = found.rate
unit = found.unit
network_found = found.from_
def_object_id = found.object_id
def_object_type = found.object_type
def_object_name = found.object_name
# Now, check actual rate limits
self._check_limit(cid, orig_from, network_found, rate, unit, def_object_id, def_object_name, def_object_type)
# ################################################################################################################################
def _get_current_periods(self) -> 'None':
raise NotImplementedError()
_get_current_state = _set_new_state = _delete_periods = _get_current_periods
# ################################################################################################################################
# ################################################################################################################################
class Approximate(BaseLimiter):
def _get_current_periods(self) -> 'strlist':
return list(self.by_period.keys())
# ################################################################################################################################
def _delete_periods(self, to_delete) -> 'None':
for item in to_delete: # item: str
del self.by_period[item]
# ################################################################################################################################
def _get_current_state(self, current_period, network_found) -> 'strdict':
# type: (str, str) -> dict
# Get or create a dictionary of requests information for current period
period_dict = self.by_period.setdefault(current_period, {}) # type: dict
# Get information about already stored requests for that network in current period
return period_dict.setdefault(network_found, deepcopy(self.initial_state))
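# The nested structure maintained above, shown with illustrative values:
#
#   self.by_period = {
#       'm.2023-05-01T12:30': {           # A current period ..
#           '10.0.0.0/8': {               # .. a matched network ..
#               'requests': 3,            # .. and its state, seeded from initial_state
#               'last_cid': 'cid-3',
#               'last_request_time_utc': '2023-05-01T12:30:01',
#               'last_from': '10.0.0.1',
#               'last_network': '10.0.0.0/8',
#           }
#       }
#   }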
# ################################################################################################################################
def _set_new_state(self, current_state, cid, orig_from, network_found, now, *ignored:'any_') -> 'None':
current_state['requests'] += 1
current_state['last_cid'] = cid
current_state['last_request_time_utc'] = now.isoformat()
current_state['last_from'] = orig_from
current_state['last_network'] = str(network_found)
# ################################################################################################################################
# ################################################################################################################################
class Exact(BaseLimiter):
def __init__(self, cluster_id:'int', sql_session_func:'callable_') -> 'None':
super(Exact, self).__init__(cluster_id)
self.sql_session_func = sql_session_func
# ################################################################################################################################
def _fetch_current_state(self, session, current_period, network_found) -> 'RateLimitState':
        # type: (object, str, str) -> RateLimitState
# We have a complex Python object but for the query we just need its string representation
network_found = str(network_found)
return current_state_query(session, self.cluster_id, self.object_info.type_, self.object_info.id,
current_period, network_found).\
first()
# ################################################################################################################################
def _get_current_state(self, current_period, network_found) -> 'strdict':
# type: (str, str) -> dict
current_state = deepcopy(self.initial_state) # type: dict
with closing(self.sql_session_func()) as session:
item = self._fetch_current_state(session, current_period, network_found)
if item:
current_state.update(item.asdict())
return current_state
# ################################################################################################################################
def _set_new_state(self, current_state, cid, orig_from, network_found, now, current_period) -> 'None':
# We just need a string representation of this object
network_found = str(network_found)
with closing(self.sql_session_func()) as session:
item = self._fetch_current_state(session, current_period, network_found)
if item:
item.last_cid = cid
item.last_from = orig_from
item.last_request_time_utc = now
else:
item = RateLimitState()
item.cluster_id = self.cluster_id
item.object_type = self.object_info.type_
item.object_id = self.object_info.id
item.requests = 0
item.period = current_period
item.last_cid = cid
item.last_from = orig_from
item.last_network = network_found
item.last_request_time_utc = now
item.requests += 1
session.add(item)
session.commit()
# ################################################################################################################################
def _get_current_periods(self) -> 'strlist':
with closing(self.sql_session_func()) as session:
return [elem[0] for elem in current_period_list(session, self.cluster_id).all()]
# ################################################################################################################################
def _delete_periods(self, to_delete) -> 'None':
with closing(self.sql_session_func()) as session:
session.execute(RateLimitStateDelete().where(
RateLimitStateTable.c.period.in_(to_delete)
))
session.commit()
# ################################################################################################################################
# ################################################################################################################################
| 17,203 | Python | .py | 298 | 48.704698 | 130 | 0.468984 | zatosource/zato | 1,096 | 239 | 0 | AGPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 10,665 | __init__.py | zatosource_zato/code/zato-common/src/zato/common/rate_limiting/__init__.py |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from logging import getLogger
# gevent
from gevent.lock import RLock
# netaddr
from netaddr import IPNetwork
# SQLAlchemy
from sqlalchemy import and_
# Zato
from zato.common.rate_limiting.common import Const, DefinitionItem, ObjectInfo
from zato.common.rate_limiting.limiter import Approximate, Exact, RateLimitStateDelete, RateLimitStateTable
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.rate_limiting.limiter import BaseLimiter
from zato.common.typing_ import callable_, dict_, list_, strdict
from zato.distlock import LockManager
# For pyflakes
BaseLimiter = BaseLimiter
LockManager = LockManager
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class DefinitionParser:
""" Parser for user-provided rate limiting definitions.
"""
@staticmethod
def get_lines(
definition, # type: str
object_id, # type: int
object_type, # type: str
object_name, # type: str
parse_only=False # type: bool
) -> 'list_[DefinitionItem]':
if not parse_only:
out = []
definition = definition if isinstance(definition, str) else definition.decode('utf8')
for idx, orig_line in enumerate(definition.splitlines(), 1): # type: int, str
line = orig_line.strip()
if (not line) or line.startswith('#'):
continue
line = line.split('=')
if len(line) != 2:
raise ValueError('Invalid definition line `{}`; (idx:{})'.format(orig_line, idx))
from_, rate_info = line # type: str, str
from_ = from_.strip()
if from_ != Const.from_any:
from_ = IPNetwork(from_)
rate_info = rate_info.strip()
if rate_info == Const.rate_any:
rate = Const.rate_any
unit = Const.Unit.day # This is arbitrary but it does not matter because there is no rate limit in effect
else:
rate, unit = rate_info.split('/') # type: str, str
rate = int(rate.strip())
unit = unit.strip()
all_units = Const.all_units()
if unit not in all_units:
raise ValueError('Unit `{}` is not one of `{}`'.format(unit, all_units))
# In parse-only mode we do not build any actual output
if parse_only:
continue
item = DefinitionItem()
item.config_line = idx
item.from_ = from_
item.rate = rate
item.unit = unit
item.object_id = object_id
item.object_type = object_type
item.object_name = object_name
out.append(item)
if not parse_only:
return out
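# A short sketch of the format parsed above; the networks and rates are
# illustrative only. Each line reads `from=rate/unit`; `*` on the left matches
# any address and `*` on the right means no limit is in effect:
#
#   192.168.1.0/24=100/m
#   10.0.0.0/8=1000/h
#   *=*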
# ################################################################################################################################
@staticmethod
    def check_definition(definition:'str') -> 'None':
DefinitionParser.get_lines(definition.strip(), None, None, None, True)
# ################################################################################################################################
@staticmethod
def check_definition_from_input(input_data:'strdict') -> 'None':
rate_limit_def = input_data.get('rate_limit_def') or ''
if rate_limit_def:
DefinitionParser.check_definition(rate_limit_def)
# ################################################################################################################################
def parse(
self,
definition, # type: str
object_id, # type: int
object_type, # type: str
object_name # type: str
) -> 'list_[DefinitionItem]':
return DefinitionParser.get_lines(definition.strip(), object_id, object_type, object_name)
# ################################################################################################################################
# ################################################################################################################################
class RateLimiting:
""" Main API for the management of rate limiting functionality.
"""
__slots__ = 'parser', 'config_store', 'lock', 'sql_session_func', 'global_lock_func', 'cluster_id'
def __init__(self) -> 'None':
self.parser = DefinitionParser() # type: DefinitionParser
self.config_store = {} # type: dict_[str, BaseLimiter]
self.lock = RLock()
self.global_lock_func = None # type: LockManager
self.sql_session_func = None # type: callable_
self.cluster_id = None # type: int
# ################################################################################################################################
def _get_config_key(self, object_type:'str', object_name:'str') -> 'str':
return '{}:{}'.format(object_type, object_name)
# ################################################################################################################################
def _get_config_by_object(self, object_type:'str', object_name:'str') -> 'BaseLimiter':
config_key = self._get_config_key(object_type, object_name)
return self.config_store.get(config_key)
# ################################################################################################################################
def _create_config(self, object_dict:'strdict', definition:'str', is_exact:'bool') -> 'BaseLimiter':
object_id = object_dict['id']
object_type = object_dict['type_']
object_name = object_dict['name']
info = ObjectInfo()
info.id = object_id
info.type_ = object_type
info.name = object_name
parsed = self.parser.parse(definition or '', object_id, object_type, object_name)
if parsed:
def_first = parsed[0]
has_from_any = def_first.from_ == Const.from_any
else:
has_from_any = False
config = Exact(self.cluster_id, self.sql_session_func) if is_exact else Approximate(self.cluster_id) # type: BaseLimiter
config.is_active = object_dict['is_active']
config.is_exact = is_exact
config.api = self
config.object_info = info
config.definition = parsed
config.parent_type = object_dict['parent_type']
config.parent_name = object_dict['parent_name']
if has_from_any:
config.has_from_any = has_from_any
config.from_any_rate = def_first.rate
config.from_any_unit = def_first.unit
config.from_any_object_id = object_id
config.from_any_object_type = object_type
config.from_any_object_name = object_name
return config
# ################################################################################################################################
def create(self, object_dict:'strdict', definition:'str', is_exact:'bool') -> 'None':
config = self._create_config(object_dict, definition, is_exact)
self.config_store[config.get_config_key()] = config
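# A minimal sketch of wiring the API above, with illustrative values; the
# object_dict keys mirror the ones read by _create_config and the object type
# name is hypothetical:
#
#   api = RateLimiting()
#   api.cluster_id = 1
#   object_dict = {'id': 123, 'type_': 'http_soap', 'name': 'my.endpoint',
#       'is_active': True, 'parent_type': None, 'parent_name': None}
#   api.create(object_dict, '*=10/m', is_exact=False)
#   api.check_limit('cid-1', 'http_soap', 'my.endpoint', '10.0.0.1')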
# ################################################################################################################################
def check_limit(self, cid:'str', object_type:'str', object_name:'str', from_:'str', needs_warn:'bool'=True) -> 'None':
""" Checks if input object has already reached its allotted usage limit.
"""
# type: (str, str, str, str)
with self.lock:
config = self._get_config_by_object(object_type, object_name)
# It is possible that we do not have configuration for such an object,
# in which case we will log a warning.
if config:
with config.lock:
config.check_limit(cid, from_)
else:
if needs_warn:
logger.warning('No such rate limiting object `%s` (%s)', object_name, object_type)
# ################################################################################################################################
def _delete_from_odb(self, object_type:'str', object_id:'int') -> 'None':
with closing(self.sql_session_func()) as session:
session.execute(RateLimitStateDelete().where(and_(
RateLimitStateTable.c.object_type==object_type,
RateLimitStateTable.c.object_id==object_id,
)))
session.commit()
# ################################################################################################################################
def _delete(self, object_type:'str', object_name:'str', remove_parent:'bool') -> 'None':
""" Deletes configuration for input data, optionally deleting references to it from all objects that depended on it.
Must be called with self.lock held.
"""
config_key = self._get_config_key(object_type, object_name)
limiter = self.config_store[config_key] # type: BaseLimiter
del self.config_store[config_key]
if limiter.is_exact:
self._delete_from_odb(object_type, limiter.object_info.id)
if remove_parent:
self._set_new_parent(object_type, object_name, None, None)
# ################################################################################################################################
def _set_new_parent(self, parent_type:'str', old_parent_name:'str', new_parent_type:'str', new_parent_name:'str') -> 'None':
""" Sets new parent for all configuration entries matching the old one. Must be called with self.lock held.
"""
for child_config in self.config_store.values(): # type: BaseLimiter
object_info = child_config.object_info
# This is our own config
if object_info.type_ == parent_type and object_info.name == old_parent_name:
continue
# This object has a parent, possibly it is our very configuration
if child_config.has_parent:
# Yes, this is our config ..
if child_config.parent_type == parent_type and child_config.parent_name == old_parent_name:
# We typically want to change the parent's name but it is possible
# that both type and name will be None (in case we are removing a parent from a child object)
# which is why both are set here.
child_config.parent_type = new_parent_type
child_config.parent_name = new_parent_name
# ################################################################################################################################
def edit(self, object_type:'str', old_object_name:'str', object_dict:'strdict', definition:'str', is_exact:'bool') -> 'None':
""" Changes, in place, an existing configuration entry to input data.
"""
# Note the whole of this operation is under self.lock to make sure the update is atomic
# from our callers' perspective.
with self.lock:
old_config = self._get_config_by_object(object_type, old_object_name)
if not old_config:
raise ValueError('Rate limiting object not found `{}` ({})'.format(old_object_name, object_type))
# Just to be sure we are doing the right thing, compare object types, old and new
if object_type != old_config.object_info.type_:
raise ValueError('Unexpected object_type, old:`{}`, new:`{}` ({}) ({})'.format(
old_config.object_info.type_, object_type, old_object_name, object_dict))
# Now, create a new config object ..
new_config = self._create_config(object_dict, definition, is_exact)
# .. in case it was a rename ..
if old_config.object_info.name != new_config.object_info.name:
# .. make all child objects depend on the new name, in case it changed
self._set_new_parent(object_type, old_object_name, new_config.object_info.type_, new_config.object_info.name)
# First, delete the old configuration, but do not delete any objects that depended on it
# because we are just editing the former, not deleting it altogether.
self._delete(object_type, old_object_name, False)
# Now, create a new key
self.config_store[new_config.get_config_key()] = new_config
# ################################################################################################################################
def delete(self, object_type:'str', object_name:'str') -> 'None':
""" Deletes configuration for input object and clears out parent references to it.
"""
with self.lock:
self._delete(object_type, object_name, True)
# ################################################################################################################################
def _get_config(self, object_type:'str', object_name:'str') -> 'None':
""" Returns configuration for the input object, assumming we have it at all.
"""
config_key = self._get_config_key(object_type, object_name)
return self.config_store.get(config_key)
# ################################################################################################################################
def get_config(self, object_type:'str', object_name:'str') -> 'BaseLimiter':
with self.lock:
return self._get_config(object_type, object_name)
# ################################################################################################################################
def has_config(self, object_type:'str', object_name:'str') -> 'bool':
with self.lock:
return bool(self._get_config(object_type, object_name))
# ################################################################################################################################
def cleanup(self) -> 'None':
""" Invoked periodically by the scheduler - goes through all configuration elements and cleans up
all time periods that are no longer needed.
"""
for config in self.config_store.values(): # type: BaseLimiter
config.cleanup()
# ################################################################################################################################
# ################################################################################################################################
| 15,594
|
Python
|
.py
| 262
| 50.259542
| 130
| 0.474475
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,666
|
common.py
|
zatosource_zato/code/zato-common/src/zato/common/rate_limiting/common.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, strset
# ################################################################################################################################
# ################################################################################################################################
class BaseException(Exception):
pass
class AddressNotAllowed(BaseException):
pass
class RateLimitReached(BaseException):
pass
# ################################################################################################################################
# ################################################################################################################################
class Const:
from_any = '*'
rate_any = '*'
class Unit:
minute = 'm'
hour = 'h'
day = 'd'
@staticmethod
def all_units() -> 'strset':
return {Const.Unit.minute, Const.Unit.hour, Const.Unit.day}
# ################################################################################################################################
# ################################################################################################################################
class ObjectInfo:
""" Information about an individual object covered by rate limiting.
"""
__slots__ = 'type_', 'id', 'name'
type_:'str'
id:'int'
name:'str'
# ################################################################################################################################
# ################################################################################################################################
class DefinitionItem:
__slots__ = 'config_line', 'from_', 'rate', 'unit', 'object_id', 'object_type', 'object_name'
config_line:'int'
from_:'any_'
rate:'int'
unit:'str'
object_id:'int'
object_type:'str'
object_name:'str'
def __repr__(self) -> 'str':
return '<{} at {}; line:{}, from:{}, rate:{}, unit:{} ({} {} {})>'.format(
self.__class__.__name__, hex(id(self)), self.config_line, self.from_, self.rate, self.unit,
self.object_id, self.object_name, self.object_type)
# ################################################################################################################################
# ################################################################################################################################
| 2,905
|
Python
|
.py
| 55
| 48.527273
| 130
| 0.27006
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,667
|
http_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/http_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import copy
from cgi import FieldStorage
from collections.abc import Mapping
from io import BytesIO
from urllib.parse import parse_qsl, quote, urlencode
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, stranydict
# ################################################################################################################################
# ################################################################################################################################
def get_proxy_config(config):
""" Returns HTTP/HTTPS proxy configuration from a given object's configuration data if any was configured.
"""
# Proxy configuration, if any
if config.http_proxy_list or config.https_proxy_list:
proxy_config = {}
# For now, all proxies we produce are not lists but a single one,
# which can be revised if needed in the future.
if config.http_proxy_list:
proxy_config['http'] = config.http_proxy_list.strip()
if config.https_proxy_list:
proxy_config['https'] = config.https_proxy_list.strip()
return proxy_config
# ################################################################################################################################
def get_form_data(wsgi_environ:'stranydict', as_dict:'bool'=True) -> 'stranydict':
# Response to produce
out = {}
# This is the form data uploaded to a channel or service
data = wsgi_environ['zato.http.raw_request'] # type: any_
# Create a buffer to hold the form data and write the form to it
buff = BytesIO()
buff.write(data)
buff.seek(0)
# Output to return
form = FieldStorage(fp=buff, environ=wsgi_environ, keep_blank_values=True)
# Clean up
buff.close()
# Turn the FieldStorage object into a dict ..
if as_dict:
if form.list:
form_to_dict = {item.name: item.value for item in form.list}
out.update(form_to_dict)
# Return the dict now
return out
# type: ignore
# ################################################################################################################################
# ################################################################################################################################
# Original Django license for the code below
"""
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# It's neither necessary nor appropriate to use
# django.utils.encoding.force_str() for parsing URLs and form inputs. Thus,
# this slightly more restricted function, used by QueryDict.
def bytes_to_text(s, encoding):
"""
Convert bytes objects to strings, using the given encoding. Illegally
encoded input characters are replaced with Unicode 'unknown' codepoint
(\ufffd).
Return any non-bytes objects without change.
"""
if isinstance(s, bytes):
return str(s, encoding, 'replace')
else:
return s
class MultiValueDict(dict):
"""
A subclass of dictionary customized to handle multiple values for the
same key.
>>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
>>> d['name']
'Simon'
>>> d.getlist('name')
['Adrian', 'Simon']
>>> d.getlist('doesnotexist')
[]
>>> d.getlist('doesnotexist', ['Adrian', 'Simon'])
['Adrian', 'Simon']
>>> d.get('lastname', 'nonexistent')
'nonexistent'
>>> d.setlist('lastname', ['Holovaty', 'Willison'])
This class exists to solve the irritating problem raised by cgi.parse_qs,
which returns a list for every key, even though most web forms submit
single name-value pairs.
"""
def __init__(self, key_to_list_mapping=()):
super().__init__(key_to_list_mapping)
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, super().__repr__())
def __getitem__(self, key):
"""
Return the last data value for this key, or [] if it's an empty list;
raise KeyError if not found.
"""
try:
list_ = super().__getitem__(key)
except KeyError:
raise Exception(key)
try:
return list_[-1]
except IndexError:
return []
def __setitem__(self, key, value):
super().__setitem__(key, [value])
def __copy__(self):
return self.__class__([(k, v[:]) for k, v in self.lists()])
def __deepcopy__(self, memo):
result = self.__class__()
memo[id(self)] = result
for key, value in dict.items(self):
dict.__setitem__(
result, copy.deepcopy(key, memo), copy.deepcopy(value, memo)
)
return result
def __getstate__(self):
return {**self.__dict__, '_data': {k: self._getlist(k) for k in self}}
def __setstate__(self, obj_dict):
data = obj_dict.pop('_data', {})
for k, v in data.items():
self.setlist(k, v)
self.__dict__.update(obj_dict)
def get(self, key, default=None):
"""
Return the last data value for the passed key. If key doesn't exist
or value is an empty list, return `default`.
"""
try:
val = self[key]
except KeyError:
return default
if val == []:
return default
return val
def _getlist(self, key, default=None, force_list=False):
"""
Return a list of values for the key.
Used internally to manipulate values list. If force_list is True,
return a new copy of values.
"""
try:
values = super().__getitem__(key)
except KeyError:
if default is None:
return []
return default
else:
if force_list:
values = list(values) if values is not None else None
return values
def getlist(self, key, default=None):
"""
Return the list of values for the key. If key doesn't exist, return a
default value.
"""
return self._getlist(key, default, force_list=True)
def setlist(self, key, list_):
super().__setitem__(key, list_)
def setdefault(self, key, default=None):
if key not in self:
self[key] = default
# Do not return default here because __setitem__() may store
# another value -- QueryDict.__setitem__() does. Look it up.
return self[key]
def setlistdefault(self, key, default_list=None):
if key not in self:
if default_list is None:
default_list = []
self.setlist(key, default_list)
# Do not return default_list here because setlist() may store
# another value -- QueryDict.setlist() does. Look it up.
return self._getlist(key)
def appendlist(self, key, value):
"""Append an item to the internal list associated with key."""
self.setlistdefault(key).append(value)
def items(self):
"""
Yield (key, value) pairs, where value is the last item in the list
associated with the key.
"""
for key in self:
yield key, self[key]
def lists(self):
"""Yield (key, list) pairs."""
return iter(super().items())
def values(self):
"""Yield the last value on every key list."""
for key in self:
yield self[key]
def copy(self):
"""Return a shallow copy of this object."""
return copy.copy(self)
def update(self, *args, **kwargs):
"""Extend rather than replace existing key lists."""
if len(args) > 1:
raise TypeError('update expected at most 1 argument, got %d' % len(args))
if args:
arg = args[0]
if isinstance(arg, MultiValueDict):
for key, value_list in arg.lists():
self.setlistdefault(key).extend(value_list)
else:
if isinstance(arg, Mapping):
arg = arg.items()
for key, value in arg:
self.setlistdefault(key).append(value)
for key, value in kwargs.items():
self.setlistdefault(key).append(value)
def dict(self):
"""Return current object as a dict with singular values."""
return {key: self[key] for key in self}
# ################################################################################################################################
class QueryDict(MultiValueDict):
"""
A specialized MultiValueDict which represents a query string.
A QueryDict can be used to represent GET or POST data. It subclasses
MultiValueDict since keys in such data can be repeated, for instance
in the data from a form with a <select multiple> field.
By default QueryDicts are immutable, though the copy() method
will always return a mutable copy.
Both keys and values set on this class are converted from the given encoding
(DEFAULT_CHARSET by default) to str.
"""
# These are both reset in __init__, but is specified here at the class
# level so that unpickling will have valid values
_mutable = True
_encoding = None
def __init__(self, query_string=None, mutable=False, encoding=None):
super().__init__()
self.encoding = encoding or 'utf8'
query_string = query_string or ''
parse_qsl_kwargs = {
'keep_blank_values': True,
'encoding': self.encoding,
'max_num_fields': 123,
}
if isinstance(query_string, bytes):
# query_string normally contains URL-encoded data, a subset of ASCII.
try:
query_string = query_string.decode(self.encoding)
except UnicodeDecodeError:
# ... but some user agents are misbehaving :-(
query_string = query_string.decode('iso-8859-1')
try:
for key, value in parse_qsl(query_string, **parse_qsl_kwargs):
self.appendlist(key, value)
except ValueError as e:
# ValueError can also be raised if the strict_parsing argument to
# parse_qsl() is True. As that is not used by Django, assume that
# the exception was raised by exceeding the value of max_num_fields
# instead of fragile checks of exception message strings.
raise Exception(
'The number of GET/POST parameters exceeded '
'settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.'
) from e
self._mutable = mutable
@classmethod
def fromkeys(cls, iterable, value='', mutable=False, encoding=None):
"""
Return a new QueryDict with keys (may be repeated) from an iterable and
values from value.
"""
q = cls('', mutable=True, encoding=encoding)
for key in iterable:
q.appendlist(key, value)
if not mutable:
q._mutable = False
return q
@property
def encoding(self):
if self._encoding is None:
self._encoding = 'utf8'
return self._encoding
@encoding.setter
def encoding(self, value):
self._encoding = value
def _assert_mutable(self):
if not self._mutable:
raise AttributeError('This QueryDict instance is immutable')
def __setitem__(self, key, value):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
value = bytes_to_text(value, self.encoding)
super().__setitem__(key, value)
def __delitem__(self, key):
self._assert_mutable()
super().__delitem__(key)
def __copy__(self):
result = self.__class__('', mutable=True, encoding=self.encoding)
for key, value in self.lists():
result.setlist(key, value)
return result
def __deepcopy__(self, memo):
result = self.__class__('', mutable=True, encoding=self.encoding)
memo[id(self)] = result
for key, value in self.lists():
result.setlist(copy.deepcopy(key, memo), copy.deepcopy(value, memo))
return result
def setlist(self, key, list_):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
list_ = [bytes_to_text(elt, self.encoding) for elt in list_]
super().setlist(key, list_)
def setlistdefault(self, key, default_list=None):
self._assert_mutable()
return super().setlistdefault(key, default_list)
def appendlist(self, key, value):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
value = bytes_to_text(value, self.encoding)
super().appendlist(key, value)
def pop(self, key, *args):
self._assert_mutable()
return super().pop(key, *args)
def popitem(self):
self._assert_mutable()
return super().popitem()
def clear(self):
self._assert_mutable()
super().clear()
def setdefault(self, key, default=None):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
default = bytes_to_text(default, self.encoding)
return super().setdefault(key, default)
def copy(self):
"""Return a mutable copy of this object."""
return self.__deepcopy__({})
def urlencode(self, safe=None):
"""
Return an encoded string of all query string arguments.
`safe` specifies characters which don't require quoting, for example::
>>> q = QueryDict(mutable=True)
>>> q['next'] = '/a&b/'
>>> q.urlencode()
'next=%2Fa%26b%2F'
>>> q.urlencode(safe='/')
'next=/a%26b/'
"""
output = []
if safe:
safe = safe.encode(self.encoding)
def encode(k, v):
return '%s=%s' % ((quote(k, safe), quote(v, safe)))
else:
def encode(k, v):
return urlencode({k: v})
for k, list_ in self.lists():
output.extend(
encode(k.encode(self.encoding), str(v).encode(self.encoding))
for v in list_
)
return '&'.join(output)
# ################################################################################################################################
# ################################################################################################################################
| 16,456
|
Python
|
.py
| 382
| 34.879581
| 130
| 0.567211
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,668
|
json_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/json_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pysimdjson
try:
from simdjson import Parser as SIMDJSONParser # type: ignore
except ImportError:
has_simdjson = False
else:
has_simdjson = True
# ################################################################################################################################
# ################################################################################################################################
class BasicParser:
def parse(self, value):
return json_loads(value)
# ################################################################################################################################
# ################################################################################################################################
# Library pysimdjson is temporarily disabled
if False:
from simdjson import loads as json_loads # type: ignore
JSONParser = SIMDJSONParser # type: ignore
else:
from json import loads as json_loads
JSONParser = BasicParser
# ################################################################################################################################
# ################################################################################################################################
| 1,422
|
Python
|
.py
| 28
| 48.071429
| 130
| 0.322511
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,669
|
wmq.py
|
zatosource_zato/code/zato-common/src/zato/common/util/wmq.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from binascii import unhexlify
# ################################################################################################################################
def unhexlify_wmq_id(wmq_id, _prefix='ID'):
""" Converts the IBM MQ generated identifier back to bytes,
e.g. 'ID:414d5120535052494e47505954484f4ecc90674a041f0020' -> 'AMQ SPRINGPYTHON\xcc\x90gJ\x04\x1f\x00 '.
"""
return unhexlify(wmq_id.replace(_prefix, '', 1))
# ################################################################################################################################
| 822
|
Python
|
.py
| 15
| 52.266667
| 130
| 0.4925
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,670
|
python_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/python_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import importlib
import sys
import traceback
from dataclasses import dataclass
from importlib.util import module_from_spec, spec_from_file_location
from logging import getLogger
from pathlib import Path
from threading import current_thread
# Zato
from zato.common.typing_ import cast_, module_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import intnone
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class ModuleInfo:
name: 'str'
path: 'Path'
module: 'module_'
# ################################################################################################################################
# ################################################################################################################################
def get_python_id(item):
# Python-level ID contains all the core details about the object that requests this information and its current thread
_current_thread = current_thread()
_current_thread_ident = cast_('int', _current_thread.ident)
python_id = '{}.{}.{}'.format(hex(id(item)), _current_thread.name, hex(_current_thread_ident))
return python_id
# ################################################################################################################################
def get_current_stack():
sep = '*' * 80
out = ['\n', sep]
for line in traceback.format_stack():
out.append(line.strip())
out.append(sep)
return '\n'.join(out)
# ################################################################################################################################
def log_current_stack():
logger.info(get_current_stack())
# ################################################################################################################################
# Taken from https://stackoverflow.com/a/16589622
def get_full_stack():
exc = sys.exc_info()[0]
stack = traceback.extract_stack()[:-1] # last one would be full_stack()
if exc is not None: # i.e. if an exception is present
del stack[-1] # remove call of full_stack, the printed exception will contain the caught exception caller instead
trace = 'Traceback (most recent call last):\n'
stack_string = trace + ''.join(traceback.format_list(stack))
if exc is not None:
stack_string += ' '
stack_string += traceback.format_exc()
stack_string = stack_string.lstrip(trace)
return stack_string
# ################################################################################################################################
def reload_module_(mod_name:'str') -> 'None':
if mod_name in sys.modules:
_ = importlib.reload(sys.modules[mod_name])
else:
_ = importlib.import_module(mod_name)
# ################################################################################################################################
def get_module_name_by_path(path:'str | Path') -> 'str':
# Local aliases
root = ''
root_idx:'intnone' = None
mod_path = Path(path)
# If we are in a directory of that name, it means that this directory should be treated as our root,
# note that currently there is only one directory configured here.
immediate_root_list = ['services']
# All the roots that we may potentially find
root_list = ['src', 'source']
# Get and reverse the parts of the path for the ease of their manipulation
parts = mod_path.parts
parts = list(reversed(parts))
# This is our parent directory ..
parent = parts[1]
# .. first, check if our immediate root is a name that we recognize ..
if parent in immediate_root_list:
# .. if yes, the name of the file becomes the module's name.
mod_name = mod_path.stem
return mod_name
# We are here if our parent directory is not an immediate root and we need to find one ..
for root in root_list:
try:
root_idx = parts.index(root)
except ValueError:
pass
else:
# .. we have a match, i.e. we matched a specific root ..
break
# .. if there is no root, it means that we have no choice but to assume ..
# .. that the name of the module is the same as its file ..
if not root_idx:
mod_name = mod_path.stem
# .. otherwise, we can make use of the root found above ..
else:
# .. the first element is the file name so we need to remove its extension ..
mod_file = parts[0]
mod_file = Path(mod_file)
mod_file_name = mod_file.stem
# .. get the rest of the module's path, from right above its name (hence we start from 1) until the root ..
mod_name_parts = parts[1:root_idx]
# .. we have the names and we can reverse them back so they run from top to bottom again ..
mod_name_parts = list(reversed(mod_name_parts))
# .. we can append the final file name now unless it is __init__.py ..
# .. which we can ignore because Python recognizes it implicitly ..
if mod_file_name != '__init__':
mod_name_parts.append(mod_file_name)
# .. and this gives us the full module name ..
mod_name = '.'.join(mod_name_parts)
# .. we are ready to return the name to our caller ..
return mod_name
# ################################################################################################################################
def import_module_by_path(path:'str') -> 'ModuleInfo | None':
# Local aliases
mod_path = Path(path)
# .. turn the path into its corresponding module name ..
mod_name = get_module_name_by_path(mod_path)
# .. we have both the name of a module and its path so we can import it now ..
if spec := spec_from_file_location(mod_name, path):
module = module_from_spec(spec)
sys.modules[mod_name] = module
spec.loader.exec_module(module) # type: ignore
# .. build an object encapsulating what we know about the module
out = ModuleInfo()
out.name = mod_name
out.path = mod_path
out.module = module
# .. finally, we can return it to our caller.
return out
# ################################################################################################################################
# ################################################################################################################################
| 7,379
|
Python
|
.py
| 141
| 46.865248
| 130
| 0.466453
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,671
|
hot_deploy_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/hot_deploy_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from pathlib import Path
# Zato
from zato.common.api import HotDeploy
from zato.common.hot_deploy_ import HotDeployProject, pickup_order_patterns
from zato.common.typing_ import cast_
from zato.common.util.env import get_list_from_environment
from zato.common.util.file_system import resolve_path
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import iterator_, list_, pathlist, strdictdict
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
def get_project_info(
root, # type: str
src_dir_name, # type: str
) -> 'list_[HotDeployProject] | None':
# Local aliases
root_path = Path(root)
# Assume we will not find such a directory
out:'list_[HotDeployProject]' = []
# Any potential project directories will go here
project_dirs:'pathlist' = []
# Look up the project directories recursively ..
for item in root_path.rglob(src_dir_name):
# .. if any is found, append it for later use ..
project_dirs.append(item)
# .. now, go through all the project directories found and construct project objects ..
# .. along with their directories to pick up code from ..
for item in project_dirs:
# .. do build the business object ..
project = HotDeployProject()
project.pickup_from_path = []
# .. this is what will be added to sys.path by a starting server ..
project.sys_path_entry = item
# .. make use of the default patterns ..
patterns = pickup_order_patterns[:]
# .. append any extra patterns found in the environment ..
if extra_patterns := get_list_from_environment(HotDeploy.Env.Pickup_Patterns, ','):
logger.info('Found extra hot-deployment patterns via %s -> %s', HotDeploy.Env.Pickup_Patterns, extra_patterns)
patterns.extend(extra_patterns)
# .. look up all the directories to pick up from ..
for pattern in patterns:
# .. remove any potential whitespace ..
pattern = pattern.strip()
# .. do look them up ..
pickup_dirs = item.rglob(pattern)
# .. go over any found ..
for pickup_dir in pickup_dirs:
# .. ignore Python's own directories ..
if '__pycache__' in pickup_dir.parts:
continue
# .. ignore items that are not directories ..
if not pickup_dir.is_dir():
continue
# .. and add it to the project's list of directories ..
if not pickup_dir in project.pickup_from_path:
project.pickup_from_path.append(pickup_dir)
# .. we can append the project to our result ..
out.append(project)
# .. now, we can return the result to our caller.
return out
# ################################################################################################################################
# ################################################################################################################################
def extract_pickup_from_items(
base_dir, # type: str
pickup_config, # type: strdictdict
src_dir_name, # type: str
) -> 'iterator_[str | list_[HotDeployProject]]':
# Go through each piece of the hot-deployment configuration ..
for key, value in pickup_config.items(): # type: ignore
# .. we handle only specific keys ..
if key.startswith(HotDeploy.UserPrefix):
# .. proceed only if we know where to pick up from ..
if pickup_from := value.get('pickup_from'): # type: ignore
# .. type hints ..
pickup_from = cast_('str', pickup_from)
# .. this will resolve home directories and environment variables ..
pickup_from = resolve_path(pickup_from, base_dir)
# .. check if this path points to a project ..
if project_list := get_project_info(pickup_from, src_dir_name):
yield project_list
else:
yield pickup_from
# ################################################################################################################################
# ################################################################################################################################
| 5,313
|
Python
|
.py
| 96
| 47.197917
| 130
| 0.464375
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,672
|
tcp.py
|
zatosource_zato/code/zato-common/src/zato/common/util/tcp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import errno
from datetime import datetime, timedelta
from logging import getLogger
from socket import timeout as SocketTimeoutException
from time import sleep
from traceback import format_exc
from uuid import uuid4
# gevent
from gevent import socket
from gevent.server import StreamServer
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class SocketReaderCtx:
""" Configuration and context used to read that from sockets via read_from_socket.
"""
__slots__ = 'conn_id', 'socket', 'max_wait_time', 'max_msg_size', 'read_buffer_size', 'recv_timeout', \
'should_log_messages', 'buffer', 'is_ok', 'reason'
def __init__(self, conn_id, socket, max_wait_time, max_msg_size, read_buffer_size, recv_timeout, should_log_messages):
# type: (str, socket, int, int, int, int, object)
self.conn_id = conn_id
self.socket = socket
self.max_wait_time = max_wait_time
self.max_msg_size = max_msg_size
self.read_buffer_size = read_buffer_size
self.recv_timeout = recv_timeout
self.should_log_messages = should_log_messages
self.buffer = []
self.is_ok = False
self.reason = ''
# ################################################################################################################################
# ################################################################################################################################
def get_free_port(start=30000):
port = start
while is_port_taken(port):
port += 1
return port
# ################################################################################################################################
# Taken from http://grodola.blogspot.com/2014/04/reimplementing-netstat-in-cpython.html
def is_port_taken(port):
# Zato
from .platform_ import is_linux
if not is_linux:
return False
# psutil
import psutil
# Shortcut for Linux so as not to bind to a socket which in turn means waiting until it's closed by OS
if is_linux:
for conn in psutil.net_connections(kind='tcp'):
if conn.laddr[1] == port and conn.status == psutil.CONN_LISTEN:
return True
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.bind(('', port))
sock.close()
except socket.error as e:
if e.args[0] == errno.EADDRINUSE:
return True
raise
# ################################################################################################################################
def _is_port_ready(port, needs_taken):
taken = is_port_taken(port)
return taken if needs_taken else not taken
# ################################################################################################################################
def _wait_for_port(port, timeout, interval, needs_taken):
port_ready = _is_port_ready(port, needs_taken)
if not port_ready:
start = datetime.utcnow()
wait_until = start + timedelta(seconds=timeout)
while not port_ready:
sleep(interval)
port_ready = _is_port_ready(port, needs_taken)
if datetime.utcnow() > wait_until:
break
return port_ready
# ################################################################################################################################
def wait_for_zato(address, url_path, timeout=60, interval=0.1, needs_log=True):
""" Waits until a Zato server responds.
"""
# Requests
from requests import get as requests_get
# Imported here to avoid circular imports
from zato.common.util.api import wait_for_predicate
# Full URL to check a Zato server under
url = address + url_path
def _predicate_zato_ping(*ignored_args, **ignored_kwargs):
try:
requests_get(url, timeout=interval)
except Exception as e:
if needs_log:
logger.warning('Waiting for `%s` (%s)', url, e)
else:
return True
return wait_for_predicate(_predicate_zato_ping, timeout, interval, address, needs_log=needs_log)
# ################################################################################################################################
def wait_for_zato_ping(address, timeout=60, interval=0.1, needs_log=True):
""" Waits for timeout seconds until address replies to a request sent to /zato/ping.
"""
wait_for_zato(address, '/zato/ping', timeout, interval, needs_log)
# ################################################################################################################################
def wait_until_port_taken(port, timeout=2, interval=0.1):
""" Waits until a given TCP port becomes taken, i.e. a process binds to a TCP socket.
"""
return _wait_for_port(port, timeout, interval, True)
# ################################################################################################################################
def wait_until_port_free(port, timeout=2, interval=0.1):
""" Waits until a given TCP port becomes free, i.e. a process releases a TCP socket.
"""
return _wait_for_port(port, timeout, interval, False)
# ################################################################################################################################
def get_fqdn_by_ip(ip_address, default, log_msg_prefix):
# type: (str, str) -> str
try:
host = socket.gethostbyaddr(ip_address)[0]
return socket.getfqdn(host)
except Exception:
logger.warning('%s exception in FQDN lookup `%s`', log_msg_prefix, format_exc())
return '(unknown-{}-fqdn)'.format(default)
# ################################################################################################################################
def read_from_socket(
ctx, # type: SocketReaderCtx
_utcnow=datetime.utcnow,
_timedelta=timedelta
) -> 'bytes':
""" Reads data from an already connected TCP socket.
"""
# Local aliases
_should_log_messages = ctx.should_log_messages
_log_info = logger.warning
_log_debug = logger.warning
_conn_id = ctx.conn_id
_max_msg_size = ctx.max_msg_size
_read_buffer_size = ctx.read_buffer_size
_recv_timeout = ctx.recv_timeout
_socket_recv = ctx.socket.recv
_socket_settimeout = ctx.socket.settimeout
# Wait for that many seconds
wait_until = _utcnow() + timedelta(seconds=ctx.max_wait_time)
# How many bytes have we read so far
msg_size = 0
# Buffer to accumulate data in
buffer = []
# No data received yet
data = '<initial-no-data>'
# Run the main loop
while _utcnow() < wait_until:
# Check whether reading the data would not exceed our message size limit
new_size = msg_size + _read_buffer_size
if new_size > _max_msg_size:
reason = 'Message would exceed max. size allowed `{}` > `{}`'.format(new_size, _max_msg_size)
raise ValueError(reason)
try:
_socket_settimeout(_recv_timeout)
data = _socket_recv(_read_buffer_size)
if _should_log_messages:
_log_debug('Data received by `%s` (%d) -> `%s`', _conn_id, len(data), data)
except SocketTimeoutException:
# This is fine, we just iterate until wait_until time.
pass
else:
# Some data was received ..
if data:
buffer.append(data)
# .. otherwise, the remote end disconnected so we can end.
break
# If we are here, it means that we have all the data needed so we can just return it now
result = b''.join(buffer)
if _should_log_messages:
_log_info('Returning result from `%s` (%d) -> `%s`', _conn_id, len(result), result)
return result
# ################################################################################################################################
def parse_address(address):
# type: (str) -> (str, int)
# First, let's reverse it in case input contains an IPv6 address ..
address = address[::-1] # type: str
# .. now, split on the first colon to give the information we seek ..
port, host = address.split(':', 1)
# .. reverse the values back
host = host[::-1]
port = port[::-1]
# .. port needs to be an integer ..
port = int(port)
# .. and now we can return the result.
return host, port
# ################################################################################################################################
def get_current_ip():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.settimeout(0)
try:
s.connect(('192.0.2.0', 1))
out = s.getsockname()[0]
except Exception:
out = 'noip-' + uuid4().hex
finally:
s.close()
return out
# ################################################################################################################################
# ################################################################################################################################
class ZatoStreamServer(StreamServer):
# ################################################################################################################################
def shutdown(self):
self.close()
# ################################################################################################################################
# These two methods are reimplemented from gevent.server to make it possible to use SO_REUSEPORT.
@classmethod
def get_listener(self, address, backlog=None, family=None):
if backlog is None:
backlog = self.backlog
return ZatoStreamServer._make_socket(address, backlog=backlog, reuse_addr=self.reuse_addr, family=family)
@staticmethod
def _make_socket(address, backlog=50, reuse_addr=None, family=socket.AF_INET):
sock = socket.socket(family=family)
if reuse_addr is not None:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, reuse_addr)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
try:
sock.bind(address)
except socket.error as e:
strerror = getattr(e, 'strerror', None)
if strerror is not None:
e.strerror = strerror + ': ' + repr(address)
raise
sock.listen(backlog)
sock.setblocking(0)
return sock
# ################################################################################################################################
# ################################################################################################################################
| 11,382
|
Python
|
.py
| 235
| 41.859574
| 130
| 0.474207
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,673
|
config.py
|
zatosource_zato/code/zato-common/src/zato/common/util/config.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from logging import getLogger
from pathlib import Path
from traceback import format_exc
from urllib.parse import parse_qsl, urlparse, urlunparse
# Bunch
from bunch import Bunch
# parse
from parse import PARSE_RE as parse_re
# Zato
from zato.common.api import EnvConfigCtx, EnvVariable, SCHEDULER, Secret_Shadow, URLInfo
from zato.common.const import SECRETS
from zato.common.ext.configobj_ import ConfigObj
from zato.common.util.tcp import get_current_ip
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.crypto.api import CryptoManager
from zato.common.typing_ import any_, strdict
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# Values of these generic attributes may contain query string elements that have to be masked out
mask_attrs = ['address']
# ################################################################################################################################
# ################################################################################################################################
zato_sys_current_ip = 'Zato_Sys_Current_IP'
# ################################################################################################################################
# ################################################################################################################################
def get_url_protocol_from_config_item(use_tls:'bool') -> 'str':
return 'https' if use_tls else 'http'
# ################################################################################################################################
def get_scheduler_api_client_for_server_auth_required(args:'any_') -> 'str':
if not (value := os.environ.get(SCHEDULER.Env.Server_Auth_Required)):
if not (value := getattr(args, 'scheduler_api_client_for_server_auth_required', None)):
value = SCHEDULER.Default_API_Client_For_Server_Auth_Required
return value
# ################################################################################################################################
def get_scheduler_api_client_for_server_username(args:'any_') -> 'str':
if not (value := os.environ.get(SCHEDULER.Env.Server_Username)):
if not (value := getattr(args, 'scheduler_api_client_for_server_username', None)):
value = SCHEDULER.Default_API_Client_For_Server_Username
return value
# ################################################################################################################################
def get_scheduler_api_client_for_server_password(
args:'any_',
cm:'CryptoManager',
*,
initial_password:'str'='',
needs_encrypt:'bool'=False
) -> 'str':
if not (value := os.environ.get(SCHEDULER.Env.Server_Password)):
if not (value := getattr(args, 'scheduler_api_client_for_server_password', None)):
if not (value := initial_password):
value = cm.generate_password()
if needs_encrypt:
value = cm.encrypt(value)
value = value.decode('utf8')
return value
# ################################################################################################################################
def make_name_env_compatible(name:'str') -> 'str':
return name.replace('.', '__').replace('-','__')
# ################################################################################################################################
def get_env_config_ctx(file_path:'str | any_') -> 'EnvConfigCtx':
# Our response that will be populated
out = EnvConfigCtx()
if file_path and isinstance(file_path, str):
file_path = file_path.lower()
if 'server' in file_path:
component = 'Server'
elif 'scheduler' in file_path:
component = 'Scheduler'
elif ('web-admin' in file_path) or ('dashboard' in file_path):
component = 'Dashboard'
elif 'user-conf' in file_path:
component = 'User_Config'
else:
component = 'Unknown'
file_name = Path(file_path).name
else:
component = 'Not_Applicable_component'
file_name = 'Not_Applicable_Zato_file_path'
# Now, we can return the response to our caller
out.component = component
out.file_name = file_name
return out
# ################################################################################################################################
# ################################################################################################################################
def enrich_config_from_environment(file_name:'str', config:'Bunch') -> 'Bunch':
# stdlib
from io import StringIO
# Local variables
_env_key_prefix = EnvVariable.Key_Prefix
# Make the file name use only the characters that an environment variable can have
file_name = make_name_env_compatible(file_name)
# This is the basic context of the configuration file that we are processing ..
ctx = get_env_config_ctx(file_name)
# This is a map of stanzas to key/value pairs that exist in the environment for this stanza
# but do not exist in the stanza itself. These new keys will be merged into each such stanza.
extra = {}
# .. go through each of the stanzas ..
for stanza in config:
# .. make sure the name is what an environment variable would use ..
stanza = make_name_env_compatible(stanza)
# .. populate it because we may need to use it ..
extra[stanza] = {}
# .. find all the keys in the environment that correspond to this stanza ..
env_key_prefix = f'{_env_key_prefix}_{ctx.component}_{ctx.file_name}_{stanza}_'
for env_name, env_value in os.environ.items():
if env_name.startswith(env_key_prefix):
key_name = env_name.replace(env_key_prefix, '')
extra[stanza][key_name] = env_value
# .. build a temporary string object that ConfigObj will parse in a moment below ..
extra_str = StringIO()
for stanza, key_values in extra.items():
_ = extra_str.write(f'[{stanza}]\n')
for key, value in key_values.items():
_ = extra_str.write(f'{key}={value}\n\n')
_ = extra_str.seek(0)
# .. the file has to based to a temporary location now, which is what ConfigObject expects ..
# .. build a new ConfigObj from the extra data, ..
# .. which we need because we want for the ConfigObj's parser ..
# .. to parse string environment variables to Python objects ..
extra_config = ConfigObj(extra_str)
# .. go through all the extra pieces of configuration ..
for stanza, extra_key_values in extra_config.items():
# .. if we have anything new for that stanza ..
if extra_key_values:
# .. do assign it to the original config object ..
orig_stanza = config[stanza]
for new_extra_key, new_extra_value in extra_key_values.items(): # type: ignore
orig_stanza[new_extra_key] = new_extra_value
# .. now, we are ready to return the enriched the configuration to our caller.
return config
# ################################################################################################################################
# ################################################################################################################################
def get_env_config_value(
component:'str',
file_name:'str',
stanza:'str',
key:'str',
use_default:'bool'=True
) -> 'str':
# Local variables
_env_key_prefix = EnvVariable.Key_Prefix
# Make sure the individual components are what an environment variable can use
file_name = make_name_env_compatible(file_name)
stanza = make_name_env_compatible(stanza)
key = make_name_env_compatible(key)
# This is the name of an environment variable that we will be looking up ..
env_name = f'{_env_key_prefix}_{component}_{file_name}_{stanza}_{key}'
# .. use what we find in the environment ..
if value := os.environ.get(env_name, ''):
# .. store an entry that we did find it ..
logger.info('Found env. key -> %s', env_name)
# .. or, optionally, build a default value if there is no such key ..
else:
if use_default:
value = env_name + EnvVariable.Key_Missing_Suffix
# .. now, we can return the value to our caller.
return value
# ################################################################################################################################
def resolve_name(name:'str') -> 'str':
suffix = '.' + zato_sys_current_ip
if isinstance(name, str): # type: ignore
if name.endswith(suffix):
current_ip = get_current_ip()
name = name.replace(zato_sys_current_ip, current_ip)
return name
# ################################################################################################################################
def resolve_value(key, value, decrypt_func=None, _default=object(), _secrets=SECRETS):
""" Resolves final value of a given variable by looking it up in environment if applicable.
"""
# Skip non-resolvable items
if not isinstance(value, str):
return value
if not value:
return value
value = value.decode('utf8') if isinstance(value, bytes) else value
# It may be an environment variable ..
if value.startswith('$'):
# .. but not if it's $$ which is a signal to skip this value ..
if value.startswith('$$'):
return value
# .. a genuine pointer to an environment variable.
else:
# .. we support for exact and upper-case forms ..
env_key = value[1:].strip()
env_key1 = env_key
env_key2 = env_key.upper()
env_keys = [env_key1, env_key2]
for item in env_keys:
value = os.environ.get(item, _default)
if value is not _default:
break
else:
# .. use a placeholder if none of the keys matched
value = 'Env_Key_Missing_{}'.format(env_key)
# It may be an encrypted value
elif key in _secrets.PARAMS and value.startswith(_secrets.PREFIX):
value = decrypt_func(value)
# Pre-processed, we can assign this pair to output
return value
# ################################################################################################################################
def resolve_env_variables(data):
""" Given a Bunch instance on input, iterates over all items and resolves all keys/values to ones extracted
from environment variables.
"""
out = Bunch()
for key, value in data.items():
out[key] = resolve_value(None, value)
return out
# ################################################################################################################################
def _replace_query_string_items(server:'ParallelServer', data:'any_') -> 'str':
# If there is no input, we can return immediately
if not data:
return ''
# Local variables
query_string_new = []
# Parse the data ..
data = urlparse(data)
# .. extract the query string ..
query_string = data.query
query_string = parse_qsl(query_string)
# .. convert the data to a list to make it possible to unparse it later on ..
data = list(data)
# .. replace all the required elements ..
for key, value in query_string:
# .. so we know if we matched something in the inner loops ..
should_continue = True
# .. check exact keys ..
for name in server.sio_config.secret_config.exact:
if key == name:
value = Secret_Shadow
should_continue = False
break
# .. check prefixes ..
if should_continue:
for name in server.sio_config.secret_config.prefixes:
if key.startswith(name):
value = Secret_Shadow
should_continue = should_continue
break
# .. check suffixes ..
if should_continue:
for name in server.sio_config.secret_config.suffixes:
if key.endswith(name):
value = Secret_Shadow
break
# .. if we are here, either it means that the value was replaced ..
# .. or we are going to use as it was because it needed no replacing ..
query_string_new.append(f'{key}={value}')
# .. replace the query string ..
query_string_new = '&'.join(query_string_new)
# .. now, set the query string back ..
data[-2] = query_string_new
# .. build a full address once more ..
data = urlunparse(data)
# .. and return it to our caller.
return data
# ################################################################################################################################
def replace_query_string_items(server:'ParallelServer', data:'any_') -> 'str':
try:
return _replace_query_string_items(server, data)
except Exception:
logger.info('Items could not be masked -> %s', format_exc())
# ################################################################################################################################
def replace_query_string_items_in_dict(server:'ParallelServer', data:'strdict') -> 'None':
# Note that we use a list because we are going to modify the dict in place
for key, value in list(data.items()):
# Add a key with a value that is masked
if key in mask_attrs:
value_masked = str(value)
value_masked = replace_query_string_items(server, value)
key_masked = f'{key}_masked'
data[key_masked] = value_masked
# ################################################################################################################################
def extract_param_placeholders(data:'str') -> 'any_':
# Parse out groups for path parameters ..
groups = parse_re.split(data)
# .. go through each group ..
for group in groups:
# .. if it is a parameter placeholder ..
if group and group[0] == '{':
# .. yield it to our caller.
yield group
# ################################################################################################################################
def get_config_object(repo_location:'str', conf_file:'str') -> 'Bunch | ConfigObj':
# Zato
from zato.common.util import get_config
return get_config(repo_location, conf_file, bunchified=False)
# ################################################################################################################################
def update_config_file(config:'ConfigObj', repo_location:'str', conf_file:'str') -> 'None':
conf_path = os.path.join(repo_location, conf_file)
with open(conf_path, 'wb') as f:
_ = config.write(f)
# ################################################################################################################################
def parse_url_address(address:'str', default_port:'int') -> 'URLInfo':
# Our response to produce
out = URLInfo()
# Extract the details from the address
parsed = urlparse(address)
scheme = parsed.scheme.lower()
use_tls = scheme == 'https'
# If there is no scheme and netloc, the whole address will be something like '10.151.17.19',
# and it will be found under .path actually ..
if (not parsed.scheme) and (not parsed.netloc):
host_port = parsed.path
# .. otherwise, the address will be in the network location.
else:
host_port = parsed.netloc
# We need to split it because we may have a port
host_port = host_port.split(':')
# It will be a two-element list if there is a port ..
if len(host_port) == 2:
host, port = host_port
port = int(port)
# .. otherwise, assume the scheduler's default port ..
else:
host = host_port[0]
port = default_port
# .. populate the response ..
out.address = address
out.host = host
out.port = port
out.use_tls = use_tls
# .. and return it to our caller.
return out
# ################################################################################################################################
# ################################################################################################################################
| 17,571
|
Python
|
.py
| 339
| 44.976401
| 130
| 0.488973
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,674
|
hook.py
|
zatosource_zato/code/zato-common/src/zato/common/util/hook.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.util.api import is_func_overridden
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, callable_
# ################################################################################################################################
class HookTool:
def __init__(self, server, hook_ctx_class, hook_type_to_method, invoke_func):
self.server = server
self.hook_ctx_class = hook_ctx_class
self.hook_type_to_method = hook_type_to_method
self.invoke_func = invoke_func
# ################################################################################################################################
def is_hook_overridden(self, service_name, hook_type):
impl_name = self.server.service_store.name_to_impl_name[service_name]
service_class = self.server.service_store.service_data(impl_name)['service_class']
func_name = self.hook_type_to_method[hook_type]
if func := getattr(service_class, func_name, None):
return is_func_overridden(func)
else:
return False
# ################################################################################################################################
def get_hook_service_invoker(self, service_name:'str', hook_type:'str') -> 'callable_':
""" Returns a function that will invoke ooks or None if a given service does not implement input hook_type.
"""
# Do not continue if we already know that user did not override the hook method
if not self.is_hook_overridden(service_name, hook_type):
return
def _invoke_hook_service(*args:'any_', **kwargs:'any_') -> 'any_':
""" A function to invoke hook services.
"""
ctx = self.hook_ctx_class(hook_type, *args, **kwargs)
response = self.invoke_func(service_name, {'ctx':ctx}, serialize=False)
if not isinstance(response, dict):
response = response.getvalue(serialize=False)
if 'response' in response:
response = response['response']
return response
return _invoke_hook_service
# ################################################################################################################################
| 2,580
|
Python
|
.py
| 45
| 49.533333
| 130
| 0.475963
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,675
|
open_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/open_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# Default encoding used with all text files
default_encoding = 'utf8'
# ################################################################################################################################
if 0:
from zato.common.typing_ import binaryio_, iobytes_, textio_
# ################################################################################################################################
def open_r(path:'str', encoding:'str'=default_encoding) -> 'textio_':
return open(path, 'r', encoding=encoding)
# ################################################################################################################################
def open_rb(path:'str') -> 'binaryio_':
return open(path, 'rb')
# ################################################################################################################################
def open_rw(path:'str', encoding:'str'=default_encoding) -> 'textio_':
return open(path, 'w+', encoding=encoding)
# ################################################################################################################################
def open_w(path:'str', encoding:'str'=default_encoding) -> 'textio_':
return open(path, 'w', encoding=encoding)
# ################################################################################################################################
def open_wb(path:'str') -> 'iobytes_':
return open(path, 'wb')
# ################################################################################################################################
| 1,843
|
Python
|
.py
| 27
| 65.740741
| 130
| 0.288494
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,676
|
exception.py
|
zatosource_zato/code/zato-common/src/zato/common/util/exception.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from traceback import TracebackException
# Zato
from zato.common.version import get_version
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import callnone
# ################################################################################################################################
# ################################################################################################################################
def pretty_format_exception(e:'Exception', cid:'str', utcnow_func:'callnone'=None) -> 'str':
# Response to produce
out = []
focus_character = '·'
focus_marker = focus_character * 3
header1 = f'{focus_marker} Error {focus_marker}'
header2 = f'{focus_marker} Details {focus_marker}'
header3 = f'{focus_marker} Context {focus_marker}'
# Extract the traceback from the exception
tb = TracebackException.from_exception(e)
exc_arg = e.args[0] if e.args else e.args
exc_type = e.__class__.__name__
# For the error summary, we need to last frame in the stack,
# i.e. the one that actually triggered the exception.
frame = tb.stack[-1]
file_path = frame.filename
line_number = frame.lineno
func_name = frame.name
error_line = f'>>> {exc_type}: \'{exc_arg}\''
file_line = f'>>> File "{file_path}", line {line_number}, in {func_name}'
code_line = f'>>> {frame.line}'
details = ''.join(list(tb.format()))
now = utcnow_func() if utcnow_func else datetime.utcnow().isoformat() + ' (UTC)'
out.append(header1)
out.append('\n')
out.append('\n')
out.append(error_line)
out.append('\n')
out.append(file_line)
out.append('\n')
out.append(code_line)
out.append('\n')
out.append('\n')
out.append(header2)
out.append('\n')
out.append('\n')
out.append(details)
out.append('\n')
out.append(header3)
out.append('\n')
out.append('\n')
out.append(cid)
out.append('\n')
out.append(now)
out.append('\n')
out.append(get_version())
out.append('\n')
result = ''.join(out)
result = result.strip()
return result
# ################################################################################################################################
# ################################################################################################################################
| 2,834
|
Python
|
.py
| 68
| 37.279412
| 130
| 0.460695
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,677
|
event.py
|
zatosource_zato/code/zato-common/src/zato/common/util/event.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from collections import deque
from datetime import datetime
from itertools import count
# gevent
from gevent.lock import RLock
# ################################################################################################################################
class default:
max_size = 1000
# ################################################################################################################################
class Event:
""" An individual event emitted to an event log.
"""
__slots__ = 'log_id', 'event_id', 'name', 'timestamp', 'ctx'
def __init__(self, log_id, event_id, name, ctx, _utcnow=datetime.utcnow):
self.log_id = log_id
self.event_id = event_id
self.name = name
self.ctx = ctx
self.timestamp = _utcnow()
def __repr__(self):
return '<{} at {} log:{} id:{} n:{} t:{}>'.format(self.__class__.__name__, hex(id(self)),
self.log_id, self.event_id, self.name, self.timestamp)
def to_dict(self):
return {
'log_id': self.log_id,
'event_id': self.event_id,
'name': self.name,
'timestamp': self.timestamp.isoformat(),
'ctx': None if self.ctx is None else repr(self.ctx)
}
# ################################################################################################################################
class EventLog:
""" A backlog of max_size events of arbitrary nature described by attributes such as ID, name, timestamp and opaque context.
"""
def __init__(self, log_id, max_size=default.max_size):
self.log_id = log_id
self.event_id_counter = count(1)
self.lock = RLock()
self.events = deque(maxlen=max_size)
# ################################################################################################################################
def emit(self, name, ctx=None):
self.events.append(Event(self.log_id, next(self.event_id_counter), name, ctx))
# ################################################################################################################################
def get_event_list(self):
return [elem.to_dict() for elem in self.events]
# ################################################################################################################################
if __name__ == '__main__':
el = EventLog('aaa')
for x in range(1, 50):
el.emit('aaa-{}'.format(x))
print(list(reversed(el.events)))
| 2,754
|
Python
|
.py
| 58
| 41.741379
| 130
| 0.438085
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,678
|
file_transfer.py
|
zatosource_zato/code/zato-common/src/zato/common/util/file_transfer.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
# Zato
from zato.common.util.platform_ import is_non_windows
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import intlist, strlist
# ################################################################################################################################
# ################################################################################################################################
def parse_extra_into_list(data:'str') -> 'intlist':
# type: (str) -> list
return [int(elem.strip()) for elem in data.split(';') if elem]
# ################################################################################################################################
def path_string_to_list(base_dir:'str', data:'str') -> 'strlist':
# A list of path strings to produce
out = []
# A list of path separators to try out
path_sep_list = [',', ';']
# This can be appended only if we are not on Windows where it would mean a drive name separator
if is_non_windows:
path_sep_list.append(':')
# Try to find which path separator should be used, if any at all
path_sep = None
for elem in path_sep_list:
if elem in data:
path_sep = elem
break
# If there is no path separator, it means that there is no multi-element list to build,
# in which case we turn the only string into the resulting list ..
if not path_sep:
path_data = [data]
else:
path_data = data.split(path_sep)
# Remove whitespace for completeness
path_data = [elem.strip() for elem in path_data]
# Now, turn the list into absolute paths
for path in path_data:
if not os.path.isabs(path):
path = os.path.join(base_dir, path)
path = os.path.normpath(path)
out.append(path)
return out
# ################################################################################################################################
def path_string_list_to_list(base_dir:'str', data:'str | strlist') -> 'strlist':
if isinstance(data, str):
return path_string_to_list(base_dir, data)
# A list of path strings to produce
out = []
for elem in data:
result = path_string_to_list(base_dir, elem)
out.extend(result)
return out
# ################################################################################################################################
# ################################################################################################################################
| 2,974
|
Python
|
.py
| 60
| 44.45
| 130
| 0.414964
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,679
|
expiring_dict.py
|
zatosource_zato/code/zato-common/src/zato/common/util/expiring_dict.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
### Original license ###
Copyright (c) 2019 David Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# stdlib
from collections.abc import MutableMapping
from time import time
# gevent
from gevent import sleep
from gevent.threading import Thread, Lock
# SortedContainers
from sortedcontainers import SortedKeyList
# ################################################################################################################################
# ################################################################################################################################
class ExpiringDict(MutableMapping):
def __init__(self, ttl=None, interval=0.100, *args, **kwargs):
self._store = dict(*args, **kwargs)
# self._keys = SortedKeyList(key=lambda x: x[0])
self._keys = SortedKeyList(key=self._sort_func)
self._ttl = ttl
self.impl_lock = Lock()
self._interval = interval
Thread(target=self._worker, daemon=True).start()
# ################################################################################################################################
def _sort_func(self, elem):
# Object 'elem' is a two-element tuple.
# Index 0 is the expiration time.
# Index 1 is the actual object held at that key.
# By using index 0, we can sort objects by their expiration time.
return elem[0]
# ################################################################################################################################
def flush(self):
now = time()
max_index = 0
with self.impl_lock:
for index, (timestamp, key) in enumerate(self._keys):
if timestamp > now: # Break as soon as we find a key whose expiration time is in the future
max_index = index
break
try:
del self._store[key]
except KeyError:
pass # Ignore it if it was deleted early
del self._keys[0:max_index]
# ################################################################################################################################
def _worker(self):
while True:
self.flush()
sleep(self._interval)
# ################################################################################################################################
def get(self, key, default=None):
return self._store.get(key, default)
# ################################################################################################################################
def set(self, key, value, ttl=None):
ttl = ttl or self._ttl
self._set_with_expire(key, value, ttl)
# ################################################################################################################################
def ttl(self, key, value, ttl):
self._set_with_expire(key, value, ttl)
# ################################################################################################################################
def delete(self, key):
_ = self._store.pop(key, None)
# ################################################################################################################################
def _set_with_expire(self, key, value, ttl):
self.impl_lock.acquire()
self._keys.add((time() + ttl, key))
self._store[key] = value
self.impl_lock.release()
# ################################################################################################################################
def __iter__(self):
return iter(self._store)
# ################################################################################################################################
def __len__(self):
return len(self._store)
# ################################################################################################################################
# Methods below are not used
def __delitem__(self, *ignored_args, **ignored_kwargs):
raise NotImplementedError*()
def __getitem__(self, *ignored_args, **ignored_kwargs):
raise NotImplementedError*()
def __setitem__(self, *ignored_args, **ignored_kwargs):
raise NotImplementedError*()
# ################################################################################################################################
# ################################################################################################################################
| 5,747
|
Python
|
.py
| 104
| 49.298077
| 130
| 0.435303
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,680
|
search.py
|
zatosource_zato/code/zato-common/src/zato/common/util/search.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
if 0:
from typing import Callable
Callable = Callable
# ################################################################################################################################
# ################################################################################################################################
_search_attrs = 'num_pages', 'cur_page', 'prev_page', 'next_page', 'has_prev_page', 'has_next_page', 'page_size', 'total'
# ################################################################################################################################
# ################################################################################################################################
class SearchResults:
def __init__(self, q, result, columns, total):
# type: (object, object, object, int) -> None
self.q = q
self.result = result
self.total = total
self.columns = columns # type: list
self.num_pages = 0
self.cur_page = 0
self.prev_page = 0
self.next_page = 0
self.has_prev_page = False
self.has_next_page = False
self.page_size = None # type: int
# ################################################################################################################################
def __iter__(self):
return iter(self.result)
# ################################################################################################################################
def __repr__(self):
# To avoice circular imports - this is OK because we very rarely repr(self) anyway
from zato.common.util.api import make_repr
return make_repr(self)
# ################################################################################################################################
def set_data(self, cur_page, page_size):
num_pages, rest = divmod(self.total, page_size)
# Apparently there are some results in rest that did not fit a full page
if rest:
num_pages += 1
self.num_pages = num_pages
self.cur_page = cur_page + 1 # Adding 1 because, again, the external API is 1-indexed
self.prev_page = self.cur_page - 1 if self.cur_page > 1 else 0
self.next_page = self.cur_page + 1 if self.cur_page < self.num_pages else None
self.has_prev_page = self.prev_page >= 1
self.has_next_page = bool(self.next_page and self.next_page <= self.num_pages) or False
self.page_size = page_size
# ################################################################################################################################
@staticmethod
def from_list(
data_list, # type: list
cur_page, # type: int
page_size, # type: int
needs_sort=False, # type: bool
post_process_func=None, # type: Callable
sort_key=None, # type: object
needs_reverse=True # type: bool
):
cur_page = cur_page - 1 if cur_page else 0 # We index lists from 0
# Set it here because later on it may be shortened to the page_size of elements
total = len(data_list)
# If we get here, we must have collected some data at all
if data_list:
# We need to sort the output ..
if needs_sort:
data_list.sort(key=sort_key, reverse=needs_reverse)
# .. the output may be already sorted but we may perhaps need to reverse it.
else:
if needs_reverse:
data_list.reverse()
start = cur_page * page_size
end = start + page_size
data_list = data_list[start:end]
if post_process_func:
post_process_func(data_list)
search_results = SearchResults(None, data_list, None, total)
search_results.set_data(cur_page, page_size)
return search_results
# ################################################################################################################################
def to_dict(self, _search_attrs=_search_attrs):
out = {}
out['result'] = self.result
for name in _search_attrs:
out[name] = getattr(self, name, None)
return out
# ################################################################################################################################
# ################################################################################################################################
| 4,951
|
Python
|
.py
| 90
| 47.177778
| 130
| 0.389855
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,681
|
posix_ipc_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/posix_ipc_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime, timedelta
from logging import getLogger
from mmap import mmap
from time import sleep
from traceback import format_exc
try:
import posix_ipc as ipc
except ImportError:
# Ignore it under Windows
pass
# Zato
from zato.common.json_internal import dumps, loads
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, strordict
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
_shmem_pattern = '/zm{}'
# ################################################################################################################################
# ################################################################################################################################
class SharedMemoryIPC:
""" An IPC object which Zato processes use to communicate with each other using mmap files
backed by shared memory. All data in shared memory is kept as a dictionary and serialized as JSON
each time any read or write is needed.
"""
key_name = '<invalid>'
def __init__(self):
self.shmem_name = ''
self.size = -1
self._mmap = None
self.running = False
self._mem = None
# ################################################################################################################################
def create(self, shmem_suffix, size, needs_create):
""" Creates all IPC structures.
"""
self.shmem_name = _shmem_pattern.format(shmem_suffix)[:30]
self.size = size
# Create or read share memory
logger.debug('%s shmem `%s` (%s %s)', 'Creating' if needs_create else 'Opening', self.shmem_name,
self.size, self.key_name)
try:
self._mem = ipc.SharedMemory(self.shmem_name, ipc.O_CREAT if needs_create else 0, size=self.size)
except ipc.ExistentialError:
raise ValueError('Could not create shmem `{}` ({}), e:`{}`'.format(self.shmem_name, self.key_name, format_exc()))
# Map memory to mmap
self._mmap = mmap(self._mem.fd, self.size)
# Write initial data so that JSON .loads always succeeds
self.store_initial()
self.running = True
# ################################################################################################################################
def store(self, data):
""" Serializes input data as JSON and stores it in RAM, overwriting any previous data.
"""
self._mmap.seek(0)
self._mmap.write(dumps(data).encode('utf8'))
self._mmap.flush()
# ################################################################################################################################
def store_initial(self):
""" Stores initial data in shmem unless there is already data in there.
"""
if self.load(False):
return
else:
self.store({})
# ################################################################################################################################
def load(self, needs_loads=True):
""" Reads in all data from RAM and, optionally, loads it as JSON.
"""
self._mmap.seek(0)
data = self._mmap.read(self.size).strip(b'\x00')
return loads(data.decode('utf8')) if needs_loads else data
# ################################################################################################################################
def close(self):
""" Closes all underlying in-RAM structures.
"""
if not self.running:
logger.debug('Skipped close, IPC not running (%s)', self.key_name)
return
else:
logger.info('Closing IPC (%s)', self.key_name)
self._mmap.close()
try:
self._mem.unlink()
except ipc.ExistentialError:
pass
# ################################################################################################################################
def get_parent(self, parent_path, needs_data=True):
""" Returns element pointed to by parent_path, creating all elements along the way, if neccessary.
"""
data = self.load()
parent_path = [elem for elem in parent_path.split('/') if elem]
# Find or create element that is parent of input key
current = data
while parent_path:
next = parent_path.pop(0)
current = current.setdefault(next, {})
return (data, current) if needs_data else current
# ################################################################################################################################
def set_key(self, parent, key, value):
""" Set key to value under element called 'parent'.
"""
# Get parent to add our key to - will create it if needed
data, parent = self.get_parent(parent)
# Set key to value
parent[key] = value
# Save it all back
self.store(data)
# ################################################################################################################################
def _get_key(self, parent, key):
""" Low-level implementation of get_key which does not handle timeouts.
"""
parent = self.get_parent(parent, False)
return parent[key]
# ################################################################################################################################
def get_key(self, parent, key, timeout=None, _sleep=sleep, _utcnow=datetime.utcnow):
""" Returns a specific key from parent dictionary.
"""
try:
return self._get_key(parent, key)
except KeyError:
if timeout:
now = _utcnow()
start = now
until = now + timedelta(seconds=timeout)
idx = 0
while now <= until:
try:
value = self._get_key(parent, key)
if value:
msg = 'Returning value `%s` for parent/key `%s` `%s` after %s'
logger.info(msg, value, parent, key, now - start)
return value
except KeyError:
_sleep(0.1)
idx += 1
if idx % 10 == 0:
logger.info('Waiting for parent/key `%s` `%s` (timeout: %ss)', parent, key, timeout)
now = _utcnow()
# We get here if we did not return the key within timeout seconds,
# in which case we need to log an error and raise an exception.
# Same message for logger and exception
msg = 'Could not get parent/key `{}` `{}` after {}s'.format(parent, key, timeout)
logger.warning(msg)
raise KeyError(msg)
# No exception = re-raise exception immediately
else:
raise
# ################################################################################################################################
# ################################################################################################################################
class ServerStartupIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for server startup initialization.
"""
key_name = '/pubsub/pid'
def create(self, deployment_key:'str', size:int, needs_create:'bool'=True) -> 'None':
super(ServerStartupIPC, self).create('s{}'.format(deployment_key), size, needs_create)
def set_pubsub_pid(self, pid:'int') -> 'None':
self.set_key(self.key_name, 'current', pid)
def get_pubsub_pid(self, timeout:'int'=60) -> 'any_':
return self.get_key(self.key_name, 'current', timeout)
# ################################################################################################################################
# ################################################################################################################################
class ConnectorConfigIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for configuration of subprocess-based containers.
"""
needs_create = False
key_name = '/connector/config'
def create(self, deployment_key, size, needs_create=True):
super(ConnectorConfigIPC, self).create('c{}'.format(deployment_key), size, needs_create)
def set_config(self, connector_key, config):
self.set_key(self.key_name, connector_key, config)
def get_config(self, connector_key:'str', timeout:'int'=60, as_dict:'bool'=False) -> 'strordict':
response = self.get_key(self.key_name, connector_key, timeout)
if response:
return loads(response) if as_dict else response
# ################################################################################################################################
# ################################################################################################################################
class CommandStoreIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for CLI commands used by Zato.
"""
needs_create = False
key_name = '/cli/command/store'
def create(self, size=100_000, needs_create=True):
super(CommandStoreIPC, self).create('i', size, needs_create)
def add_parser(self, parser_data):
self.set_key(self.key_name, 'parser', parser_data)
def get_config(self, timeout=3):
return self.get_key(self.key_name, 'parser', timeout)
# ################################################################################################################################
# ################################################################################################################################
| 10,796
|
Python
|
.py
| 200
| 45.5
| 130
| 0.41578
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,682
|
cli.py
|
zatosource_zato/code/zato-common/src/zato/common/util/cli.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from json import dumps
import select
import sys
# gevent
from gevent import sleep
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sh import RunningCommand
from zato.cli import ServerAwareCommand
from zato.common.typing_ import any_, anydict, anylist, stranydict
# ################################################################################################################################
# ################################################################################################################################
class CommandName:
# This is the default name if $PATH is populated
Default = 'zato'
# This is the default path based on .deb / .rpm installers,
# in case $PATH is not populated.
PackageFullPath = '/opt/zato/current/bin/zato'
# ################################################################################################################################
# ################################################################################################################################
def get_zato_sh_command(command_name:'str'=CommandName.Default) -> 'RunningCommand':
# sh
import sh
from sh import CommandNotFound
try:
command = getattr(sh, command_name) # type: ignore
return command
except CommandNotFound:
# In case we were using the default name, let's try again with a fallback one ..
if command_name == CommandName.Default:
command = getattr(sh, CommandName.PackageFullPath)
return command
# .. otherwise, re-raise the exception as we are not sure what to do otherwise.
else:
raise
# ################################################################################################################################
# ################################################################################################################################
def read_stdin_data(strip=True):
""" Reads data from sys.stdin without blocking the caller - in its current form (using select),
it will work only on Linux and OS X.
"""
# This function is not support under Windows
if sys.platform.startswith('win32'):
return ''
# Note that we check only sys.stdin for read and that there is no timeout,
# because we expect for sys.stdin to be available immediately when we run.
to_read, _, _ = select.select([sys.stdin], [], [], 0)
if to_read:
data = to_read[0].readline()
out = data.strip() if strip else data
else:
out = ''
return out
# ################################################################################################################################
# ################################################################################################################################
class CommandLineInvoker:
def __init__(
self,
expected_stdout=b'', # type: bytes
check_stdout=True, # type: bool
check_exit_code=True, # type: bool
server_location='' # type: str
) -> 'None':
# Imported here to rule out circular references
from zato.common.test.config import TestConfig
self.check_stdout = check_stdout
self.check_exit_code = check_exit_code
self.expected_stdout = expected_stdout or TestConfig.default_stdout
self.server_location = server_location or TestConfig.server_location
# ################################################################################################################################
def _assert_command_line_result(self, out:'RunningCommand') -> 'None':
if self.check_exit_code:
if out.exit_code != 0:
raise ValueError(f'Exit code should be 0 instead `{out.exit_code}`')
if self.check_stdout:
if out.stdout != self.expected_stdout:
raise ValueError(f'Stdout should {self.expected_stdout} instead of {out.stdout}')
# ################################################################################################################################
def invoke_cli(self, cli_params:'anylist', command_name:'str'=CommandName.Default) -> 'RunningCommand':
command = get_zato_sh_command(command_name)
out = command(*cli_params)
return out
# ################################################################################################################################
# ################################################################################################################################
class CommandLineServiceInvoker(CommandLineInvoker):
def invoke(self, service:'str', request:'anydict') -> 'any_':
cli_params = []
cli_params.append('service')
cli_params.append('invoke')
if request:
request = dumps(request)
cli_params.append('--payload')
cli_params.append(request)
cli_params.append(self.server_location)
cli_params.append(service)
return self.invoke_cli(cli_params)
# ################################################################################################################################
def invoke_and_test(self, service:'str') -> 'any_':
out = self.invoke(service, {})
self._assert_command_line_result(out)
return out
# ################################################################################################################################
# ################################################################################################################################
class _AuthManager:
# A CLI command on whose behalf we run
command: 'ServerAwareCommand'
# Is the definition active upon creation
is_active: 'bool'
# API service to invoke to create a new definition
create_service: 'str'
# API service to invoke to change password of the newly created definition
change_password_service: 'str'
name: 'str'
password: 'str'
def __init__(self, command:'ServerAwareCommand', name:'str', is_active:'bool', password:'str') -> 'None':
self.command = command
self.name = name
self.is_active = is_active
self.password = password
# ################################################################################################################################
def _create(self, create_request:'stranydict', needs_stdout:'bool'=False) -> 'stranydict':
# This will create a new definition and, in the next step, we will change its password.
create_response = self.command._invoke_service(self.create_service, create_request)
if needs_stdout:
self.command._log_response(create_response, needs_stdout=needs_stdout)
# Wait a moment to make sure that the definition has been created
sleep(0.5)
# Change the newly created definition's password
self._change_password(self.name, self.password, False)
return create_response
# ################################################################################################################################
def _change_password(self, name:'str', password:'str', needs_stdout:'bool'=False) -> 'stranydict':
# API request to send to create a new definition
change_password_request = {
'name': name,
'password1': password,
'password2': password,
}
# Change the password
self.command._invoke_service_and_log_response(
self.change_password_service,
change_password_request,
needs_stdout=needs_stdout
)
# ################################################################################################################################
# ################################################################################################################################
class BasicAuthManager(_AuthManager):
create_service = 'zato.security.basic-auth.create'
change_password_service = 'zato.security.basic-auth.change-password'
def __init__(
self,
command:'ServerAwareCommand',
name:'str',
is_active:'bool',
username:'str',
realm:'str',
password:'str'
) -> 'None':
super().__init__(command, name, is_active, password)
self.username = username
self.realm = realm
# ################################################################################################################################
def create(self, needs_stdout:'bool'=False) -> 'stranydict':
# API request to send to create a new definition
create_request = {
'name': self.name,
'realm': self.realm,
'username': self.username,
'password': self.password,
'is_active': self.is_active,
}
return self._create(create_request, needs_stdout)
# ################################################################################################################################
def change_password(self, needs_stdout:'bool'=False) -> 'stranydict':
return self._change_password(self.name, self.password, needs_stdout)
# ################################################################################################################################
# ################################################################################################################################
class APIKeyManager(_AuthManager):
create_service = 'zato.security.apikey.create'
change_password_service = 'zato.security.apikey.change-password'
def __init__(
self,
command:'ServerAwareCommand',
name:'str',
is_active:'bool',
header:'str',
key:'str'
) -> 'None':
super().__init__(command, name, is_active, key)
self.header = header
self.key = key
# ################################################################################################################################
def create(self, needs_stdout:'bool'=False) -> 'stranydict':
# API request to send to create a new definition
create_request = {
'name': self.name,
'username': self.header,
'password': self.key,
'is_active': self.is_active,
}
return self._create(create_request, needs_stdout)
# ################################################################################################################################
# ################################################################################################################################
| 11,132
|
Python
|
.py
| 210
| 45.904762
| 130
| 0.425434
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,683
|
api.py
|
zatosource_zato/code/zato-common/src/zato/common/util/api.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import ast
import copy
import errno
import gc
import importlib.util
import inspect
import linecache
import logging
import os
import random
import re
import signal
import threading
import socket
import sys
import unicodedata
from ast import literal_eval
from base64 import b64decode
from binascii import hexlify as binascii_hexlify
from contextlib import closing
from datetime import datetime, timedelta
from getpass import getuser as getpass_getuser
from glob import glob
from hashlib import sha256
from inspect import isfunction, ismethod
from itertools import tee, zip_longest
from io import StringIO
from logging.config import dictConfig
from operator import itemgetter
from os.path import abspath, isabs, join
from pathlib import Path
from pprint import pprint as _pprint, PrettyPrinter
from string import Template
from subprocess import Popen, PIPE
from tempfile import NamedTemporaryFile, gettempdir
from threading import current_thread
from time import sleep
from traceback import format_exc
from uuid import uuid4
# Bunch
from bunch import Bunch, bunchify
# ciso8601
try:
from ciso8601 import parse_datetime # type: ignore
except ImportError:
from dateutil.parser import parse as parse_datetime
# This is a forward declaration added for the benefit of other callers
parse_datetime = parse_datetime
# datetutil
from dateutil.parser import parse as dt_parse
# gevent
from gevent import sleep as gevent_sleep, spawn, Timeout
from gevent.greenlet import Greenlet
from gevent.hub import Hub
# lxml
from lxml import etree
# OpenSSL
from OpenSSL import crypto
# portalocker
try:
import portalocker
has_portalocker = True
except ImportError:
has_portalocker = False
# pytz
import pytz
# requests
import requests
# SQLAlchemy
import sqlalchemy as sa
from sqlalchemy import orm
# Texttable
from texttable import Texttable
# YAML
import yaml
# Python 2/3 compatibility
from builtins import bytes
from zato.common.ext.future.utils import iteritems, raise_
from zato.common.py23_.past.builtins import basestring, cmp, reduce, unicode
from six import PY3
from six.moves.urllib.parse import urlparse
from zato.common.py23_ import ifilter, izip
from zato.common.py23_.spring_ import CAValidatingHTTPSConnection, SSLClientTransport
if PY3:
from functools import cmp_to_key
# Zato
from zato.common.api import CHANNEL, CLI_ARG_SEP, DATA_FORMAT, engine_def, engine_def_sqlite, HL7, KVDB, MISC, \
SECRET_SHADOW, SIMPLE_IO, TLS, TRACE1, zato_no_op_marker, ZATO_NOT_GIVEN, ZMQ
from zato.common.broker_message import SERVICE
from zato.common.const import SECRETS, ServiceConst
from zato.common.crypto.api import CryptoManager
from zato.common.exception import ZatoException
from zato.common.ext.configobj_ import ConfigObj
from zato.common.ext.validate_ import is_boolean, is_integer, VdtTypeError
from zato.common.json_internal import dumps, loads
from zato.common.odb.model import Cluster, HTTPBasicAuth, HTTPSOAP, Server
from zato.common.util.config import enrich_config_from_environment
from zato.common.util.tcp import get_free_port, is_port_taken, wait_for_zato_ping, wait_until_port_free, wait_until_port_taken
from zato.common.util.eval_ import as_bool, as_list
from zato.common.util.file_system import fs_safe_name, fs_safe_now
from zato.common.util.logging_ import ColorFormatter
from zato.common.util.open_ import open_r, open_w
from zato.hl7.parser import get_payload_from_request as hl7_get_payload_from_request
# ################################################################################################################################
if 0:
from typing import Iterable as iterable
from zato.client import ZatoClient
from zato.common.typing_ import any_, anydict, callable_, dictlist, intlist, listnone, strlist, strlistnone, strnone, strset
iterable = iterable
# ################################################################################################################################
random.seed()
# ################################################################################################################################
logger = logging.getLogger(__name__)
logging.addLevelName(TRACE1, 'TRACE1')
_repr_template = Template('<$class_name at $mem_loc$attrs>')
_uncamelify_re = re.compile(r'((?<=[a-z])[A-Z]|(?<!\A)[A-Z](?=[a-z]))')
_epoch = datetime.utcfromtimestamp(0) # Start of UNIX epoch
cid_symbols = '0123456789abcdefghjkmnpqrstvwxyz'
encode_cid_symbols = {idx: elem for (idx, elem) in enumerate(cid_symbols)}
cid_base = len(cid_symbols)
# ################################################################################################################################
# For pyflakes
ColorFormatter = ColorFormatter
fs_safe_now = fs_safe_now
# ################################################################################################################################
asbool = as_bool
aslist = as_list
# ################################################################################################################################
_data_format_json = DATA_FORMAT.JSON
_data_format_json_like = DATA_FORMAT.JSON, DATA_FORMAT.DICT
_data_format_hl7_v2 = HL7.Const.Version.v2.id
# ################################################################################################################################
# Kept here for backward compatibility
get_free_port = get_free_port
is_port_taken = is_port_taken
wait_until_port_free = wait_until_port_free
wait_until_port_taken = wait_until_port_taken
# ################################################################################################################################
class ModuleCtx:
PID_To_Port_Pattern = 'zato-ipc-port-{cluster_name}-{server_name}-{pid}.txt'
# ################################################################################################################################
# We can initialize it once per process here
_hostname = socket.gethostname()
_fqdn = socket.getfqdn()
_current_host = '{}/{}'.format(_hostname, _fqdn)
_current_user = getpass_getuser()
# ################################################################################################################################
TLS_KEY_TYPE = {
crypto.TYPE_DSA: 'DSA',
crypto.TYPE_RSA: 'RSA'
}
# ################################################################################################################################
def is_encrypted(data:'str | bytes') -> 'bool':
# Zato
from zato.common.const import SECRETS
if isinstance(data, bytes):
data = data.decode('utf8')
elif not isinstance(data, str):
data = str(data)
result = data.startswith(SECRETS.Encrypted_Indicator)
return result
# ################################################################################################################################
def is_method(class_, func=isfunction if PY3 else ismethod):
return func(class_)
# ################################################################################################################################
def absjoin(base, path):
""" Turns a path into an absolute path if it's relative to the base location. If the path is already an absolute path,
it is returned as-is.
"""
if isabs(path):
return path
return abspath(join(base, path))
# ################################################################################################################################
def absolutize(path, base=''):
""" Turns a relative path into an absolute one or returns it as is if it's already absolute.
"""
if not isabs(path):
path = os.path.expanduser(path)
if not isabs(path):
path = os.path.normpath(os.path.join(base, path))
return path
# ################################################################################################################################
def current_host():
return _current_host
# ################################################################################################################################
def current_user(_getpass_getuser=getpass_getuser):
return _getpass_getuser()
# ################################################################################################################################
def pprint(obj):
""" Pretty-print an object into a string buffer.
"""
# Get dicts' items.
if hasattr(obj, 'items'):
obj = sorted(obj.items())
buf = StringIO()
_pprint(obj, buf)
value = buf.getvalue()
buf.close()
return value
# ################################################################################################################################
def get_zato_command():
""" Returns the full path to the 'zato' command' in a buildout environment.
"""
return os.path.join(os.path.dirname(sys.executable), 'zato')
# ################################################################################################################################
def object_attrs(_object, ignore_double_underscore, to_avoid_list, sort):
attrs = dir(_object)
if ignore_double_underscore:
attrs = ifilter(lambda elem: not elem.startswith('__'), attrs)
_to_avoid_list = getattr(_object, to_avoid_list, None) # Don't swallow exceptions
if _to_avoid_list is not None:
attrs = ifilter(lambda elem: not elem in _to_avoid_list, attrs)
if sort:
attrs = sorted(attrs)
return attrs
# ################################################################################################################################
def make_repr(_object, ignore_double_underscore=True, to_avoid_list='repr_to_avoid', sort=True):
""" Makes a nice string representation of an object, suitable for logging purposes.
"""
attrs = object_attrs(_object, ignore_double_underscore, to_avoid_list, sort)
buff = StringIO()
for attr in attrs:
attr_obj = getattr(_object, attr)
if not callable(attr_obj):
buff.write('; %s:%r' % (attr, attr_obj))
out = _repr_template.safe_substitute(
class_name=_object.__class__.__name__, mem_loc=hex(id(_object)), attrs=buff.getvalue())
buff.close()
return out
# ################################################################################################################################
def to_form(_object):
""" Reads public attributes of an object and creates a dictionary out of it;
handy for providing initial data to a Django form which isn't backed by
a true Django model.
"""
out = {}
attrs = object_attrs(_object, True, 'repr_to_avoid', False)
for attr in attrs:
out[attr] = getattr(_object, attr)
return out
# ################################################################################################################################
def get_lb_client(is_tls_enabled, lb_host, lb_agent_port, ssl_ca_certs, ssl_key_file, ssl_cert_file, timeout):
""" Returns an SSL XML-RPC client to the load-balancer.
"""
from zato.agent.load_balancer.client import LoadBalancerAgentClient, TLSLoadBalancerAgentClient
http_proto = 'https' if is_tls_enabled else 'http'
agent_uri = '{}://{}:{}/RPC2'.format(http_proto, lb_host, lb_agent_port)
if is_tls_enabled:
if sys.version_info >= (2, 7):
class Python27CompatTransport(SSLClientTransport):
def make_connection(self, host):
return CAValidatingHTTPSConnection(
host, strict=self.strict, ca_certs=self.ca_certs,
keyfile=self.keyfile, certfile=self.certfile, cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version, timeout=self.timeout)
transport = Python27CompatTransport
else:
transport = None
return TLSLoadBalancerAgentClient(
agent_uri, ssl_ca_certs, ssl_key_file, ssl_cert_file, transport=transport, timeout=timeout)
else:
return LoadBalancerAgentClient(agent_uri)
# ################################################################################################################################
def tech_account_password(password_clear, salt):
    return sha256(password_clear + ':' + salt).hexdigest()
# ################################################################################################################################
def new_cid(bytes:'int'=12, needs_padding:'bool'=False, _random:'callable_'=random.getrandbits) -> 'str':
""" Returns a new 96-bit correlation identifier. It is not safe to use the ID
for any cryptographical purposes; it is only meant to be used as a conveniently
formatted ticket attached to each of the requests processed by Zato servers.
"""
# Note that we need to convert bytes to bits here ..
out = hex(_random(bytes * 8))[2:]
# .. and that we optionally ensure it is always 24 characters on output ..
if needs_padding:
out = out.ljust(24, 'a')
# .. return the output to the caller.
return out
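# An illustrative sketch of new_cid - the output is random, so the checks below
# are about length only; without padding, the value may be shorter than
# 24 characters if the random number happens to have leading zeros:
#
# >>> cid = new_cid()
# >>> len(cid) <= 24
# True
# >>> len(new_cid(needs_padding=True))
# 24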
# ################################################################################################################################
def get_user_config_name(name:'str') -> 'str':
items = name.split(os.sep)
file_name = items[-1]
return file_name.split('.')[0]
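# An illustrative sketch of get_user_config_name - the path below is hypothetical
# and a POSIX os.sep is assumed:
#
# >>> get_user_config_name('/opt/zato/env/config/user.conf')
# 'user'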
# ################################################################################################################################
def _get_config(
*,
conf, # type: ConfigObj
config_name, # type: str
bunchified, # type: bool
needs_user_config, # type: bool
repo_location=None # type: strnone
) -> 'Bunch | ConfigObj':
conf = bunchify(conf) if bunchified else conf # type: ignore
if needs_user_config:
conf.user_config_items = {} # type: ignore
user_config = conf.get('user_config')
if user_config:
for name, path in user_config.items(): # type: ignore
path = absolutize(path, repo_location)
if not os.path.exists(path):
logger.warning('User config not found `%s`, name:`%s`', path, name)
else:
user_conf = ConfigObj(path)
user_conf = bunchify(user_conf) if bunchified else user_conf
conf.user_config_items[name] = user_conf # type: ignore
# At this point, we have a Bunch instance that contains
# the contents of the file. Now, we need to enrich it
# with values that may be potentially found in the environment.
if isinstance(conf, Bunch):
conf = enrich_config_from_environment(config_name, conf) # type: ignore
return conf # type: ignore
# ################################################################################################################################
def get_config(
repo_location, # type: str
config_name, # type: str
bunchified=True, # type: bool
needs_user_config=True, # type: bool
crypto_manager=None, # type: CryptoManager | None
secrets_conf=None, # type: any_
raise_on_error=False, # type: bool
log_exception=True, # type: bool
require_exists=True, # type: bool
conf_location=None # type: strnone
) -> 'Bunch | ConfigObj':
""" Returns the configuration object. Will load additional user-defined config files, if any are available.
"""
# Default output to produce
result = Bunch()
try:
if not conf_location:
conf_location = os.path.join(repo_location, config_name)
conf_location = os.path.abspath(conf_location)
if require_exists:
if not os.path.exists(conf_location):
                raise Exception(f'Path does not exist -> `{conf_location}`')
logger.info('Getting configuration from `%s`', conf_location)
conf = ConfigObj(conf_location, zato_crypto_manager=crypto_manager, zato_secrets_conf=secrets_conf)
result = _get_config(
conf=conf,
config_name=config_name,
bunchified=bunchified,
needs_user_config=needs_user_config,
repo_location=repo_location
)
except Exception:
if log_exception:
logger.warning('Error while reading %s from %s; e:`%s`', config_name, repo_location, format_exc())
if raise_on_error:
raise
else:
return result
else:
return result
# ################################################################################################################################
def get_config_from_file(conf_location, config_name):
return get_config(repo_location=None, config_name=config_name, conf_location=conf_location)
# ################################################################################################################################
def set_up_logging(repo_location:'str') -> 'None':
with open_r(os.path.join(repo_location, 'logging.conf')) as f:
dictConfig(yaml.load(f, yaml.FullLoader))
# ################################################################################################################################
def get_config_from_string(data):
""" A simplified version of get_config which creates a config object from string, skipping any user-defined config files.
"""
buff = StringIO()
_ = buff.write(data)
_ = buff.seek(0)
conf = ConfigObj(buff)
out = _get_config(conf=conf, config_name='', bunchified=True, needs_user_config=False)
buff.close()
return out
# ################################################################################################################################
def get_current_user():
return _current_user
# ################################################################################################################################
def service_name_from_impl(impl_name):
""" Turns a Zato internal service's implementation name into a shorter
service name
"""
return impl_name.replace('server.service.internal.', '')
# ################################################################################################################################
def deployment_info(method, object_, timestamp, fs_location, remote_host='', remote_user='', should_deploy_in_place=False):
""" Returns a JSON document containing information who deployed a service
onto a server, where from and when it was.
"""
return {
'method': method,
'object': object_,
'timestamp': timestamp,
        'fs_location': fs_location,
'remote_host': remote_host or os.environ.get('SSH_CONNECTION', ''),
'remote_user': remote_user,
'current_host': current_host(),
'current_user': get_current_user(),
'should_deploy_in_place': should_deploy_in_place
}
# ################################################################################################################################
def get_body_payload(body):
body_children_count = body[0].countchildren()
if body_children_count == 0:
body_payload = None
elif body_children_count == 1:
body_payload = body[0].getchildren()[0]
else:
body_payload = body[0].getchildren()
return body_payload
# ################################################################################################################################
def payload_from_request(json_parser, cid, request, data_format, transport, channel_item=None):
""" Converts a raw request to a payload suitable for usage with SimpleIO.
"""
if request is not None:
#
# JSON and dicts
#
if data_format in _data_format_json_like:
if not request:
return ''
if isinstance(request, basestring) and data_format == _data_format_json:
try:
request_bytes = request if isinstance(request, bytes) else request.encode('utf8')
try:
payload = json_parser.parse(request_bytes)
except ValueError:
payload = request_bytes
if hasattr(payload, 'as_dict'):
payload = payload.as_dict()
except ValueError:
logger.warning('Could not parse request as JSON:`%s`, (%s), e:`%s`', request, type(request), format_exc())
raise
else:
payload = request
#
# HL7 v2
#
elif data_format == _data_format_hl7_v2:
payload = hl7_get_payload_from_request(
request,
channel_item['data_encoding'],
channel_item['hl7_version'],
channel_item['json_path'],
channel_item['should_parse_on_input'],
channel_item['should_validate']
)
#
# Other data formats
#
else:
payload = request
else:
payload = request
return payload
# ################################################################################################################################
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
# ################################################################################################################################
def splitext(path):
"""Like os.path.splitext, but take off .tar too"""
base, ext = os.path.splitext(path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
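# An illustrative sketch of how splitext treats .tar archives:
#
# >>> splitext('/tmp/archive.tar.gz')
# ('/tmp/archive', '.tar.gz')
# >>> splitext('/tmp/module.py')
# ('/tmp/module', '.py')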
# ################################################################################################################################
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
ext = splitext(name)[1].lower()
if ext in ARCHIVE_EXTENSIONS:
return True
return False
# ################################################################################################################################
def is_python_file(name):
""" Is it a Python file we can import Zato services from?
"""
    for suffix in ('py', 'pyw'):
if name.endswith(suffix):
return True
# ################################################################################################################################
# ################################################################################################################################
class _DummyLink:
""" A dummy class for staying consistent with pip's API in certain places
below.
"""
def __init__(self, url):
self.url = url
# ################################################################################################################################
# ################################################################################################################################
class ModuleInfo:
def __init__(self, file_name, module):
self.file_name = file_name
self.module = module
# ################################################################################################################################
def import_module_from_path(file_name, base_dir=None):
if not os.path.isabs(file_name):
file_name = os.path.normpath(os.path.join(base_dir, file_name))
if not os.path.exists(file_name):
raise ValueError('Module could not be imported, path:`{}` does not exist'.format(file_name))
_, mod_file = os.path.split(file_name)
mod_name, _ = os.path.splitext(mod_file)
# Delete compiled bytecode if it exists so that importlib actually picks up the source module
    for suffix in ('c', 'o'):
path = file_name + suffix
if os.path.exists(path):
os.remove(path)
spec = importlib.util.spec_from_file_location(mod_name, file_name)
mod = importlib.util.module_from_spec(spec)
sys.modules[mod_name] = mod
spec.loader.exec_module(mod)
return ModuleInfo(file_name, mod)
# ################################################################################################################################
def visit_py_source(
dir_name, # type: str
order_patterns=None # type: strlistnone
) -> 'any_':
# We enter here if we are not given any patterns on input ..
if not order_patterns:
# .. individual Python files will be deployed in this order ..
order_patterns = [
'common*',
'util*',
'model*',
'*'
]
    # .. now, append the .py extension to each item so that we can find such files below ..
for idx, elem in enumerate(order_patterns):
new_item = f'{elem}.py'
order_patterns[idx] = new_item
# For storing names of files that we have already deployed,
# to ensure that there will be no duplicates.
already_visited:'strset' = set()
# .. append the default ones, unless they are already there ..
for default in ['*.py', '*.pyw']:
if default not in order_patterns:
order_patterns.append(default)
for pattern in order_patterns:
pattern = pattern.strip()
glob_path = os.path.join(dir_name, pattern)
for py_path in sorted(glob(glob_path)):
if py_path in already_visited:
continue
else:
already_visited.add(py_path)
yield py_path
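# An illustrative sketch of the resulting deployment order - the directory and
# file names are hypothetical. Given api.py, common_defs.py and model_user.py
# in a directory, the default patterns yield:
#
# >>> list(visit_py_source('/path/to/services'))  # doctest: +SKIP
# ['/path/to/services/common_defs.py',
#  '/path/to/services/model_user.py',
#  '/path/to/services/api.py']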
# ################################################################################################################################
def _os_remove(path):
""" A helper function so it's easier to mock it in unittests.
"""
return os.remove(path)
# ################################################################################################################################
def hot_deploy(parallel_server, file_name, path, delete_path=True, notify=True, should_deploy_in_place=False):
""" Hot-deploys a package if it looks like a Python module or archive.
"""
logger.debug('About to hot-deploy `%s`', path)
now = datetime.utcnow()
di = dumps(deployment_info('hot-deploy', file_name, now.isoformat(), path, should_deploy_in_place=should_deploy_in_place))
# Insert the package into the DB ..
package_id = parallel_server.odb.hot_deploy(
now, di, file_name, open(path, 'rb').read(), parallel_server.id)
# .. and optionally notify all the servers they're to pick up a delivery
if notify:
parallel_server.notify_new_package(package_id)
if delete_path:
_os_remove(path)
return package_id
# ################################################################################################################################
# As taken from http://wiki.python.org/moin/SortingListsOfDictionaries
def multikeysort(items, columns):
comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]
    def comparer(left, right):
        # Compare column by column, falling through to the next column
        # only when the current one compares equal; previously, an early
        # 'return 0' meant that only the first column was ever consulted.
        for fn, mult in comparers:
            result = cmp(fn(left), fn(right))
            if result:
                return mult * result
        return 0
if PY3:
return sorted(items, key=cmp_to_key(comparer))
else:
return sorted(items, cmp=comparer)
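# An illustrative sketch of multikeysort - a leading '-' means descending order:
#
# >>> items = [{'name': 'b', 'rank': 1}, {'name': 'a', 'rank': 1}, {'name': 'c', 'rank': 2}]
# >>> [elem['name'] for elem in multikeysort(items, ['-rank', 'name'])]
# ['c', 'a', 'b']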
# ################################################################################################################################
# From http://docs.python.org/release/2.7/library/itertools.html#recipes
def grouper(n, iterable, fillvalue=None) -> 'any_':
""" grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx
"""
args = [iter(iterable)] * n
return zip_longest(*args, fillvalue=fillvalue)
# ################################################################################################################################
def translation_name(system1, key1, value1, system2, key2):
return KVDB.SEPARATOR.join((KVDB.TRANSLATION, system1, key1, value1, system2, key2))
# ################################################################################################################################
def dict_item_name(system, key, value):
return KVDB.SEPARATOR.join((system, key, value))
# ################################################################################################################################
# From http://docs.python.org/release/2.7/library/itertools.html#recipes
def pairwise(iterable):
""" s -> (s0,s1), (s1,s2), (s2, s3), ...
"""
a, b = tee(iterable)
next(b, None)
return izip(a, b)
# ################################################################################################################################
def from_local_to_utc(dt, tz_name, dayfirst=True):
""" What is the UTC time given the local time and the timezone's name?
"""
if not isinstance(dt, datetime):
dt = dt_parse(dt, dayfirst=dayfirst)
dt = pytz.timezone(tz_name).localize(dt)
utc_dt = pytz.utc.normalize(dt.astimezone(pytz.utc))
return utc_dt
# ################################################################################################################################
def from_utc_to_local(dt, tz_name):
""" What is the local time in the user-provided time zone name?
"""
if not isinstance(dt, datetime):
dt = dt_parse(dt)
local_tz = pytz.timezone(tz_name)
dt = local_tz.normalize(dt.astimezone(local_tz))
return dt
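# An illustrative round-trip between the two functions above - assuming
# the 'Europe/Warsaw' zone, which is UTC+1 in winter:
#
# >>> utc = from_local_to_utc('2023-01-20 12:00:00', 'Europe/Warsaw', dayfirst=False)
# >>> str(utc)
# '2023-01-20 11:00:00+00:00'
# >>> str(from_utc_to_local(utc, 'Europe/Warsaw'))
# '2023-01-20 12:00:00+01:00'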
# ################################################################################################################################
def _utcnow():
""" See zato.common.util.utcnow for docstring.
"""
return datetime.utcnow()
# ################################################################################################################################
def utcnow():
""" A thin wrapper around datetime.utcnow added so that tests can mock it
out and return their own timestamps at will.
"""
return _utcnow()
# ################################################################################################################################
def _now(tz):
""" See zato.common.util.utcnow for docstring.
"""
return datetime.now(tz)
# ################################################################################################################################
def now(tz=None):
""" A thin wrapper around datetime.now added so that tests can mock it
out and return their own timestamps at will.
"""
return _now(tz)
# ################################################################################################################################
def datetime_to_seconds(dt):
""" Converts a datetime object to a number of seconds since UNIX epoch.
"""
return (dt - _epoch).total_seconds()
# ################################################################################################################################
# Inspired by http://stackoverflow.com/a/9283563
def uncamelify(s, separator='-', elem_func=unicode.lower):
""" Converts a CamelCaseName into a more readable one, e.g.
will turn ILikeToReadWSDLDocsNotReallyNOPENotMeQ into
i-like-to-read-wsdl-docs-not-really-nope-not-me-q or a similar one,
depending on the value of separator and elem_func.
"""
return separator.join(elem_func(elem) for elem in re.sub(_uncamelify_re, r' \1', s).split())
# ################################################################################################################################
def get_component_name(prefix='parallel'):
""" Returns a name of the component issuing a given request so it's possible
to trace which Zato component issued it.
"""
return '{}/{}/{}/{}'.format(prefix, current_host(), os.getpid(), current_thread().name)
# ################################################################################################################################
def dotted_getattr(o, path):
return reduce(getattr, path.split('.'), o)
# ################################################################################################################################
def hexlify(item, _hexlify=binascii_hexlify):
""" Returns a nice hex version of a string given on input.
"""
item = item if isinstance(item, unicode) else item.decode('utf8')
return ' '.join(hex(ord(elem)) for elem in item)
# ################################################################################################################################
def validate_input_dict(cid, *validation_info):
""" Checks that input belongs is one of allowed values.
"""
for key_name, key, source in validation_info:
if not source.has(key):
msg = 'Invalid {}:[{}]'.format(key_name, key)
log_msg = '{} (attrs: {})'.format(msg, source.attrs)
logger.warning(log_msg)
raise ZatoException(cid, msg)
# ################################################################################################################################
# Code below taken from tripod https://github.com/shayne/tripod/blob/master/tripod/sampler.py and slightly modified
# under the terms of LGPL (see LICENSE.txt file for details).
class SafePrettyPrinter(PrettyPrinter):
def format(self, obj, context, maxlevels, level):
try:
return super(SafePrettyPrinter, self).format(
obj, context, maxlevels, level)
except Exception:
return object.__repr__(obj)[:-1] + ' (bad repr)>', True, False
def spformat(obj, depth=None):
return SafePrettyPrinter(indent=1, width=76, depth=depth).pformat(obj)
def formatvalue(v):
s = spformat(v, depth=1).replace('\n', '')
if len(s) > 12500:
s = object.__repr__(v)[:-1] + ' (really long repr)>'
return '=' + s
def get_stack(f, with_locals=False):
limit = getattr(sys, 'tracebacklimit', None)
frames = []
n = 0
while f is not None and (limit is None or n < limit):
lineno, co = f.f_lineno, f.f_code
name, filename = co.co_name, co.co_filename
args = inspect.getargvalues(f)
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
frames.append((filename, lineno, name, line, f.f_locals, args))
f = f.f_back
n += 1
frames.reverse()
out = []
for filename, lineno, name, line, localvars, args in frames:
out.append(' File "%s", line %d, in %s' % (filename, lineno, name))
if line:
out.append(' %s' % line.strip())
if with_locals:
args = inspect.formatargvalues(formatvalue=formatvalue, *args)
out.append('\n Arguments: %s%s' % (name, args))
if with_locals and localvars:
out.append(' Local variables:\n')
try:
reprs = spformat(localvars)
except Exception:
reprs = 'failed to format local variables'
out += [' ' + line for line in reprs.splitlines()]
out.append('')
return '\n'.join(out)
# ################################################################################################################################
def get_threads_traceback(pid):
result = {}
id_name = {th.ident: th.name for th in threading.enumerate()}
for thread_id, frame in iteritems(sys._current_frames()):
key = '{}:{}'.format(pid, id_name.get(thread_id, '(No name)'))
result[key] = get_stack(frame, True)
return result
# ################################################################################################################################
def get_greenlets_traceback(pid):
result = {}
for item in gc.get_objects():
if not isinstance(item, (Greenlet, Hub)):
continue
if not item:
continue
key = '{}:{}'.format(pid, repr(item))
result[key] = ''.join(get_stack(item.gr_frame, True))
return result
# ################################################################################################################################
def dump_stacks(*ignored):
pid = os.getpid()
table = Texttable()
table.set_cols_width((30, 90))
table.set_cols_dtype(['t', 't'])
rows = [['Proc:Thread/Greenlet', 'Traceback']]
rows.extend(sorted(iteritems(get_threads_traceback(pid))))
rows.extend(sorted(iteritems(get_greenlets_traceback(pid))))
table.add_rows(rows)
logger.info('\n' + table.draw())
# ################################################################################################################################
def register_diag_handlers():
""" Registers diagnostic handlers dumping stacks, threads and greenlets on receiving a signal.
"""
signal.signal(signal.SIGURG, dump_stacks)
# ################################################################################################################################
def parse_simple_type(value:'any_', convert_bool:'bool'=True) -> 'any_':
if convert_bool:
try:
value = is_boolean(value)
except VdtTypeError:
# It's cool, not a boolean
pass
try:
value = is_integer(value)
except VdtTypeError:
# OK, not an integer
pass
# Could be a dict or another simple type then
value = parse_literal_dict(value)
# Either parsed out or as it was received
return value
# ################################################################################################################################
def parse_literal_dict(value:'str') -> 'str | anydict':
    try:
        value = literal_eval(value)
    except Exception:
        pass
    return value
# ################################################################################################################################
def parse_extra_into_dict(lines:'str | bytes', convert_bool:'bool'=True):
""" Creates a dictionary out of key=value lines.
"""
if isinstance(lines, bytes):
lines = lines.decode('utf8')
_extra = {}
if lines:
extra = ';'.join(lines.splitlines())
for line in extra.split(';'):
original_line = line
if line:
line = line.replace(r'\r', '')
line = line.strip()
if line.startswith('#'):
continue
line = line.split('=', 1)
                if len(line) != 2:
raise ValueError('Each line must be a single key=value entry, not `{}`'.format(original_line))
key, value = line
value = value.strip()
                value = parse_simple_type(value, convert_bool)
                # Store the value, converted to a simple type if possible, under the stripped key
                _extra[key.strip()] = value
return _extra
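# An illustrative sketch of parse_extra_into_dict - note how simple types
# are converted along the way:
#
# >>> parse_extra_into_dict('host=localhost\nport=5432\nis_active=True')
# {'host': 'localhost', 'port': 5432, 'is_active': True}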
# ################################################################################################################################
# Taken from http://plumberjack.blogspot.cz/2009/09/how-to-treat-logger-like-output-stream.html
class LoggerWriter:
def __init__(self, logger, level):
self.logger = logger
self.level = level
def write(self, message):
if message != '\n':
self.logger.log(self.level, message)
# ################################################################################################################################
def validate_xpath(expr):
""" Evaluates an XPath expression thus confirming it is correct.
"""
etree.XPath(expr)
return True
# ################################################################################################################################
def get_haproxy_agent_pidfile(component_dir):
json_config = loads(
open(os.path.join(component_dir, 'config', 'repo', 'lb-agent.conf'), encoding='utf8').read()
)
return os.path.abspath(os.path.join(component_dir, json_config['pid_file']))
# ################################################################################################################################
def store_pidfile(component_dir, pidfile=MISC.PIDFILE):
open(os.path.join(component_dir, pidfile), 'w', encoding='utf8').write('{}'.format(os.getpid()))
# ################################################################################################################################
def get_kvdb_config_for_log(config):
config = copy.deepcopy(config)
if config.shadow_password_in_logs:
config.password = SECRET_SHADOW
return config
# ################################################################################################################################
def validate_tls_from_payload(payload, is_key=False):
with NamedTemporaryFile(prefix='zato-tls-') as tf:
payload = payload.encode('utf8') if isinstance(payload, unicode) else payload
tf.write(payload)
tf.flush()
pem = open(tf.name, encoding='utf8').read()
cert_info = crypto.load_certificate(crypto.FILETYPE_PEM, pem)
cert_info = sorted(cert_info.get_subject().get_components())
cert_info = '; '.join('{}={}'.format(k.decode('utf8'), v.decode('utf8')) for k, v in cert_info)
if is_key:
key_info = crypto.load_privatekey(crypto.FILETYPE_PEM, pem)
key_info = '{}; {} bits'.format(TLS_KEY_TYPE[key_info.type()], key_info.bits())
return '{}; {}'.format(key_info, cert_info)
else:
return cert_info
get_tls_from_payload = validate_tls_from_payload
# ################################################################################################################################
def get_tls_full_path(root_dir, component, info):
return os.path.join(root_dir, component, fs_safe_name(info) + '.pem')
# ################################################################################################################################
def get_tls_ca_cert_full_path(root_dir, info):
return get_tls_full_path(root_dir, TLS.DIR_CA_CERTS, info)
# ################################################################################################################################
def get_tls_key_cert_full_path(root_dir, info):
return get_tls_full_path(root_dir, TLS.DIR_KEYS_CERTS, info)
# ################################################################################################################################
def store_tls(root_dir, payload, is_key=False):
# Raises exception if it's not really a certificate.
info = get_tls_from_payload(payload, is_key)
pem_file_path = get_tls_full_path(root_dir, TLS.DIR_KEYS_CERTS if is_key else TLS.DIR_CA_CERTS, info)
pem_file = open(pem_file_path, 'w', encoding='utf8')
if has_portalocker:
exception_to_catch = portalocker.LockException
else:
# Purposefully, catch an exception that will never be raised
# so that we can actually get the traceback.
exception_to_catch = ZeroDivisionError
try:
if has_portalocker:
portalocker.lock(pem_file, portalocker.LOCK_EX)
pem_file.write(payload)
pem_file.close()
os.chmod(pem_file_path, 0o640)
return pem_file_path
except exception_to_catch:
pass # It's OK, something else is doing the same thing right now
# ################################################################################################################################
def replace_private_key(orig_payload):
if isinstance(orig_payload, basestring):
if isinstance(orig_payload, bytes):
orig_payload = orig_payload.decode('utf8')
for item in TLS.BEGIN_END:
begin = '-----BEGIN {}PRIVATE KEY-----'.format(item)
if begin in orig_payload:
end = '-----END {}PRIVATE KEY-----'.format(item)
begin_last_idx = orig_payload.find(begin) + len(begin) + 1
                end_preceding_idx = orig_payload.find(end) - 1
                return orig_payload[0:begin_last_idx] + SECRET_SHADOW + orig_payload[end_preceding_idx:]
# No private key at all in payload
return orig_payload
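# An illustrative sketch of replace_private_key - assuming the plain
# 'PRIVATE KEY' marker is among the TLS.BEGIN_END prefixes, the key material
# is replaced with the SECRET_SHADOW placeholder:
#
# >>> pem = '-----BEGIN PRIVATE KEY-----\nabc123\n-----END PRIVATE KEY-----'
# >>> 'abc123' in replace_private_key(pem)
# False
# >>> SECRET_SHADOW in replace_private_key(pem)
# True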
# ################################################################################################################################
def delete_tls_material_from_fs(server, info, full_path_func):
try:
os.remove(full_path_func(server.tls_dir, info))
except OSError as e:
if e.errno == errno.ENOENT:
# It's ok - some other worker must have deleted it already
pass
else:
raise
# ################################################################################################################################
def ping_solr(config):
result = urlparse(config.address)
requests.get('{}://{}{}'.format(result.scheme, result.netloc, config.ping_path))
# ################################################################################################################################
def ping_odoo(conn):
user_model = conn.get_model('res.users')
ids = user_model.search([('login', '=', conn.login)])
user_model.read(ids, ['login'])[0]['login']
# ################################################################################################################################
def ping_sap(conn):
conn.ping()
# ################################################################################################################################
class StaticConfig(Bunch):
def __init__(self, base_dir):
# type: (str) -> None
super(StaticConfig, self).__init__()
self.base_dir = base_dir
def read_file(self, full_path, file_name):
# type: (str, str) -> None
f = open(full_path, encoding='utf8')
file_contents = f.read()
f.close()
        # Convert to a Path object to prepare for manipulations ..
full_path = Path(full_path)
# .. this is the path to the directory containing the file
# relative to the base directory, e.g. the "config/repo/static" part
# in "/home/zato/server1/config/repo/static" ..
relative_dir = Path(full_path.parent).relative_to(self.base_dir)
# .. now, convert all the components from relative_dir into a nested Bunch of Bunch instances ..
relative_dir_elems = list(relative_dir.parts)
# .. start with ourselves ..
_bunch = self
# .. if there are no directories leading to the file, simply assign
# its name to self and return ..
if not relative_dir_elems:
_bunch[file_name] = file_contents
return
# .. otherwise, if there are directories leading to the file,
# iterate until they exist and convert their names to Bunch keys ..
while relative_dir_elems:
# .. name of a directory = a Bunch key ..
elem = relative_dir_elems.pop(0)
# .. attach to the parent Bunch as a new Bunch instance ..
_bunch = _bunch.setdefault(elem, Bunch())
# .. this was the last directory to visit so we can now attach the file name and its contents
# to the Bunch instance representing this directory.
if not relative_dir_elems:
_bunch[file_name] = file_contents
def read_directory(self, root_dir):
for elem in Path(root_dir).rglob('*'): # type: Path
full_path = str(elem)
try:
if elem.is_file():
self.read_file(full_path, elem.name)
except Exception as e:
logger.warning('Could not read file `%s`, e:`%s`', full_path, e.args)
# ################################################################################################################################
def get_basic_auth_credentials(auth):
if not auth:
return None, None
prefix = 'Basic '
if not auth.startswith(prefix):
return None, None
_, auth = auth.split(prefix)
    # b64decode returns bytes under Python 3, hence the decoding to str
    auth = b64decode(auth.strip()).decode('utf8')
    return auth.split(':', 1)
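# An illustrative sketch of get_basic_auth_credentials - the Base64 payload
# below decodes to 'user:secret':
#
# >>> get_basic_auth_credentials('Basic dXNlcjpzZWNyZXQ=')
# ['user', 'secret']
# >>> get_basic_auth_credentials('Bearer abc')
# (None, None)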
# ################################################################################################################################
def parse_tls_channel_security_definition(value):
# type: (bytes) -> iterable(str, str)
if not value:
raise ValueError('No definition given `{}`'.format(repr(value)))
else:
if isinstance(value, bytes):
value = value.decode('utf8')
for line in value.splitlines():
line = line.strip()
if not line:
continue
            if '=' not in line:
raise ValueError("Line `{}` has no '=' key/value separator".format(line))
# It's possible we will have multiple '=' symbols.
sep_index = line.find('=')
key, value = line[:sep_index], line[sep_index+1:]
if not key:
raise ValueError('Key missing in line `{}`'.format(line))
if not value:
raise ValueError('Value missing in line `{}`'.format(line))
yield 'HTTP_X_ZATO_TLS_{}'.format(key.upper()), value
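# An illustrative sketch of parse_tls_channel_security_definition - it is
# a generator, hence the list() call below:
#
# >>> list(parse_tls_channel_security_definition(b'CN=example\nO=My Org'))
# [('HTTP_X_ZATO_TLS_CN', 'example'), ('HTTP_X_ZATO_TLS_O', 'My Org')]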
# ################################################################################################################################
def get_http_json_channel(name, service, cluster, security):
return HTTPSOAP(None, '{}.json'.format(name), True, True, 'channel', 'plain_http', None, '/zato/json/{}'.format(name),
None, '', None, SIMPLE_IO.FORMAT.JSON, service=service, cluster=cluster, security=security)
# ################################################################################################################################
def get_engine(args):
return sa.create_engine(get_engine_url(args))
# ################################################################################################################################
def get_session(engine):
session = orm.sessionmaker() # noqa
session.configure(bind=engine)
return session()
# ################################################################################################################################
def get_crypto_manager_from_server_config(config, repo_dir):
priv_key_location = os.path.abspath(os.path.join(repo_dir, config.crypto.priv_key_location))
cm = CryptoManager(priv_key_location=priv_key_location)
cm.load_keys()
return cm
# ################################################################################################################################
def get_odb_session_from_server_config(config, cm, odb_password_encrypted):
engine_args = Bunch()
engine_args.odb_type = config.odb.engine
engine_args.odb_user = config.odb.username
engine_args.odb_host = config.odb.host
engine_args.odb_port = config.odb.port
engine_args.odb_db_name = config.odb.db_name
if odb_password_encrypted:
engine_args.odb_password = cm.decrypt(config.odb.password) if config.odb.password else ''
else:
engine_args.odb_password = config.odb.password
return get_session(get_engine(engine_args))
# ################################################################################################################################
def get_odb_session_from_component_dir(component_dir, config_file, CryptoManagerClass):
repo_dir = get_repo_dir_from_component_dir(component_dir)
cm = CryptoManagerClass.from_repo_dir(None, repo_dir, None)
secrets_conf = get_config(repo_dir, 'secrets.conf', needs_user_config=False)
config = get_config(repo_dir, config_file, crypto_manager=cm, secrets_conf=secrets_conf)
return get_odb_session_from_server_config(config, None, False)
# ################################################################################################################################
def get_odb_session_from_server_dir(server_dir):
# Zato
from zato.common.crypto.api import ServerCryptoManager
return get_odb_session_from_component_dir(server_dir, 'server.conf', ServerCryptoManager)
# ################################################################################################################################
def get_server_client_auth(
config,
repo_dir,
cm,
odb_password_encrypted,
*,
url_path=None,
) -> 'any_':
""" Returns credentials to authenticate with against Zato's own inocation channels.
"""
# This is optional on input
url_path = url_path or ServiceConst.API_Admin_Invoke_Url_Path
session = get_odb_session_from_server_config(config, cm, odb_password_encrypted)
with closing(session) as session:
# This will exist if config is read from server.conf,
# otherwise, it means that it is based on scheduler.conf.
token = config.get('main', {}).get('token')
# This is server.conf ..
if token:
cluster = session.query(Server).\
filter(Server.token == config.main.token).\
one().cluster
# .. this will be scheduler.conf.
else:
cluster_id = config.get('cluster', {}).get('id')
cluster_id = cluster_id or 1
cluster = session.query(Cluster).\
filter(Cluster.id == cluster_id).\
one()
channel = session.query(HTTPSOAP).\
filter(HTTPSOAP.cluster_id == cluster.id).\
filter(HTTPSOAP.url_path == url_path).\
            filter(HTTPSOAP.connection == 'channel').\
one()
if channel.security_id:
security = session.query(HTTPBasicAuth).\
filter(HTTPBasicAuth.id == channel.security_id).\
first()
if security:
                if password := security.password:
password = security.password.replace(SECRETS.PREFIX, '')
if password.startswith(SECRETS.Encrypted_Indicator):
if cm:
password = cm.decrypt(password)
else:
password = ''
return (security.username, password)
# ################################################################################################################################
def get_client_from_server_conf(
server_dir, # type: str
require_server=True, # type: bool
stdin_data=None, # type: strnone
*,
url_path=None, # type: strnone
initial_wait_time=60 # type: int
) -> 'ZatoClient':
# Imports go here to avoid circular dependencies
from zato.client import get_client_from_server_conf as client_get_client_from_server_conf
# Get the client object ..
client = client_get_client_from_server_conf(
server_dir,
get_server_client_auth,
get_config,
stdin_data=stdin_data,
url_path=url_path
)
# .. make sure the server is available ..
if require_server:
wait_for_zato_ping(client.address, initial_wait_time)
# .. return the client to our caller now.
return client
# ################################################################################################################################
def get_repo_dir_from_component_dir(component_dir:'str') -> 'str':
return os.path.join(os.path.abspath(os.path.join(component_dir)), 'config', 'repo')
# ################################################################################################################################
django_sa_mappings = {
'NAME': 'db_name',
'HOST': 'host',
'PORT': 'port',
'USER': 'username',
'PASSWORD': 'password',
'odb_type': 'engine',
'db_type': 'engine',
}
cli_sa_mappings = {
'odb_db_name': 'db_name',
'odb_host': 'host',
'odb_port': 'port',
'odb_user': 'username',
'odb_password': 'password',
'odb_type': 'engine',
}
# ################################################################################################################################
def get_engine_url(args):
attrs = {}
is_sqlite = False
is_django = 'NAME' in args
has_get = getattr(args, 'get', False)
odb_type = getattr(args, 'odb_type', None)
if odb_type:
is_sqlite = odb_type == 'sqlite'
else:
is_sqlite = args.get('engine') == 'sqlite' or args.get('db_type') == 'sqlite'
names = (
'engine', 'username', 'password', 'host', 'port', 'name', 'db_name', 'db_type', 'sqlite_path', 'odb_type',
'odb_user', 'odb_password', 'odb_host', 'odb_port', 'odb_db_name', 'odb_type', 'ENGINE', 'NAME', 'HOST', 'USER',
'PASSWORD', 'PORT'
)
for name in names:
if has_get:
attrs[name] = args.get(name, '')
else:
attrs[name] = getattr(args, name, '')
# Re-map Django params into SQLAlchemy params
if is_django:
for name in django_sa_mappings:
value = attrs.get(name, ZATO_NOT_GIVEN)
if value != ZATO_NOT_GIVEN:
                if not value and name in ('db_type', 'odb_type'):
continue
attrs[django_sa_mappings[name]] = value
# Zato CLI to SQLAlchemy
if not attrs.get('engine'):
for name in cli_sa_mappings:
value = attrs.get(name, ZATO_NOT_GIVEN)
if value != ZATO_NOT_GIVEN:
attrs[cli_sa_mappings[name]] = value
    # For SQLite, swap db_name and sqlite_path as needed
if attrs['engine'] == 'sqlite':
db_name = attrs.get('db_name')
sqlite_path = attrs.get('sqlite_path')
if db_name:
attrs['sqlite_path'] = db_name
if sqlite_path:
attrs['db_name'] = sqlite_path
return (engine_def_sqlite if is_sqlite else engine_def).format(**attrs)
# ################################################################################################################################
def startup_service_payload_from_path(name, value, repo_location):
""" Reads payload from a local file. Abstracted out to ease in testing.
"""
orig_path = value.replace('file://', '')
if not os.path.isabs(orig_path):
path = os.path.normpath(os.path.join(repo_location, orig_path))
else:
path = orig_path
try:
payload = open(path, encoding='utf8').read()
except Exception:
logger.warning(
'Could not open payload path:`%s` `%s`, skipping startup service:`%s`, e:`%s`', orig_path, path, name, format_exc())
else:
return payload
# ################################################################################################################################
def invoke_startup_services(source, key, fs_server_config, repo_location, broker_client=None, service_name=None,
skip_include=True, worker_store=None, is_sso_enabled=False):
""" Invoked when we are the first worker and we know we have a broker client and all the other config is ready
so we can publish the request to execute startup services. In the worst case the requests will get back to us but it's
also possible that other workers are already running. In short, there is no guarantee that any server or worker in particular
will receive the requests, only that there will be exactly one.
"""
for name, payload in iteritems(fs_server_config.get(key, {})):
# Don't invoke SSO services if the feature is not enabled
if not is_sso_enabled:
if 'zato.sso' in name:
continue
if service_name:
# We are to skip this service:
if skip_include:
if name == service_name:
continue
# We are to include this service only, any other is rejected
else:
if name != service_name:
continue
if isinstance(payload, basestring) and payload.startswith('file://'):
payload = startup_service_payload_from_path(name, payload, repo_location)
if not payload:
continue
cid = new_cid()
msg = {}
msg['action'] = SERVICE.PUBLISH.value
msg['service'] = name
msg['payload'] = payload
msg['cid'] = cid
msg['channel'] = CHANNEL.STARTUP_SERVICE
if broker_client:
broker_client.invoke_async(msg)
else:
worker_store.on_message_invoke_service(msg, msg['channel'], msg['action'])
# ################################################################################################################################
def timeouting_popen(command, timeout, timeout_msg, rc_non_zero_msg, common_msg=''):
""" Runs a command in background and returns its return_code, stdout and stderr.
stdout and stderr will be None if return code = 0
"""
stdout, stderr = None, None
# Run the command
p = Popen(command, stdout=PIPE, stderr=PIPE)
# Sleep as long as requested and poll for results
sleep(timeout)
p.poll()
if p.returncode is None:
msg = timeout_msg + common_msg + 'command:[{}]'.format(command)
raise Exception(msg.format(timeout))
else:
if p.returncode != 0:
stdout, stderr = p.communicate()
msg = rc_non_zero_msg + common_msg + 'command:[{}], return code:[{}], stdout:[{}], stderr:[{}] '.format(
command, p.returncode, stdout, stderr)
raise Exception(msg)
return p.returncode
# ################################################################################################################################
def spawn_greenlet(callable, *args, **kwargs):
""" Spawns a new greenlet and waits up to timeout seconds for its response. It is expected that the response never arrives
because if it does, it means that there were some errors.
"""
try:
timeout = kwargs.pop('timeout', 0.2)
g = spawn(callable, *args, **kwargs)
gevent_sleep(0)
g.join(timeout)
if g.exception:
type_, value, traceback = g.exc_info
exc_type = Exception(value, str(g.exception))
raise_(exc_type, None, traceback)
except Timeout:
pass # Timeout = good = no errors
else:
return g
# ################################################################################################################################
def get_logger_for_class(class_):
return logging.getLogger('{}.{}'.format(inspect.getmodule(class_).__name__, class_.__name__))
# ################################################################################################################################
def get_worker_pids_by_parent(parent_pid:'int') -> 'intlist':
""" Returns all children PIDs of the process whose PID is given on input.
"""
# psutil
import psutil
return sorted(elem.pid for elem in psutil.Process(parent_pid).children())
# ################################################################################################################################
def get_worker_pids():
""" Returns all sibling worker PIDs of the server process we are being invoked on, including our own worker too.
"""
# psutil
import psutil
# This is our own process ..
current_process = psutil.Process()
# .. and this is its parent PID ..
parent_pid = current_process.ppid()
# .. now, we can return PIDs of all the workers.
return get_worker_pids_by_parent(parent_pid)
# ################################################################################################################################
def update_bind_port(data, idx):
address_info = urlparse(data.address)
base, port = address_info.netloc.split(':')
port = int(port) + idx
data.address = '{}://{}:{}{}'.format(address_info.scheme, base, port, address_info.path)
data.bind_port = port
# ################################################################################################################################
def start_connectors(worker_store, service_name, data):
for idx, pid in enumerate(get_worker_pids()):
if 'socket_method' in data and data.socket_method == ZMQ.METHOD_NAME.BIND:
update_bind_port(data, idx)
worker_store.server.invoke(service_name, data, pid=pid, is_async=True, data_format=DATA_FORMAT.DICT)
# ################################################################################################################################
def require_tcp_port(address):
    if ':' not in address:
raise Exception('No TCP port in {}'.format(address))
port = address.split(':')[-1]
if not port.strip():
raise Exception('No TCP port in {}'.format(address))
try:
int(port)
except ValueError:
raise Exception('Invalid TCP port in {}'.format(address))
# ################################################################################################################################
def update_apikey_username_to_channel(config):
config.header = 'HTTP_{}'.format(config.get('header', '').upper().replace('-', '_'))
# ################################################################################################################################
def get_response_value(response):
""" Extracts the actual response string from a response object produced by services.
"""
return (response.payload.getvalue() if hasattr(response.payload, 'getvalue') else response.payload) or ''
# ################################################################################################################################
def get_lb_agent_json_config(repo_dir):
return loads(open(os.path.join(repo_dir, 'lb-agent.conf'), encoding='utf8').read())
# ################################################################################################################################
def parse_cmd_line_options(argv):
options = argv.split(CLI_ARG_SEP)
options = '\n'.join(options)
return parse_extra_into_dict(options)
# ################################################################################################################################
def get_sa_model_columns(model):
""" Returns all columns (as string) of an input SQLAlchemy model.
"""
return [elem.key for elem in model.__table__.columns]
# ################################################################################################################################
def is_class_pubsub_hook(class_):
""" Returns True if input class subclasses PubSubHook.
"""
# Imported here to avoid circular dependencies
from zato.server.service import PubSubHook
return issubclass(class_, PubSubHook) and (class_ is not PubSubHook)
# ################################################################################################################################
def ensure_pubsub_hook_is_valid(self, input, instance, attrs):
""" An instance hook that validates if an optional pub/sub hook given on input actually subclasses PubSubHook.
"""
if input.get('hook_service_id'):
impl_name = self.server.service_store.id_to_impl_name[input.hook_service_id]
details = self.server.service_store.services[impl_name]
if not is_class_pubsub_hook(details['service_class']):
raise ValueError('Service `{}` is not a PubSubHook subclass'.format(details['name']))
# ################################################################################################################################
def is_func_overridden(func):
""" Returns True if input func was overridden by user in a subclass - used to decide
whether users implemented a given hook. If there is a special internal marker in input arguments,
it means that it is an internal function from parent class, not a user-defined one.
"""
if func and is_method(func):
func_defaults = func.__defaults__ if PY3 else func.im_func.func_defaults
# Only internally defined methods will fulfill conditions that they have default arguments
# and one of them is our no-op marker, hence if we negate it and the result is True,
# it means it must have been a user-defined method.
if not (func_defaults and isinstance(func_defaults, tuple) and zato_no_op_marker in func_defaults):
return True
# ################################################################################################################################
def get_sql_engine_display_name(engine, fs_sql_config):
display_name = None
for key, value in fs_sql_config.items():
if key == engine:
display_name = value.get('display_name')
break
if not display_name:
raise ValueError('Could not find display name for engine `{}` in config `{}`'.format(
engine, fs_sql_config))
else:
return display_name
# ################################################################################################################################
def pretty_format_float(value):
return ('%f' % value).rstrip('0').rstrip('.') if value else value
# ################################################################################################################################
# The slugify function below is taken from Django:
"""
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
def slugify(value, allow_unicode=False):
""" Convert to ASCII if 'allow_unicode' is False. Convert spaces to underscores.
Remove characters that aren't alphanumerics, underscores, or hyphens.
Convert to lowercase. Also strip leading and trailing whitespace.
"""
if allow_unicode:
value = unicodedata.normalize('NFKC', value)
value = re.sub('[^\w\s-]', '', value, flags=re.U).strip().lower() # noqa: W605
return re.sub('[-\s]+', '_', value, flags=re.U) # noqa: W605
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
value = re.sub('[^\w\s-]', '', value).strip().lower() # noqa: W605
return re.sub('[-\s]+', '_', value) # noqa: W605
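# An illustrative sketch of slugify - note that this version converts spaces
# to underscores rather than hyphens:
#
# >>> slugify('Hello, World!')
# 'hello_world'
# >>> slugify('Żółta łódź', allow_unicode=True)
# 'żółta_łódź'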
# ################################################################################################################################
def wait_for_predicate(
predicate_func, # type: callable_
timeout, # type: int
interval, # type: float
log_msg_details=None, # type: strnone
needs_log=True, # type: bool
*args, # type: any_
**kwargs # type: any_
) -> 'bool':
    # Try it out first, perhaps the predicate is already fulfilled
is_fulfilled = bool(predicate_func(*args, **kwargs))
# Use an explicit loop index for reporting
loop_idx = 1
# After that many seconds, we are going to start logging the fact that we are still waiting.
# In this way, we do not need to log information about all the predicates that finish quickly.
log_after_seconds = 5
    # Perhaps we can already return; we enter this branch only if we cannot ..
if not is_fulfilled:
# For later use
start = datetime.utcnow()
# The time at which we will start to log details
log_after = start + timedelta(seconds=log_after_seconds)
# We will wait for the predicate until this time
wait_until = start + timedelta(seconds=timeout)
# Optionally, we may already have something to log
if (datetime.utcnow() > log_after) and needs_log and log_msg_details:
logger.info('Waiting for %s (#%s -> %ss)', log_msg_details, loop_idx, interval)
# Keep looping until the predicate is fulfilled ..
while not is_fulfilled:
# .. sleep for a moment ..
gevent_sleep(interval)
# .. for later use ..
now = datetime.utcnow()
# .. return if we have run out of time ..
if now > wait_until:
break
# .. optionally, log the current state ..
if (now > log_after) and needs_log and log_msg_details:
logger.info('Waiting for %s (#%s -> %ss)', log_msg_details, loop_idx, interval)
# .. otherwise, check if we have the predicate fulfilled already ..
is_fulfilled = predicate_func(*args, **kwargs)
# .. keep looping ..
loop_idx += 1
# .. at this point, the predicate is fulfilled or we have run out of time ..
# .. but, in either case, we can return the result to our caller.
return is_fulfilled
# ################################################################################################################################
def wait_for_dict_key_by_get_func(
get_key_func, # type: callable_
key, # type: any_
    timeout=9999, # type: int
interval=0.01 # type: float
) -> 'any_':
# In this function, we wait for a key through a user-provided get-key function,
# which may be different than dict.get, e.g. topic_api.get_topic_by_name.
def _predicate_dict_key(*_ignored_args, **_ignored_kwargs) -> 'any_':
try:
value = get_key_func(key)
return value
except KeyError:
return False
return wait_for_predicate(_predicate_dict_key, timeout, interval, log_msg_details=f'dict key -> `{key}`')
# ################################################################################################################################
def wait_for_dict_key(
_dict, # type: anydict
key, # type: any_
timeout=9999, # type: int
interval=0.01 # type: float
) -> 'any_':
# In this function, we wait for a key by accessing the dict object directly,
# using the dict's own .get method.
def _predicate_dict_key(*_ignored_args, **_ignored_kwargs) -> 'any_':
value = _dict.get(key)
return value
return wait_for_dict_key_by_get_func(_predicate_dict_key, key, timeout, interval)
# ################################################################################################################################
def wait_for_file(full_path:'str', timeout:'int'=9999, interval:'float'=0.01) -> 'any_':
def _predicate_wait_for_file(*_ignored_args, **_ignored_kwargs) -> 'any_':
file_exists = os.path.exists(full_path)
# If the file already exists, wait a little longer to make sure
# that everything that needs to be saved in there is actually saved.
if file_exists:
sleep(0.2)
return file_exists
return wait_for_predicate(_predicate_wait_for_file, timeout, interval, log_msg_details=f'path -> `{full_path}`')
# ################################################################################################################################
def hex_sequence_to_bytes(elems):
# type: (str) -> bytes
elems = [int(elem.strip(), 16) for elem in elems.split()]
elems = [chr(elem) for elem in elems]
elems = [bytes(elem, 'utf8') for elem in elems]
return b''.join(elems)
# ################################################################################################################################
def tabulate_dictlist(data:'dictlist', skip_keys:'listnone'=None) -> 'str':
# stdlib
from copy import deepcopy
# Tabulate
from tabulate import tabulate
# Return early if there is not anything that we can tabulate
if not data:
return ''
# If we have keys to skip, we need re-build the dictionary without these keys first.
if skip_keys:
skip_keys = skip_keys if isinstance(skip_keys, list) else [skip_keys]
_data = []
for elem in data:
_elem = deepcopy(elem)
for key in skip_keys:
_elem.pop(key, None)
_data.append(_elem)
data = _data
# We assume that all elements will have the same keys
elem0 = data[0]
len_keys = len(elem0)
# We align all the columns to the left hand side
col_align = ('left',) * len_keys
return tabulate(data, headers='keys', tablefmt='pretty', colalign=col_align)
# ################################################################################################################################
def needs_suffix(item:'any_') -> 'bool':
if isinstance(item, int):
len_item = item
else:
len_item = len(item)
return len_item == 0 or len_item > 1
# ################################################################################################################################
def get_new_tmp_full_path(file_name:'str'='', *, prefix:'str'='', suffix:'str'='', random_suffix:'str'='') -> 'str':
if prefix:
prefix = f'{prefix}-'
if not random_suffix:
random_suffix = uuid4().hex
# This may be provided by users on input
file_name = file_name or 'zato-tmp-' + prefix + random_suffix
if suffix:
file_name += f'-{suffix}'
tmp_dir = gettempdir()
full_path = os.path.join(tmp_dir, file_name)
return full_path
# ################################################################################################################################
def get_ipc_pid_port_path(cluster_name:'str', server_name:'str', pid:'int') -> 'str':
# This is where the file name itself ..
file_name = ModuleCtx.PID_To_Port_Pattern.format(
cluster_name=cluster_name,
server_name=server_name,
pid=pid,
)
# .. make sure the name is safe to use in the file-system ..
file_name = fs_safe_name(file_name)
# .. now, we can obtain a full path to a temporary directory ..
full_path = get_new_tmp_full_path(file_name)
# .. and return the result to our caller.
return full_path
# ################################################################################################################################
def save_ipc_pid_port(cluster_name:'str', server_name:'str', pid:'int', port:'int') -> 'None':
# Make sure we store a string ..
port = str(port)
# .. get a path where we can save it ..
path = get_ipc_pid_port_path(cluster_name, server_name, pid)
# .. and do save it now.
with open_w(path) as f:
_ = f.write(port)
f.flush()
# ################################################################################################################################
def load_ipc_pid_port(cluster_name:'str', server_name:'str', pid:'int') -> 'int':
# Get a path to load the port from ..
path = get_ipc_pid_port_path(cluster_name, server_name, pid)
# .. wait until the file exists (which may be required when the server starts up) ..
wait_for_file(path, interval=2.5)
# .. load it now ..
with open_r(path) as f:
data = f.read()
data = data.strip()
out = int(data)
return out
# ################################################################################################################################
def make_list_from_string_list(value:'str', separator:'str') -> 'strlist':
value = value.split(separator) # type: ignore
value = [elem.strip() for elem in value if elem] # type: ignore
return value
# ################################################################################################################################
def validate_python_syntax(data:'str') -> 'None':
_ = ast.parse(data)
# ################################################################################################################################
class _DemoPyFsLocations:
pickup_incoming_full_path:'str'
work_dir_full_path:'str'
def get_demo_py_fs_locations(base_dir:'str') -> '_DemoPyFsLocations':
# Local variables
file_name = 'demo.py'
out = _DemoPyFsLocations()
out.pickup_incoming_full_path = os.path.join(base_dir, 'pickup', 'incoming', 'services', file_name)
out.work_dir_full_path = os.path.join(base_dir, 'work', 'hot-deploy', 'current', 'demo.py')
return out
# ################################################################################################################################
| 81,942
|
Python
|
.py
| 1,628
| 43.850123
| 130
| 0.504476
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,684
|
time_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/time_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime, timedelta, timezone
from time import time
import logging
# Arrow
import arrow
# tzlocal
from tzlocal import get_localzone
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
Date_Format = 'YYYY-MM-DD'
Date_Time_Format = 'YYYY-MM-DDTHH:mm:ss'
Timestamp_Format = 'YYYY-MM-DDTHH:mm:ss.SSSSSS'
# ################################################################################################################################
# ################################################################################################################################
_epoch = datetime.utcfromtimestamp(0) # Start of UNIX epoch
local_tz = get_localzone()
local_tz_zone = str(local_tz)
# ################################################################################################################################
def utc_now():
""" Returns current time in UTC with the timezone information included.
"""
return datetime.now(timezone.utc)
# ################################################################################################################################
def native_to_utc(dt):
""" Converts a native datetime object to a UTC one.
"""
return dt.replace(tzinfo=timezone.utc)
# ################################################################################################################################
def datetime_to_ms(dt):
""" Converts a datetime object to a number of milliseconds since UNIX epoch.
"""
return (dt - _epoch).total_seconds() * 1000
# ################################################################################################################################
def datetime_to_sec(dt):
""" Converts a datetime object to a number of seconds since UNIX epoch.
"""
return (dt - _epoch).total_seconds()
# ################################################################################################################################
def utcnow_as_ms(_time=time):
""" Returns current UTC time in milliseconds since epoch. As of now, uses time.time but may eventually choose
to use alternative implementations on different systems.
"""
return _time()
# ################################################################################################################################
def datetime_from_ms(ms:'float', isoformat:'bool'=True) -> 'str | datetime':
""" Converts a number of milliseconds since UNIX epoch to a datetime object.
"""
value = _epoch + timedelta(milliseconds=ms)
if isoformat:
return value.isoformat()
else:
return value
# ################################################################################################################################
class TimeUtil:
""" A thin layer around Arrow's date/time handling library customized for our needs.
Default format is always taken from ISO 8601 (so it's sorted lexicographically)
and default timezone is always UTC.
"""
# ################################################################################################################################
def now(self, format=ModuleCtx.Date_Time_Format, tz=local_tz_zone, needs_format=True, delta=None) -> 'str | datetime':
""" Returns now in a specified timezone.
"""
now = arrow.now(tz=tz)
if delta:
now = now + delta
if needs_format:
return now.format(format)
return now
# ################################################################################################################################
def _time_from(self, value, delta, format, needs_format):
value = arrow.get(value)
value_from = value + timedelta(**delta)
if needs_format:
return value_from.format(format)
else:
return value_from
# ################################################################################################################################
def one_day_from(self, date, format=ModuleCtx.Date_Format, needs_format=True):
delta = {'days': 1}
return self._time_from(date, delta, format, needs_format)
# ################################################################################################################################
def one_hour_from(self, date, format=ModuleCtx.Date_Format, needs_format=True):
delta = {'minutes': 60}
return self._time_from(date, delta, format, needs_format)
# ################################################################################################################################
def one_minute_from(self, date, format=ModuleCtx.Date_Format, needs_format=True):
delta = {'minutes': 1}
return self._time_from(date, delta, format, needs_format)
# ################################################################################################################################
def yesterday(self, format=ModuleCtx.Date_Time_Format, tz=local_tz_zone, needs_format=True):
return self.now(format, tz, needs_format, delta=timedelta(days=-1))
# ################################################################################################################################
def tomorrow(self, format=ModuleCtx.Date_Time_Format, tz=local_tz_zone, needs_format=True):
return self.now(format, tz, needs_format, delta=timedelta(days=1))
# ################################################################################################################################
def utcnow(self, format=ModuleCtx.Date_Time_Format, needs_format=True) -> 'any_':
""" Returns now in UTC formatted as given in 'format'.
"""
return self.now(format, 'UTC', needs_format)
# ################################################################################################################################
def utcnow_as_float(self, format=ModuleCtx.Date_Time_Format, needs_format=True) -> 'any_':
""" Returns now in UTC as a float number.
"""
return self.utcnow(needs_format=False).float_timestamp
# ################################################################################################################################
def today(self, format=ModuleCtx.Date_Format, tz=local_tz_zone, needs_format=True):
""" Returns current day in a given timezone.
"""
now = arrow.now(tz=tz)
today = arrow.Arrow(year=now.year, month=now.month, day=now.day)
if tz != 'UTC':
today = today.to(tz)
if needs_format:
return today.format(format)
else:
return today
# ################################################################################################################################
def isonow(self, tz=local_tz_zone, needs_format=True, _format=ModuleCtx.Timestamp_Format):
return self.now(_format, tz, needs_format)
# ################################################################################################################################
def isoutcnow(self, needs_format=True, _format=ModuleCtx.Timestamp_Format):
return self.utc_now(_format, needs_format)
# ################################################################################################################################
def reformat(self, value, from_, to):
""" Reformats value from one datetime format to another, for instance
from 23-03-2013 to 03/23/13 (MM-DD-YYYY to DD/MM/YY).
"""
try:
# Arrow compares to str, not basestring
value = str(value) if isinstance(value, unicode) else value
from_ = str(from_) if isinstance(from_, unicode) else from_
return arrow.get(value, from_).format(to)
except Exception:
logger.error('Could not reformat value:`%s` from:`%s` to:`%s`',
value, from_, to)
raise
# ################################################################################################################################
| 9,168
|
Python
|
.py
| 153
| 54.222222
| 130
| 0.384212
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,685
|
url_dispatcher.py
|
zatosource_zato/code/zato-common/src/zato/common/util/url_dispatcher.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Zato
from zato.common.api import HTTP_SOAP, MISC
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
accept_any_http = HTTP_SOAP.ACCEPT.ANY
accept_any_internal = HTTP_SOAP.ACCEPT.ANY_INTERNAL
method_any_internal = HTTP_SOAP.METHOD.ANY_INTERNAL
# ################################################################################################################################
def get_match_target(config, sep=MISC.SEPARATOR, accept_any_http=accept_any_http, accept_any_internal=accept_any_internal,
method_any_internal=method_any_internal, http_methods_allowed_re=None):
http_method = config.get('method') or config.get('http_method')
if not http_method:
http_method = http_methods_allowed_re
http_accept = config.get('http_accept') or accept_any_http
http_accept = http_accept.replace('*', '{}'.format(accept_any_internal)).replace('/', 'HTTP_SEP')
# Extract variables needed to build the pattern
soap_action = config['soap_action']
url_path = config['url_path']
# Support parentheses in URL paths
url_path = url_path.replace('(', r'\(')
url_path = url_path.replace(')', r'\)')
# Build the pattern ..
pattern = f'{soap_action}{sep}{http_method}{sep}{http_accept}{sep}{url_path}'
# .. and return it to our caller
return pattern
# ################################################################################################################################
| 1,960
|
Python
|
.py
| 35
| 52.514286
| 130
| 0.518363
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,686
|
auth.py
|
zatosource_zato/code/zato-common/src/zato/common/util/auth.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from base64 import b64decode
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
from six import PY2
# Zato
from zato.common.api import AUTH_RESULT
from zato.common.crypto.api import is_string_equal
from zato.server.connection.http_soap import Forbidden
logger = getLogger('zato')
def parse_basic_auth(auth, prefix='Basic '):
""" Parses username/password out of incoming HTTP Basic Auth data.
"""
if not auth:
raise ValueError('No auth received in `{}` ({})'.format(auth, AUTH_RESULT.BASIC_AUTH.NO_AUTH))
if not auth.startswith(prefix):
raise ValueError('Invalid prefix in `{}` ({})'.format(auth, AUTH_RESULT.BASIC_AUTH.NO_AUTH))
_, auth = auth.split(prefix)
auth = b64decode(auth.strip())
auth = auth if PY2 else auth.decode('utf8')
return auth.split(':', 1)
# ################################################################################################################################
# ################################################################################################################################
# Code below comes from another project - will be moved elsewhere at one point thus the location of imports and definitions
# ################################################################################################################################
# ################################################################################################################################
# Python 2/3 compatibility
from zato.common.ext.future.moves.urllib.parse import quote_plus
# lxml
from lxml import etree
# PyYAML
from yaml import dump
try:
from yaml import CDumper as Dumper
except ImportError: # pragma: no cover
from yaml import Dumper # pragma: no cover
# ################################################################################################################################
# ################################################################################################################################
class AuthResult:
""" Represents the result of validating a URL against the config. 'status' is the main boolean flag indicating
whether the successful was successful or not. 'code' equal to '0' means success and any other value is a failure,
note that 'code' may be a multi-character string including punctuation. 'description' is an optional attribute holding
any additional textual information a callee might wish to pass to the calling layer. 'auth_info' is either
an empty string or information regarding the authorization data presented by the calling application.
Instances of this class are considered True or False in boolean comparisons
according to the boolean value of self.status.
"""
def __init__(self, status=False, code='-1', description=''):
self.status = status
self.code = code
self.description = description
self._auth_info = b''
@property
def auth_info(self):
return self._auth_info
@auth_info.setter
def auth_info(self, value):
self._auth_info = dump(value, Dumper=Dumper)
def __repr__(self):
return '<{0} at {1} status={2} code={3} description={4} auth_info={5}>'.format(
self.__class__.__name__, hex(id(self)), self.status, self.code,
self.description, self.auth_info)
def __bool__(self):
""" Returns the boolean value of self.status. Useful when an instance
must be compared in a boolean context.
"""
return bool(self.status)
__nonzero__ = __bool__
# ################################################################################################################################
# ################################################################################################################################
class SecurityException(Exception):
""" Indicates problems with validating incoming requests. The 'description'
attribute holds textual information suitable for showing to human users.
"""
def __init__(self, description):
self.description = description
# ################################################################################################################################
# ################################################################################################################################
Auth_WSSE_No_Data = '0003.0001'
Auth_WSSE_Validation_Error = '0003.0002'
Auth_Basic_No_Auth = 'No_Auth_Provided'
Auth_Basic_Invalid_Prefix = 'Invalid_Auth_Prefix'
Auth_Basic_Username_Or_Password_Mismatch = 'Invalid_Username_Or_Password'
# ################################################################################################################################
# ################################################################################################################################
def on_wsse_pwd(wsse, url_config, data, needs_auth_info=True):
""" Visit _RequestApp._on_wsse_pwd method's docstring.
"""
if not data:
return AuthResult(False, Auth_WSSE_No_Data)
request = etree.fromstring(data)
try:
ok, wsse_username = wsse.validate(request, url_config)
except SecurityException as e:
return AuthResult(False, Auth_WSSE_Validation_Error, e.description)
else:
auth_result = AuthResult(True, '0')
if needs_auth_info:
auth_result.auth_info = {b'wsse-pwd-username': str(wsse_username)}
return auth_result
# ################################################################################################################################
# ################################################################################################################################
def extract_basic_auth(cid:'str', auth:'str', *, raise_on_error:'bool'=False) -> 'str':
if not auth:
if raise_on_error:
logger.warn(f'Basic Auth -> {Auth_Basic_No_Auth} -> {cid}')
raise Forbidden(cid)
else:
return None, Auth_Basic_No_Auth
prefix = 'Basic '
if not auth.startswith(prefix):
if raise_on_error:
logger.warn(f'Basic Auth -> {Auth_Basic_Invalid_Prefix} -> {cid}')
raise Forbidden(cid)
else:
return None, Auth_Basic_Invalid_Prefix
_, auth = auth.split(prefix)
auth = auth.strip()
auth = b64decode(auth)
auth = auth if isinstance(auth, unicode) else auth.decode('utf8')
username, password = auth.split(':', 1)
return username, password
# ################################################################################################################################
# ################################################################################################################################
def check_basic_auth(cid, auth, expected_username, expected_password):
""" A low-level call for checking HTTP Basic Auth credentials.
"""
result = extract_basic_auth(cid, auth, raise_on_error=False)
if result[0]:
username, password = result
else:
return result[1]
if is_string_equal(username, expected_username) and is_string_equal(password, expected_password):
return True
else:
return Auth_Basic_Username_Or_Password_Mismatch
# ################################################################################################################################
# ################################################################################################################################
def on_basic_auth(cid, env, url_config, needs_auth_info=True):
""" Visit _RequestApp.check_basic_auth method's docstring.
"""
username = url_config['basic-auth-username']
result = check_basic_auth(cid, env.get('HTTP_AUTHORIZATION', ''), username, url_config['basic-auth-password'])
is_success = result is True # Note that we need to compare with True
auth_result = AuthResult(is_success)
if is_success:
if needs_auth_info:
auth_result.auth_info = {b'basic-auth-username': quote_plus(username).encode('utf-8')}
else:
auth_result.code = result
return auth_result
# ################################################################################################################################
# ################################################################################################################################
def enrich_with_sec_data(data_dict, sec_def, sec_def_type):
data_dict['zato.sec_def'] = {}
data_dict['zato.sec_def']['id'] = sec_def['id']
data_dict['zato.sec_def']['name'] = sec_def['name']
data_dict['zato.sec_def']['username'] = sec_def.get('username')
data_dict['zato.sec_def']['impl'] = sec_def
data_dict['zato.sec_def']['type'] = sec_def_type
# ################################################################################################################################
# ################################################################################################################################
| 9,341
|
Python
|
.py
| 168
| 50.535714
| 130
| 0.468099
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,687
|
cache.py
|
zatosource_zato/code/zato-common/src/zato/common/util/cache.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Requests
from requests import Session as RequestsSession
# Zato
from zato.common.api import CACHE, NotGiven
from zato.common.crypto.api import ServerCryptoManager
from zato.common.json_internal import dumps
from zato.common.util.api import as_bool, get_config, get_odb_session_from_server_dir, get_repo_dir_from_component_dir
from zato.common.odb.model import Cluster, HTTPBasicAuth, Server
# ################################################################################################################################
if 0:
from requests import Response as RequestsResponse
RequestsResponse = RequestsResponse
# ################################################################################################################################
# Maps cache operations to HTTP verbos
op_verb_map = {
'get': 'GET',
'set': 'POST',
'delete': 'DELETE'
}
# ################################################################################################################################
# ################################################################################################################################
class CommandConfig:
__slots__ = 'command', 'modifier', 'key', 'value', 'is_string_key', 'is_int_key', 'is_string_value', 'is_int_value', \
'is_bool_value', 'format'
def __init__(self):
self.command = None # type: str
self.modifier = None # type: str
self.key = None # type: str
self.value = None # type: str
self.is_string_key = None # type: bool
self.is_int_key = None # type: bool
self.is_string_value = None # type: bool
self.is_int_value = None # type: bool
self.is_bool_value = None # type: bool
self.format = None # type: str
def to_dict(self):
out = {}
for name in self.__slots__:
out[name] = getattr(self, name)
return out
# ################################################################################################################################
# ################################################################################################################################
class CommandResponse:
__slots__ = 'key', 'text', 'has_value'
def __init__(self):
self.key = None # type: object
self.text = None # type: str
self.has_value = None # type: bool
# ################################################################################################################################
# ################################################################################################################################
class Client:
""" An HTTP-based Zato cache client.
"""
__slots__ = 'address', 'username', 'password', 'cache_name', 'session'
def __init__(self):
self.address = None # type: str
self.username = None # type: str
self.password = None # type: str
self.cache_name = None # type: str
self.session = None # type: RequestsSession
# ################################################################################################################################
@staticmethod
def from_server_conf(server_dir, cache_name, is_https):
# type: (str, str, bool) -> Client
repo_dir = get_repo_dir_from_component_dir(server_dir)
cm = ServerCryptoManager.from_repo_dir(None, repo_dir, None)
secrets_conf = get_config(repo_dir, 'secrets.conf', needs_user_config=False)
config = get_config(repo_dir, 'server.conf', crypto_manager=cm, secrets_conf=secrets_conf)
session = None
password = None
try:
session = get_odb_session_from_server_dir(server_dir)
cluster = session.query(Server).\
filter(Server.token == config.main.token).\
one().cluster # type: Cluster
security = session.query(HTTPBasicAuth).\
filter(Cluster.id == HTTPBasicAuth.cluster_id).\
filter(HTTPBasicAuth.username == CACHE.API_USERNAME).\
filter(HTTPBasicAuth.cluster_id == cluster.id).\
first() # type: HTTPBasicAuth
if security:
password = security.password
finally:
if session:
session.close()
return Client.from_dict({
'username': CACHE.API_USERNAME,
'password': password,
'address': config.main.gunicorn_bind,
'cache_name': cache_name,
'is_https': is_https,
})
# ################################################################################################################################
@staticmethod
def from_dict(config):
# type: (dict) -> Client
client = Client()
client.username = config['username']
client.password = config['password']
client.cache_name = config['cache_name']
if config['address'].startswith('http'):
address = config['address']
else:
address = 'http{}://{}'.format('s' if config['is_https'] else '', config['address'])
client.address = address
session = RequestsSession()
if client.password:
session.auth = (client.username, client.password)
client.session = session
return client
# ################################################################################################################################
def _request(self, op, key, value=NotGiven, pattern='/zato/cache/{}', op_verb_map=op_verb_map):
# type: (str, str, str) -> str
# Build a full address
path = pattern.format(key)
address = '{}{}'.format(self.address, path)
# Get the HTTP verb to use in the request
verb = op_verb_map[op] # type: str
data = {
'cache': self.cache_name,
'return_prev': True
}
if value is not NotGiven:
data['value'] = value
data = dumps(data)
response = self.session.request(verb, address, data=data) # type: RequestsResponse
return response.text
# ################################################################################################################################
def run_command(self, config):
# type: (CommandConfig) -> CommandResponse
if config.value is not NotGiven:
if config.is_int_value:
value = int(config.value)
elif config.is_bool_value:
value = as_bool(config.value)
else:
value = config.value
else:
value = config.value
raw_response = self._request(config.command, config.key, value)
_response = CommandResponse()
_response.key = config.key
_response.text = raw_response
return _response
# ################################################################################################################################
# ################################################################################################################################
| 7,482
|
Python
|
.py
| 152
| 41.125
| 130
| 0.445253
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,688
|
__init__.py
|
zatosource_zato/code/zato-common/src/zato/common/util/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Imported for backward compatibility
from zato.common.util.api import * # noqa: F401
# Imported here in addition to zato.common.util.api for backward compatibility.
from zato.common.util.logging_ import ColorFormatter
# For pyflakes
ColorFormatter = ColorFormatter
| 417
|
Python
|
.py
| 11
| 36.454545
| 79
| 0.780549
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,689
|
scheduler.py
|
zatosource_zato/code/zato-common/src/zato/common/util/scheduler.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from copy import deepcopy
from datetime import datetime
from json import dumps
from logging import getLogger
from time import sleep
from traceback import format_exc
# Bunch
from bunch import Bunch
# Zato
from zato.common.api import SCHEDULER
from zato.common.odb.model import Cluster, IntervalBasedJob, Job, Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, list_
from zato.scheduler.api import SchedulerAPI
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato_scheduler')
# ################################################################################################################################
# ################################################################################################################################
def wait_for_odb_service_by_odb(session:'any_', cluster_id:'int', service_name:'str') -> 'None':
# Assume we do not have it
service = None
while not service:
# Try to look it up ..
service = session.query(Service).\
filter(Service.name==service_name).\
filter(Cluster.id==cluster_id).\
first()
# .. if not found, sleep for a moment.
if not service:
sleep(1)
logger.info('Waiting for ODB service `%s` (ODB)', service_name)
# If we are here, it means that the service was found so we can return it
return service
# ################################################################################################################################
# ################################################################################################################################
def wait_for_odb_service_by_api(api:'SchedulerAPI', service_name:'str') -> 'None':
# Assume we do not have it
is_deployed = None
while not is_deployed:
# Try to look it up ..
response = api.invoke_service('zato.service.is-deployed', {
'name': service_name
})
# .. we can return if we have a response that indicates that the service is deployed ..
if response and response.get('is_deployed'):
return
# .. otherwise, we sleep for a moment before the next iteration ..
else:
sleep(2)
# ################################################################################################################################
def _add_scheduler_job(api:'SchedulerAPI', job_data:'Bunch', spawn:'bool', source:'str') -> 'None':
# Ignore jobs that have been removed
if job_data.name in SCHEDULER.JobsToIgnore:
logger.info(f'Ignoring job `{job_data.name}` ({source})`')
return
if job_data.is_active:
api.create_edit('create', job_data, spawn=spawn)
else:
logger.info(f'Not adding an inactive job `{job_data}`')
# ################################################################################################################################
def add_startup_jobs_to_odb_by_odb(cluster_id:'int', odb:'any_', jobs:'any_', stats_enabled:'bool') -> 'None':
""" Uses a direction ODB connection to add initial startup jobs to the ODB.
"""
with closing(odb.session()) as session:
now = datetime.utcnow()
for item in jobs:
if item['name'].startswith('zato.stats'):
continue
try:
extra = item.get('extra', '')
if isinstance(extra, str):
extra = extra.encode('utf-8')
else:
if item.get('is_extra_list'):
extra = '\n'.join(extra)
else:
extra = dumps(extra)
if extra:
if not isinstance(extra, bytes):
extra = extra.encode('utf8')
#
# This will block as long as this service is not available in the ODB.
# It is required to do it because the scheduler may start before servers
# in which case services will not be in the ODB yet and we need to wait for them.
#
service = wait_for_odb_service_by_odb(session, cluster_id, item['service'])
cluster = session.query(Cluster).\
filter(Cluster.id==cluster_id).\
one()
existing_one = session.query(Job).\
filter(Job.name==item['name']).\
filter(Job.cluster_id==cluster_id).\
first()
if existing_one:
continue
job = Job(None, item['name'], True, 'interval_based', now, cluster=cluster, service=service, extra=extra)
kwargs = {}
for name in('seconds', 'minutes'):
if name in item:
kwargs[name] = item[name]
ib_job = IntervalBasedJob(None, job, **kwargs)
session.add(job)
session.add(ib_job)
session.commit()
except Exception:
logger.warning(format_exc())
else:
logger.info('Initial job added `%s`', job.name)
# ################################################################################################################################
def load_scheduler_jobs_by_odb(api:'SchedulerAPI', odb:'any_', cluster_id:'int', spawn:'bool'=True) -> 'None':
""" Uses ODB connections directly to obtain a list of all jobs that the scheduler should run.
"""
# Get a list of jobs ..
job_list = odb.get_job_list(cluster_id)
# .. go through each of them ..
for(id, name, is_active, job_type, start_date, extra, service_name, _,
_, weeks, days, hours, minutes, seconds, repeats, cron_definition) in job_list:
# .. build its business representation ..
job_data = Bunch({
'id':id, 'name':name, 'is_active':is_active,
'job_type':job_type, 'start_date':start_date,
'extra':extra, 'service':service_name, 'weeks':weeks,
'days':days, 'hours':hours, 'minutes':minutes,
'seconds':seconds, 'repeats':repeats,
'cron_definition':cron_definition
})
# .. and invoke a common function to add it to the scheduler.
_add_scheduler_job(api, job_data, spawn, 'load_scheduler_jobs_by_odb')
# ################################################################################################################################
def add_startup_jobs_to_odb_by_api(api:'SchedulerAPI', jobs:'list_[Bunch]', stats_enabled:'bool') -> 'None':
""" Uses server API calls to add initial startup jobs to the ODB.
"""
# This can be static for all the jobs because the backend will calculate the actual start time itself
start_date = '2023-01-02T11:22:33'
# Jobs that we are creating will be active unless the configuration says otherwise
is_active = True
# All of the jobs that we are adding are interval-based
job_type = SCHEDULER.JOB_TYPE.INTERVAL_BASED
# We are going to ignore jobs that already exist
should_ignore_existing = True
# Go through each of the jobs that we are to add ..
for job in jobs:
# .. make sure that the service that it depends on is deployed ..
wait_for_odb_service_by_api(api, job['service'])
# .. build a request describing the job to be created by copying its configuration ..
request = deepcopy(job)
# .. fill out the remaining details ..
if not 'is_active' in job:
request.is_active = is_active
if not 'job_type' in job:
request.job_type = job_type
if not 'start_date' in job:
request.start_date = start_date
if not 'should_ignore_existing' in job:
request.should_ignore_existing = should_ignore_existing
# .. now, we can create a new job, ignoring the fact that it may potentially already exist.
api.invoke_service('zato.scheduler.job.create', request)
# ################################################################################################################################
def load_scheduler_jobs_by_api(api:'SchedulerAPI', spawn:'bool') -> 'None':
""" Uses server API calls to obtain a list of all jobs that the scheduler should run.
"""
# Get a list of all the jobs we are to run ..
response = api.invoke_service('zato.scheduler.job.get-list')
# .. we have some jobs to schedule ..
if response:
# .. log what we are about to add ..
items = sorted(elem['name'] for elem in response)
logger.info('Loading jobs into scheduler -> %s', items)
# .. go through each of the jobs received ..
for item in response:
# .. enrich each of them ..
job_data = Bunch(item)
job_data.service = job_data.service_name
# .. and invoke a common function to add it to the scheduler.
_add_scheduler_job(api, job_data, spawn, 'load_scheduler_jobs_by_api')
# .. there is nothing for us to run ..
else:
logger.info('No jobs were received from the server')
# ################################################################################################################################
# ################################################################################################################################
| 10,163
|
Python
|
.py
| 190
| 44.210526
| 130
| 0.483939
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,690
|
kvdb.py
|
zatosource_zato/code/zato-common/src/zato/common/util/kvdb.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
def has_redis_sentinels(config):
return config.get('use_redis_sentinels', False)
# ################################################################################################################################
| 504
|
Python
|
.py
| 9
| 54
| 130
| 0.334694
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,691
|
wsx.py
|
zatosource_zato/code/zato-common/src/zato/common/util/wsx.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from json import dumps
from logging import getLogger
from tempfile import gettempdir
# Zato
from zato.common.api import WEB_SOCKET
from zato.common.util.file_system import fs_safe_name
from zato.common.util.open_ import open_r, open_rw
# ################################################################################################################################
if 0:
from zato.common.typing_ import stranydict
from zato.server.service import Service
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_wsx = getLogger('zato_web_socket')
# ################################################################################################################################
msg_cleanup_error = 'WSX cleanup error, wcr:`%d`, si:`%s`, pci:`%s`, sk_list:`%s`, h:`%r`, hs:`%r`, hr:`%r`, ofl:`%s`, e:`%s`'
# ################################################################################################################################
_on_disconnected = WEB_SOCKET.HOOK_TYPE.ON_DISCONNECTED
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, textio_
# ################################################################################################################################
def find_wsx_environ(service:'Service', raise_if_not_found:'bool'=True) -> 'stranydict':
wsx_environ = service.wsgi_environ.get('zato.request_ctx.async_msg', {}).get('environ') or {}
if not wsx_environ:
if raise_if_not_found:
raise Exception('Could not find `[\'zato.request_ctx.async_msg\'][\'environ\']` in WSGI environ `{}`'.format(
service.wsgi_environ))
else:
return wsx_environ
# ################################################################################################################################
def cleanup_wsx_client(wsx_cleanup_required, service_invoker, pub_client_id, sub_keys, hook, hook_service, hook_request,
opaque_func_list=None):
""" Cleans up information about a WSX client that has disconnected.
"""
try:
# Sometime it will not be needed at all, e.g. when we clean up a half-opened connection that never
# succesfully authenticated.
if wsx_cleanup_required:
# Deletes state from SQL
service_invoker('zato.channel.web-socket.client.delete-by-pub-id', {
'pub_client_id': pub_client_id,
})
if sub_keys:
# Deletes across all workers the in-RAM pub/sub state about the client that is disconnecting
service_invoker('zato.channel.web-socket.client.unregister-ws-sub-key', {
'sub_key_list': sub_keys,
})
# An opaque list of functions to invoke - each caller may decide what else should be carried out here
for func in opaque_func_list or []:
func()
# Run the relevant on_disconnected hook, if any is available (even if the session was never opened)
if hook:
hook(_on_disconnected, hook_service, **hook_request)
except Exception as e:
for logger in logger_zato, logger_wsx:
logger.info(msg_cleanup_error, wsx_cleanup_required, service_invoker, pub_client_id, sub_keys, hook,
hook_service, hook_request, opaque_func_list, e)
# ################################################################################################################################
class ContextHandler:
_file: 'textio_'
def __init__(self, *, ctx_container_name:'str', is_read_only:'bool') -> 'None':
# Prepare metadata
self.ctx_container_name = ctx_container_name
self.is_read_only = is_read_only
self.open_func = open_r if self.is_read_only else open_rw
self.ctx_file_path = '<not-set-yet-ContextHandler-ctx_file_path>'
# And open the context file now either for reading or r/w
self.init()
# ################################################################################################################################
def init(self):
# Store context in a temporary directory ..
tmp_dir = gettempdir()
# .. under the same file as our channel's name ..
name_safe = fs_safe_name(self.ctx_container_name)
self.ctx_file_path = os.path.join(tmp_dir, 'zato-' + name_safe + '.txt')
# .. create and return the file now.
self._file = self.open_func(self.ctx_file_path)
# ################################################################################################################################
def store(self, data:'any_') -> 'None':
if not isinstance(data, dict):
data = {'data':str(data)}
data = dumps(data)
self._file.write(f'{data}\n')
self._file.flush()
# ################################################################################################################################
# ################################################################################################################################
| 5,481
|
Python
|
.py
| 94
| 51.223404
| 130
| 0.454273
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,692
|
env.py
|
zatosource_zato/code/zato-common/src/zato/common/util/env.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, strlist, strlistnone
# ################################################################################################################################
# ################################################################################################################################
def populate_environment_from_file(env_path:'str', *, to_delete:'strlistnone'=None, use_print:'bool'=True) -> 'strlist':
# stdlib
import os
from logging import getLogger
# Reusable
logger = getLogger('zato')
# Our response to produce
out:'strlist' = []
if env_path:
if not os.path.exists(env_path):
# Reusable
msg = 'No such path (env. variables) -> %s'
# Optionally, we need to use print too because logging may not be configured yet ..
if use_print:
print(msg % env_path)
# .. but use logging nevertheless.
logger.info(msg, env_path)
else:
# Zato
from zato.common.ext.configobj_ import ConfigObj
# Local variables
to_delete = to_delete or []
msg_deleted = 'Deleted env. variable `%s`'
msg_imported = 'Imported env. variable `%s` from `%s`'
# Build a configuration object with new variables to load ..
env_config = ConfigObj(env_path)
env = env_config.get('env') or {}
# .. delete any previous ones ..
for key in to_delete:
_:'any_' = os.environ.pop(key, None)
logger.info(msg_deleted, key)
# .. go through everything that is new ..
for key, value in env.items(): # type: ignore
# .. make sure values are strings ..
if isinstance(value, (int, float)):
value = str(value)
# .. do update the environment ..
os.environ[key] = value
# .. append the key for our caller's benefit ..
out.append(key)
# .. optionally, use print for logging ..
if use_print:
print(msg_imported % (key, env_path))
# .. but use logging too.
logger.info(msg_imported, key, env_path)
# We are ready to return our response now
return out
# ################################################################################################################################
def get_list_from_environment(key:'str', separator:'str') -> 'strlist':
# stdlib
import os
# Zato
from zato.common.util.api import make_list_from_string_list
# Our value to produce
out = []
if value := os.environ.get(key):
value = make_list_from_string_list(value, separator)
out.extend(value)
return out
# ################################################################################################################################
# ################################################################################################################################
| 3,564
|
Python
|
.py
| 72
| 39.930556
| 130
| 0.414571
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,693
|
import_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/import_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from importlib import import_module
# ################################################################################################################################
# ################################################################################################################################
def import_string(name):
name = name.split('.')
attr_name = name[-1]
mod_name = '.'.join(name[:-1])
mod = import_module(mod_name)
return getattr(mod, attr_name)
# ################################################################################################################################
# ################################################################################################################################
| 902
|
Python
|
.py
| 17
| 50.529412
| 130
| 0.274175
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,694
|
eval_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/eval_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
true_values = 'true', 'yes', 'on', 'y', 't', '1' # noqa: E222
false_values = 'false', 'no', 'off', 'n', 'f', '0'
def as_bool(data):
if isinstance(data, (str, bytes)):
data = data.strip().lower()
if data in true_values:
return True
elif data in false_values:
return False
elif data == '':
return False
else:
raise ValueError('String is not true/false: %r' % data)
return bool(data)
def as_list(data, sep=None, strip=True):
if isinstance(data, (str, bytes)):
lst = data.split(sep)
if strip:
lst = [v.strip() for v in lst]
return lst
elif isinstance(data, (list, tuple)):
return data
elif data is None:
return []
else:
return [data]
# ################################################################################################################################
| 1,483
|
Python
|
.py
| 36
| 35.111111
| 130
| 0.498607
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,695
|
logging_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/logging_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from logging import Formatter
# Zato
from zato.common.util.platform_ import is_posix
# ################################################################################################################################
# ################################################################################################################################
logging_conf_contents = """
loggers:
'':
level: INFO
handlers: [stdout, default]
'gunicorn.main':
level: INFO
handlers: [stdout, default]
'pytds':
level: WARN
handlers: [stdout, default]
zato:
level: INFO
handlers: [stdout, default]
qualname: zato
propagate: false
zato_rest:
level: INFO
handlers: [stdout, default]
qualname: zato
propagate: false
zato_access_log:
level: INFO
handlers: [http_access_log]
qualname: zato_access_log
propagate: false
zato_admin:
level: INFO
handlers: [admin]
qualname: zato_admin
propagate: false
zato_audit_pii:
level: INFO
handlers: [stdout, audit_pii]
qualname: zato_audit_pii
propagate: false
zato_connector:
level: INFO
handlers: [connector]
qualname: zato_connector
propagate: false
zato_hl7:
level: INFO
handlers: [stdout, hl7]
qualname: zato_hl7
propagate: false
zato_kvdb:
level: INFO
handlers: [kvdb]
qualname: zato_kvdb
propagate: false
zato_pubsub:
level: INFO
handlers: [stdout, pubsub]
qualname: zato_pubsub
propagate: false
zato_pubsub_overflow:
level: INFO
handlers: [pubsub_overflow]
qualname: zato_pubsub_overflow
propagate: false
zato_pubsub_audit:
level: INFO
handlers: [pubsub_audit]
qualname: zato_pubsub_audit
propagate: false
zato_rbac:
level: INFO
handlers: [rbac]
qualname: zato_rbac
propagate: false
zato_scheduler:
level: INFO
handlers: [stdout, scheduler]
qualname: zato_scheduler
propagate: false
zato_web_socket:
level: INFO
handlers: [stdout, web_socket]
qualname: zato_web_socket
propagate: false
zato_ibm_mq:
level: INFO
handlers: [stdout, ibm_mq]
qualname: zato_ibm_mq
propagate: false
zato_notif_sql:
level: INFO
handlers: [stdout, notif_sql]
qualname: zato_notif_sql
propagate: false
handlers:
default:
formatter: default
class: {log_handler_class}
filename: './logs/server.log'
mode: 'a'
maxBytes: {server_log_max_size}
backupCount: {server_log_backup_count}
encoding: 'utf8'
stdout:
formatter: colour
class: logging.StreamHandler
stream: ext://sys.stdout
http_access_log:
formatter: default
class: {log_handler_class}
filename: './logs/http_access.log'
mode: 'a'
maxBytes: {server_log_max_size}
backupCount: {server_log_backup_count}
encoding: 'utf8'
admin:
formatter: default
class: {log_handler_class}
filename: './logs/admin.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
audit_pii:
formatter: default
class: logging.handlers.RotatingFileHandler
filename: './logs/audit-pii.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
connector:
formatter: default
class: {log_handler_class}
filename: './logs/connector.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
hl7:
formatter: default
class: {log_handler_class}
filename: './logs/hl7.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
kvdb:
formatter: default
class: {log_handler_class}
filename: './logs/kvdb.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
pubsub:
formatter: default
class: {log_handler_class}
filename: './logs/pubsub.log'
mode: 'a'
maxBytes: {server_log_max_size}
backupCount: {server_log_backup_count}
encoding: 'utf8'
pubsub_overflow:
formatter: default
class: {log_handler_class}
filename: './logs/pubsub-overflow.log'
mode: 'a'
maxBytes: {server_log_max_size}
backupCount: {server_log_backup_count}
encoding: 'utf8'
pubsub_audit:
formatter: default
class: {log_handler_class}
filename: './logs/pubsub-audit.log'
mode: 'a'
maxBytes: {server_log_max_size}
backupCount: {server_log_backup_count}
encoding: 'utf8'
rbac:
formatter: default
class: {log_handler_class}
filename: './logs/rbac.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
scheduler:
formatter: default
class: {log_handler_class}
filename: './logs/scheduler.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
web_socket:
formatter: default
class: {log_handler_class}
filename: './logs/web_socket.log'
mode: 'a'
maxBytes: {server_log_max_size}
backupCount: {server_log_backup_count}
encoding: 'utf8'
ibm_mq:
formatter: default
class: logging.handlers.RotatingFileHandler
filename: './logs/ibm-mq.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
notif_sql:
formatter: default
class: logging.handlers.RotatingFileHandler
filename: './logs/notif-sql.log'
mode: 'a'
maxBytes: 20000000
backupCount: 10
encoding: 'utf8'
formatters:
audit_pii:
format: '%(message)s'
default:
format: '%(asctime)s - %(levelname)s - %(process)d:%(threadName)s - %(name)s:%(lineno)d - %(message)s'
http_access_log:
format: '%(remote_ip)s %(cid_resp_time)s "%(channel_name)s" [%(req_timestamp)s] "%(method)s %(path)s %(http_version)s" %(status_code)s %(response_size)s "-" "%(user_agent)s"'
colour:
format: '%(asctime)s - %(levelname)s - %(process)d:%(threadName)s - %(name)s:%(lineno)d - %(message)s'
(): zato.common.util.api.ColorFormatter
version: 1
""" # noqa: E501
# ################################################################################################################################
# ################################################################################################################################
# Based on http://stackoverflow.com/questions/384076/how-can-i-make-the-python-logging-output-to-be-colored
class ColorFormatter(Formatter):
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
RESET_SEQ = '\033[0m'
COLOR_SEQ = '\033[1;%dm'
BOLD_SEQ = '\033[1m'
COLORS = {
'WARNING': YELLOW,
'INFO': WHITE,
'DEBUG': BLUE,
'CRITICAL': YELLOW,
'ERROR': RED,
'TRACE1': YELLOW
}
def __init__(self, fmt):
self.use_color = True if is_posix else False
super(ColorFormatter, self).__init__(fmt)
# ################################################################################################################################
def formatter_msg(self, msg, use_color=True):
if use_color:
msg = msg.replace('$RESET', self.RESET_SEQ).replace('$BOLD', self.BOLD_SEQ)
else:
msg = msg.replace('$RESET', '').replace('$BOLD', '')
return msg
# ################################################################################################################################
def format(self, record):
levelname = record.levelname
if self.use_color and levelname in self.COLORS:
fore_color = 30 + self.COLORS[levelname]
levelname_color = self.COLOR_SEQ % fore_color + levelname + self.RESET_SEQ
record.levelname = levelname_color
return Formatter.format(self, record)
# ################################################################################################################################
# ################################################################################################################################
def get_logging_conf_contents() -> 'str':
# type: (str) -> str
# We import it here to make CLI work faster
from zato.common.util.platform_ import is_linux
linux_log_handler_class = 'logging.handlers.ConcurrentRotatingFileHandler'
non_linux_log_handler_class = 'logging.handlers.RotatingFileHandler'
# Under Windows, we cannot have multiple processes access the same log file
log_handler_class = linux_log_handler_class if is_linux else non_linux_log_handler_class
# This can be overridden by users
if server_log_max_size := os.environ.get('Zato_Server_Log_Max_Size'):
server_log_max_size = int(server_log_max_size)
else:
server_log_max_size = 1000000000 # 1 GB by default
# This can be overridden by users
if server_log_backup_count := os.environ.get('Zato_Server_Log_Backup_Count'):
server_log_backup_count = int(server_log_backup_count)
else:
server_log_backup_count = 2
return logging_conf_contents.format(
log_handler_class=log_handler_class,
server_log_max_size=server_log_max_size,
server_log_backup_count=server_log_backup_count,
)
# ################################################################################################################################
# ################################################################################################################################
| 10,353
|
Python
|
.py
| 301
| 26.887043
| 182
| 0.526384
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,696
|
proc.py
|
zatosource_zato/code/zato-common/src/zato/common/util/proc.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
import sys
from logging import getLogger
from tempfile import mkstemp
from time import time, sleep
# Sarge
from sarge import run as sarge_run, shell_format
# Python 2/3 compatibility
from six import PY2
# Zato
from zato.common.api import CLI_ARG_SEP
from zato.common.util.open_ import open_r
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
stderr_sleep_fg = 0.9
stderr_sleep_bg = 1.2
# ################################################################################################################################
# This is for convenience of switching to a newer version of sarge in the future. Newer versions use async_ instead of async.
async_keyword = 'async_' if PY2 else 'async_'
# ################################################################################################################################
# These messages may be returned by 'zato start' from underlying libraries
# but they do not indicate a genuine error.
stderr_ignore = [
'pykafka.rdkafka',
'Auto-created primary key used when not defining a primary key type',
'Linux distribution found',
]
# ################################################################################################################################
import platform
system = platform.system()
is_windows = 'windows' in system.lower()
# ################################################################################################################################
def get_executable():
""" Returns the wrapper which pip uses for executing Zato commands,
the one with all the dependencies added to PYTHONPATH.
"""
if is_windows:
return os.path.join(os.path.dirname(sys.executable), 'python.exe')
return os.path.join(os.path.dirname(sys.executable), 'py')
# ################################################################################################################################
class _StdErr:
# Some log messages (like the ones produced by PyKafka) go to stderr but they are not really errors,
# in which case we need to ignore them.
ignored = [
'Could not load pykafka.rdkafka extension.'
]
def __init__(self, path, timeout):
self.path = path
self.timeout = timeout
# ################################################################################################################################
def wait_for_error(self):
now = time()
while time() - now < self.timeout:
sleep(0.1)
_stderr = open_r(self.path)
_err = _stderr.read()
if _err and (not self.should_ignore(_err)):
return _err
else:
_stderr.close()
# ################################################################################################################################
def should_ignore(self, err):
for item in self.ignored:
if err.endswith(item):
return True
# ################################################################################################################################
def start_process(component_name, executable, run_in_fg, cli_options, extra_cli_options='', on_keyboard_interrupt=None,
failed_to_start_err=-100, extra_options=None, stderr_path=None, stdin_data=None, async_keyword=async_keyword) -> 'int':
""" Starts a new process from a given Python path, either in background or foreground (run_in_fg).
"""
stderr_path = stderr_path or mkstemp('-zato-start-{}.txt'.format(component_name.replace(' ','')))[1]
stdout_redirect = ''
stderr_redirect = ''
# This is the exit code as it will be returned by sarge
exit_code = 0
# We always run in foreground under Windows
if is_windows:
run_in_fg = True
else:
if not run_in_fg:
stdout_redirect = '1> /dev/null'
stderr_redirect = '2> {}'.format(stderr_path)
program = '{} {} {} {}'.format(executable, extra_cli_options, stdout_redirect, stderr_redirect)
try:
_stderr = _StdErr(stderr_path, stderr_sleep_fg if run_in_fg else stderr_sleep_bg)
run_kwargs = {
async_keyword: False if run_in_fg else True,
}
# Do not send input if it does not really exist because it prevents pdb from attaching to a service's stdin
if stdin_data:
run_kwargs['input'] = stdin_data
p = sarge_run(program, **run_kwargs)
# Wait a moment for any potential errors
_err = _stderr.wait_for_error()
if _err:
should_be_ignored = False
for item in stderr_ignore:
if item in _err:
should_be_ignored = True
break
if not should_be_ignored:
logger.warning('Stderr received from program `%s` e:`%s`, kw:`%s`', program, _err, run_kwargs)
sys.exit(failed_to_start_err)
# Update the exit code ..
exit_code = p.returncode
except KeyboardInterrupt:
if on_keyboard_interrupt:
on_keyboard_interrupt()
sys.exit(0)
finally:
# We can now return the exit code to our caller
return exit_code
# ################################################################################################################################
def start_python_process(component_name, run_in_fg, py_path, program_dir, on_keyboard_interrupt=None, failed_to_start_err=-100,
extra_options=None, stderr_path=None, stdin_data=None) -> 'int':
""" Starts a new process from a given Python path, either in background or foreground (run_in_fg).
"""
options = {
'fg': run_in_fg,
}
if extra_options:
options.update(extra_options)
options = CLI_ARG_SEP.join('{}={}'.format(k, v) for k, v in options.items())
py_path_option = shell_format('-m {0}', py_path)
program_dir_option = shell_format('{0}', program_dir) if program_dir else ''
extra_cli_options = '{} {} {}'.format(py_path_option, program_dir_option, options)
extra_cli_options = '{} '.format(py_path_option)
if program_dir_option:
extra_cli_options += '{} '.format(program_dir_option)
extra_cli_options += '{}'.format(options)
return start_process(component_name, get_executable(), run_in_fg, None, extra_cli_options, on_keyboard_interrupt,
failed_to_start_err, extra_options, stderr_path, stdin_data)
# ################################################################################################################################
| 7,069
|
Python
|
.py
| 141
| 43.751773
| 130
| 0.502981
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,697
|
platform_.py
|
zatosource_zato/code/zato-common/src/zato/common/util/platform_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from platform import platform as platform_platform, system as platform_system
system_result = platform_system().lower()
platform_result = platform_platform().lower()
is_mac = 'mac' in platform_result
is_linux = 'linux' in system_result
is_windows = 'windows' in system_result
is_non_mac = not is_mac
is_non_linux = not is_linux
is_non_windows = not is_windows
non_mac = is_non_mac
non_linux = is_non_linux
non_windows = is_non_windows
# For pyflakes
is_mac = is_mac
is_linux = is_linux
is_windows = is_windows
is_non_mac = is_non_mac
is_non_linux = is_non_linux
is_non_windows = is_non_windows
non_mac = non_mac
non_linux = non_linux
non_windows = non_windows
is_posix = is_non_windows
| 894
|
Python
|
.py
| 29
| 29.448276
| 77
| 0.717799
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,698
|
stats.py
|
zatosource_zato/code/zato-common/src/zato/common/util/stats.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import math
from datetime import timedelta
from operator import itemgetter
# Humanize
from humanize import precisedelta
# Zato
from zato.common.api import StatsKey
# ################################################################################################################################
# ################################################################################################################################
float_stats = ('item_max', 'item_min', 'item_mean', 'item_total_time')
# ################################################################################################################################
# ################################################################################################################################
def tmean(data, limit_from=None, limit_to=None):
""" Trimmed mean - includes only elements up to the input limit, if it is given at all.
"""
data = data if isinstance(data, list) else [data]
if limit_from or limit_to:
_data = []
for elem in data:
if limit_from:
if elem < limit_from:
continue
if limit_to:
if elem > limit_to:
continue
_data.append(elem)
data = _data[:]
count = len(data)
total = sum(data)
return total / count if count else 0
# ################################################################################################################################
# ################################################################################################################################
#
# Taken from https://code.activestate.com/recipes/511478-finding-the-percentile-of-the-values/
#
# Original code by Wai Yip Tung, licensed under the Python Foundation License
#
def percentile(data, percent, key=lambda x:x):
"""
Find the percentile of a list of values.
@parameter data - a list of values
@parameter percent - a float value from 0.0 to 1.0.
@parameter key - optional key function to compute value from each element of data.
@return - the percentile of the values
"""
if not data:
return 0
data.sort()
k = (len(data)-1) * percent
f = math.floor(k)
c = math.ceil(k)
if f == c:
return key(data[int(k)])
d0 = key(data[int(f)]) * (c-k)
d1 = key(data[int(c)]) * (k-f)
return d0 + d1
# ################################################################################################################################
# ################################################################################################################################
def collect_current_usage(data):
# type: (list) -> dict
# numpy
import numpy as np
# For later use
usage = 0
last_duration = None
last_timestamp = ''
usage_min = None
usage_max = None
usage_mean = None
# Make sure we always have a list to iterate over (rather than None)
data = data or []
for elem in data:
if elem is None:
continue
usage += elem[StatsKey.PerKeyValue]
if elem[StatsKey.PerKeyLastTimestamp] > last_timestamp:
last_timestamp = elem[StatsKey.PerKeyLastTimestamp]
last_duration = elem[StatsKey.PerKeyLastDuration]
if usage_min:
usage_min = min([usage_min, elem[StatsKey.PerKeyMin]])
else:
usage_min = elem[StatsKey.PerKeyMin]
if usage_max:
usage_max = max([usage_max, elem[StatsKey.PerKeyMax]])
else:
usage_max = elem[StatsKey.PerKeyMax]
if usage_mean:
usage_mean = np.mean([usage_mean, elem[StatsKey.PerKeyMean]])
else:
usage_mean = elem[StatsKey.PerKeyMean]
usage_mean = round(usage_mean, 3) if usage_mean else 0
return {
StatsKey.PerKeyValue: usage,
StatsKey.PerKeyLastDuration: last_duration,
StatsKey.PerKeyLastTimestamp: last_timestamp,
StatsKey.PerKeyMin: usage_min,
StatsKey.PerKeyMax: usage_max,
StatsKey.PerKeyMean: usage_mean,
}
# ################################################################################################################################
# ################################################################################################################################
def should_include_in_table_stats(service_name):
# type: (str) -> bool
if service_name.startswith('pub.zato'):
return False
elif service_name.startswith('zato'):
return False
else:
return True
# ################################################################################################################################
# ################################################################################################################################
def combine_table_data(data, round_digits=2):
# type: (list, int) -> dict
# Response to return
out = []
# How many objects we have seen, e.g. how many individual services
total_object_id = 0
# Total usage across all events
total_usage = 0
# Total time spent in all the events (in ms)
total_time = 0
# Total mean time across all objects
total_mean = 0
# First pass, filter out objects with known unneeded names
# and collect total usage of each object and of objects as a whole.
for pid_response in data: # type: dict
if pid_response:
for object_name, stats in pid_response.items(): # type: (str, dict)
if should_include_in_table_stats(object_name):
# Update per object counters
# Total usage needs to be an integer
stats['item_total_usage'] = int(stats['item_total_usage'])
# These are always floats that we need to round up
for name in float_stats:
stats[name] = round(stats[name], round_digits)
# Add to totals
total_usage += stats['item_total_usage']
total_mean += stats['item_mean']
total_time += stats['item_total_time']
total_object_id += 1
# Finally, add the results so that they can be used in further steps
item = dict(stats)
item['name'] = object_name
out.append(item)
# We know how many events we have so we can now compute the mean across all of them
if total_object_id:
total_mean = total_mean / total_object_id
# In this pass, we can attach additional per-object statistics
for item in out: # type: dict
item_usage_share = item['item_total_usage'] / total_usage * 100
item_usage_share = round(item_usage_share, round_digits)
item_time_share = item['item_total_time'] / total_time * 100
item_time_share = round(item_time_share, round_digits)
item['item_usage_share'] = item_usage_share
item['item_time_share'] = item_time_share
item['item_total_usage_human'] = item['item_total_usage'] # Currently, this is the same
total_time_delta_min_unit = 'milliseconds' if item['item_total_time'] < 1 else 'seconds'
total_time_delta = timedelta(milliseconds=item['item_total_time'])
total_time_delta = precisedelta(total_time_delta, minimum_unit=total_time_delta_min_unit)
item['item_total_time_human'] = total_time_delta
# Sort by the most interesting attribute
out.sort(key=itemgetter('item_time_share'), reverse=True)
return out
# ################################################################################################################################
# ################################################################################################################################
| 8,109
|
Python
|
.py
| 171
| 39.660819
| 130
| 0.477152
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,699
|
pubsub.py
|
zatosource_zato/code/zato-common/src/zato/common/util/pubsub.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from operator import itemgetter
# Zato
from zato.common.api import PUBSUB
from zato.common.exception import BadRequest
from zato.common.odb.query import pubsub_endpoint_queue_list_by_sub_keys
from zato.common.pubsub import MSG_PREFIX as PUBSUB_MSG_PREFIX
from zato.common.typing_ import cast_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, anydict, anylist, dictlist, dictorlist, intnone, stranydict, strlist
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
# ################################################################################################################################
# ################################################################################################################################
_PRIORITY = PUBSUB.PRIORITY
_pri_min = _PRIORITY.MIN
_pri_max = _PRIORITY.MAX
_pri_def = _PRIORITY.DEFAULT
_default_expiration = PUBSUB.DEFAULT.EXPIRATION
# ################################################################################################################################
# ################################################################################################################################
def make_short_msg_copy_from_dict(msg:'stranydict', data_prefix_len:'int', data_prefix_short_len:'int') -> 'stranydict':
out_msg = {} # type: stranydict
out_msg['msg_id'] = msg['pub_msg_id']
out_msg['in_reply_to'] = msg.get('in_reply_to')
out_msg['data'] = msg['data'][:data_prefix_len]
out_msg['data_prefix_short'] = out_msg['data'][:data_prefix_short_len]
out_msg['size'] = msg['size']
out_msg['pub_pattern_matched'] = msg['pub_pattern_matched']
out_msg['sub_pattern_matched'] = msg['sub_pattern_matched']
out_msg['pub_time'] = msg['pub_time']
out_msg['expiration'] = msg['expiration']
out_msg['expiration_time'] = msg['expiration_time']
out_msg['topic_id'] = msg['topic_id']
out_msg['topic_name'] = msg['topic_name']
out_msg['cluster_id'] = msg['cluster_id']
out_msg['published_by_id'] = msg['published_by_id']
out_msg['delivery_status'] = msg['delivery_status']
out_msg['server_name'] = msg['server_name']
out_msg['server_pid'] = msg['server_pid']
out_msg['has_gd'] = msg['has_gd']
out_msg['recv_time'] = msg['recv_time']
out_msg['sub_key'] = msg['sub_key']
return out_msg
# ################################################################################################################################
def make_short_msg_copy_from_msg(msg:'any_', data_prefix_len:'int', data_prefix_short_len:'int') -> 'stranydict':
out_msg = {} # type: stranydict
out_msg['msg_id'] = msg.pub_msg_id
out_msg['in_reply_to'] = msg.in_reply_to
out_msg['data'] = msg.data[:data_prefix_len]
out_msg['data_prefix_short'] = out_msg['data'][:data_prefix_short_len]
out_msg['size'] = msg.size
out_msg['pub_pattern_matched'] = msg.pub_pattern_matched
out_msg['sub_pattern_matched'] = msg.sub_pattern_matched
out_msg['pub_time'] = msg.pub_time
out_msg['expiration'] = msg.expiration
out_msg['expiration_time'] = msg.expiration_time
out_msg['topic_id'] = msg.topic_id
out_msg['topic_name'] = msg.topic_name
out_msg['cluster_id'] = msg.cluster_id
out_msg['published_by_id'] = msg.published_by_id
out_msg['delivery_status'] = msg.delivery_status
out_msg['server_name'] = msg.server_name
out_msg['server_pid'] = msg.server_pid
out_msg['has_gd'] = msg.has_gd
out_msg['recv_time'] = msg.recv_time
out_msg['sub_key'] = msg.sub_key
return out_msg
# ################################################################################################################################
def get_last_topics(topic_list:'dictlist', as_list:'bool'=True) -> 'dictorlist':
# Response to produce if as_list is not True.
out = {} # type: anydict
for item in topic_list:
for _ignored_topic_key, topic_data in item.items():
# Local alias
topic_id = topic_data['topic_id'] # type: int
# .. we may have visited this topic already ..
previous = out.get(topic_id, {}) # type: anydict
# .. if we have ..
if previous:
if topic_data['pub_time'] > previous['pub_time']:
out[topic_id] = topic_data
# .. otherwise, we can just set the current one ..
else:
out[topic_id] = topic_data
if as_list:
out = sorted(out.values(), key=itemgetter('pub_time'), reverse=True) # type: ignore
return out
else:
return out
# ################################################################################################################################
def get_last_pub_metadata(server:'ParallelServer', topic_id_list:'anylist | int') -> 'anydict':
# Make sure we have a list on input
if isinstance(topic_id_list, list):
input_topic_id = None
is_single_topic = False
else:
input_topic_id = int(topic_id_list)
is_single_topic = True
topic_id_list = [topic_id_list]
# Always use integers for topic IDs
topic_id_list = [int(elem) for elem in topic_id_list]
# Look up topic metadata in all the servers ..
response = server.rpc.invoke_all(
'zato.pubsub.topic.get-topic-metadata', {'topic_id_list':topic_id_list}, skip_response_elem=False)
# Produce our response
out = get_last_topics(response.data, as_list=False) # type: any_
if is_single_topic:
return out.get(input_topic_id) or {}
else:
return out
# ################################################################################################################################
def get_endpoint_metadata(server:'ParallelServer', endpoint_id:'int') -> 'dictorlist':
# All topics from all PIDs
topic_list = [] # type: dictlist
# Information about a single topic
topic_dict = {}
response = server.rpc.invoke_all('zato.pubsub.endpoint.get-endpoint-metadata', {'endpoint_id':endpoint_id})
for pid_response in response.data:
for pid_topic_list in pid_response.values():
for topic_data in pid_topic_list:
topic_id = topic_data['topic_id']
topic_dict[topic_id] = topic_data
topic_list.append(topic_dict)
return get_last_topics(topic_list, as_list=True)
# ################################################################################################################################
def get_topic_sub_keys_from_sub_keys(session:'SASession', cluster_id:'int', sub_key_list:'strlist') -> 'stranydict':
topic_sub_keys = {} # type: stranydict
for item in pubsub_endpoint_queue_list_by_sub_keys(session, cluster_id, sub_key_list):
topic_name = cast_('str', item.topic_name)
sub_keys = topic_sub_keys.setdefault(topic_name, []) # type: strlist
sub_keys.append(item.sub_key)
return topic_sub_keys
# ################################################################################################################################
def get_priority(
cid, # type: str
priority, # type: intnone
_pri_min=_pri_min, # type: int
_pri_max=_pri_max, # type: int
_pri_def=_pri_def # type: int
) -> 'int':
""" Get and validate message priority.
"""
if priority:
if priority < _pri_min or priority > _pri_max:
raise BadRequest(cid, 'Priority `{}` outside of allowed range {}-{}'.format(priority, _pri_min, _pri_max))
else:
priority = _pri_def
return priority
# ################################################################################################################################
def get_expiration(
cid:'str',
expiration:'intnone',
topic_limit_message_expiry:'int',
default_expiration:'int'=_default_expiration
) -> 'int':
""" Get and validate message expiration.
"""
expiration = expiration or 0
if expiration is not None and expiration < 0:
raise BadRequest(cid, 'Expiration `{}` must not be negative'.format(expiration))
# If there is no expiration set, try the default one ..
expiration = expiration or default_expiration
# .. however, we can never exceed the limit set by the topic object,
# .. so we need to take that into account as well.
expiration = min(expiration, topic_limit_message_expiry)
# We can return the final value now
return expiration
# ################################################################################################################################
def is_service_subscription(config:'any_') -> 'bool':
return config.sub_key.startswith(PUBSUB_MSG_PREFIX.SERVICE_SK)
# ################################################################################################################################
| 9,373
|
Python
|
.py
| 180
| 46.483333
| 130
| 0.515264
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|