id
int64 0
458k
| file_name
stringlengths 4
119
| file_path
stringlengths 14
227
| content
stringlengths 24
9.96M
| size
int64 24
9.96M
| language
stringclasses 1
value | extension
stringclasses 14
values | total_lines
int64 1
219k
| avg_line_length
float64 2.52
4.63M
| max_line_length
int64 5
9.91M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 7
101
| repo_stars
int64 100
139k
| repo_forks
int64 0
26.4k
| repo_open_issues
int64 0
2.27k
| repo_license
stringclasses 12
values | repo_extraction_date
stringclasses 433
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
10,200
|
tornadoclient.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/client/tornadoclient.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
import ssl
from tornado import iostream, escape
from zato.server.ext.ws4py.client import WebSocketBaseClient
from zato.server.ext.ws4py.exc import HandshakeError
__all__ = ['TornadoWebSocketClient']
class TornadoWebSocketClient(WebSocketBaseClient):
    def __init__(self, url, protocols=None, extensions=None,
                 io_loop=None, ssl_options=None, headers=None):
        """
        WebSocket client whose I/O is driven by a Tornado IOLoop
        through ``tornado.iostream`` rather than blocking socket calls.

        .. code-block:: python

           from tornado import ioloop

           class MyClient(TornadoWebSocketClient):
               def opened(self):
                   for i in range(0, 200, 25):
                       self.send("*" * i)

               def received_message(self, m):
                   print((m, len(str(m))))

               def closed(self, code, reason=None):
                   ioloop.IOLoop.instance().stop()

           ws = MyClient('ws://localhost:9000/echo', protocols=['http-only', 'chat'])
           ws.connect()

           ioloop.IOLoop.instance().start()
        """
        # NOTE(review): WebSocketBaseClient.__init__ as defined in this
        # package takes `server` as its first positional parameter, which
        # is not passed here -- confirm whether this client is still in use.
        WebSocketBaseClient.__init__(self, url, protocols, extensions,
                                     ssl_options=ssl_options, headers=headers)
        if self.scheme == "wss":
            context = ssl.create_default_context()
            self.sock = context.wrap_socket(self.sock, do_handshake_on_connect=False, **self.ssl_options)
            self._is_secure = True
            # NOTE(review): self.sock was just SSL-wrapped above, yet
            # SSLIOStream also receives ssl_options -- this looks like a
            # double TLS wrap; verify against the tornado.iostream API.
            self.io = iostream.SSLIOStream(self.sock, io_loop, ssl_options=self.ssl_options)
        else:
            self.io = iostream.IOStream(self.sock, io_loop)
        self.io_loop = io_loop

    def connect(self):
        """
        Connects the websocket and initiate the upgrade handshake.
        """
        # Until connected, an unexpected close means the connection was refused
        self.io.set_close_callback(self.__connection_refused)
        self.io.connect((self.host, int(self.port)), self.__send_handshake)

    def _write(self, b):
        """
        Trying to prevent a write operation
        on an already closed websocket stream.

        This cannot be bullet proof but hopefully
        will catch almost all use cases.
        """
        if self.terminated:
            raise RuntimeError("Cannot send on a terminated websocket")
        self.io.write(b)

    def __connection_refused(self, *args, **kwargs):
        # 1005: no close status code was actually present (RFC 6455 section 7.4.1)
        self.server_terminated = True
        self.closed(1005, 'Connection refused')

    def __send_handshake(self):
        # From here on an unexpected close means the handshake failed
        self.io.set_close_callback(self.__connection_closed)
        self.io.write(escape.utf8(self.handshake_request),
                      self.__handshake_sent)

    def __connection_closed(self, *args, **kwargs):
        # 1006: abnormal closure (RFC 6455 section 7.4.1)
        self.server_terminated = True
        self.closed(1006, 'Connection closed during handshake')

    def __handshake_sent(self):
        # Read the server's response headers up to the blank line
        self.io.read_until(b"\r\n\r\n", self.__handshake_completed)

    def __handshake_completed(self, data):
        self.io.set_close_callback(None)
        try:
            response_line, _, headers = data.partition(b'\r\n')
            self.process_response_line(response_line)
            protocols, extensions = self.process_handshake_header(headers)
        except HandshakeError:
            self.close_connection()
            raise

        self.opened()
        # Handshake done - switch to frame-reading mode
        self.io.set_close_callback(self.__stream_closed)
        self.io.read_bytes(self.reading_buffer_size, self.__fetch_more)

    def __fetch_more(self, bytes):
        try:
            should_continue = self.process(bytes)
        except:
            should_continue = False

        if should_continue:
            self.io.read_bytes(self.reading_buffer_size, self.__fetch_more)
        else:
            self.__gracefully_terminate()

    def __gracefully_terminate(self):
        self.client_terminated = self.server_terminated = True

        try:
            # Only report 1006 if no proper close frame was received
            if not self.stream.closing:
                self.closed(1006)
        finally:
            self.close_connection()

    def __stream_closed(self, *args, **kwargs):
        self.io.set_close_callback(None)
        code = 1006
        reason = None
        # Prefer the close code/reason the peer actually sent, if any
        if self.stream.closing:
            code, reason = self.stream.closing.code, self.stream.closing.reason
        self.closed(code, reason)
        self.stream._cleanup()

    def close_connection(self):
        """
        Close the underlying connection
        """
        self.io.close()
if __name__ == '__main__':
    from tornado import ioloop

    # Manual smoke test against a local echo server on port 9000
    class MyClient(TornadoWebSocketClient):
        def opened(self):
            def data_provider():
                for i in range(0, 200, 25):
                    yield "#" * i
            self.send(data_provider())

            for i in range(0, 200, 25):
                self.send("*" * i)

        def received_message(self, m):
            print("#%d" % len(m))
            # The longest echoed message signals the end of the test
            if len(m) == 175:
                self.close()

        def closed(self, code, reason=None):
            ioloop.IOLoop.instance().stop()
            print(("Closed down", code, reason))

    ws = MyClient('ws://localhost:9000/ws', protocols=['http-only', 'chat'])
    ws.connect()

    ioloop.IOLoop.instance().start()
| 5,102
|
Python
|
.py
| 124
| 30.645161
| 105
| 0.588592
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,201
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/client/__init__.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
import os
import socket
import ssl
from base64 import b64encode
from hashlib import sha1
from logging import getLogger
from zato.common.api import NotGiven
from zato.server.ext.ws4py import WS_KEY, WS_VERSION
from zato.server.ext.ws4py.exc import HandshakeError
from zato.server.ext.ws4py.websocket import WebSocket
from zato.server.ext.ws4py.compat import urlsplit
__all__ = ['WebSocketBaseClient']
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class WebSocketBaseClient(WebSocket):
    def __init__(self, server, url, protocols=None, extensions=None,
                 heartbeat_freq=None, ssl_options=None, headers=None,
                 socket_read_timeout=None,
                 socket_write_timeout=None):
        """
        A websocket client that implements :rfc:`6455` and provides a simple
        interface to communicate with a websocket server.

        This class works on its own but will block if not run in
        its own thread.

        When an instance of this class is created, a :py:mod:`socket`
        is created. If the connection is a TCP socket,
        Nagle's algorithm is disabled.

        The address of the server will be extracted from the given
        websocket url.

        The websocket key is randomly generated, reset the
        `key` attribute if you want to provide yours.

        For instance to create a TCP client:

        .. code-block:: python

           >>> from websocket.client import WebSocketBaseClient
           >>> ws = WebSocketBaseClient('ws://localhost/ws')

        Here is an example for a TCP client over SSL:

        .. code-block:: python

           >>> ws = WebSocketBaseClient('wss://localhost/ws')

        Finally an example of a Unix-domain connection:

        .. code-block:: python

           >>> ws = WebSocketBaseClient('ws+unix:///tmp/my.sock')

        Note that in this case, the initial Upgrade request
        will be sent to ``/``. You may need to change this
        by setting the resource explicitly before connecting:

        .. code-block:: python

           >>> ws.resource = '/ws'
           >>> ws.connect()

        You may provide extra headers by passing a list of tuples
        which must be unicode objects.
        """
        self.url = url
        self.host = None
        self.scheme = None
        self.port = None
        self.unix_socket_path = None
        self.resource = None
        self.ssl_options = ssl_options or {}
        self.extra_headers = headers or []

        # Fills in host/port/scheme/resource/unix_socket_path from self.url
        self._parse_url()

        sock = self.create_socket()

        WebSocket.__init__(self, server, sock, protocols=protocols,
                           extensions=extensions,
                           heartbeat_freq=heartbeat_freq,
                           socket_read_timeout=socket_read_timeout,
                           socket_write_timeout=socket_write_timeout)

        # Clients must mask their frames and servers must not (RFC 6455 section 5.3)
        self.stream.always_mask = True
        self.stream.expect_masking = False

        # Random nonce for the Sec-WebSocket-Key header
        self.key = b64encode(os.urandom(16))

    def create_socket(self):
        """
        Build and return the socket matching the parsed URL: a Unix-domain
        stream socket for ``ws+unix`` URLs, otherwise a TCP socket resolved
        for either IPv4 or IPv6.
        """
        if self.unix_socket_path:
            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
        else:
            # Let's handle IPv4 and IPv6 addresses.
            # Simplified from CherryPy's code.
            try:
                family, socktype, proto, canonname, sa = socket.getaddrinfo(self.host, self.port,
                                                                            socket.AF_UNSPEC,
                                                                            socket.SOCK_STREAM,
                                                                            0, socket.AI_PASSIVE)[0]
            except socket.gaierror:
                # Resolution failed - fall back to sensible defaults
                family = socket.AF_INET
                if self.host.startswith('::'):
                    family = socket.AF_INET6

                socktype = socket.SOCK_STREAM
                proto = 0
                canonname = ""
                sa = (self.host, self.port, 0, 0)

            sock = socket.socket(family, socktype, proto)
            # Disable Nagle's algorithm - websocket frames are often small
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

            if hasattr(socket, 'AF_INET6') and family == socket.AF_INET6 and \
               self.host.startswith('::'):
                try:
                    sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
                except (AttributeError, socket.error):
                    pass

        return sock

    def rebuild_socket(self):
        """
        Close the current connection and attach a brand new,
        unconnected socket in its place.
        """
        self.close_connection()
        # Local renamed from `socket` - it used to shadow the socket module
        new_sock = self.create_socket()
        self.sock = new_sock

    # Adapted from: https://github.com/liris/websocket-client/blob/master/websocket.py#L105
    def _parse_url(self):
        """
        Parses a URL which must have one of the following forms:

        - ws://host[:port][path]
        - wss://host[:port][path]
        - ws+unix:///path/to/my.socket

        In the first two cases, the ``host`` and ``port``
        attributes will be set to the parsed values. If no port
        is explicitly provided, it will be either 80 or 443
        based on the scheme. Also, the ``resource`` attribute is
        set to the path segment of the URL (alongside any querystring).

        In addition, if the scheme is ``ws+unix``, the
        ``unix_socket_path`` attribute is set to the path to
        the Unix socket while the ``resource`` attribute is
        set to ``/``.
        """
        # Python 2.6.1 and below don't parse ws or wss urls properly. netloc is empty.
        # See: https://github.com/Lawouach/WebSocket-for-Python/issues/59
        scheme, url = self.url.split(":", 1)

        parsed = urlsplit(url, scheme="http")
        if parsed.hostname:
            self.host = parsed.hostname
        elif '+unix' in scheme:
            self.host = 'localhost'
        else:
            # Fix: the URL used to be passed as a second positional argument
            # to ValueError instead of being interpolated into the message.
            raise ValueError("Invalid hostname from: %s" % self.url)

        # We have our host so we can set the TLS options accordingly
        if not 'server_hostname' in self.ssl_options:
            self.ssl_options['server_hostname'] = self.host

        if parsed.port:
            self.port = parsed.port

        if scheme == "ws":
            if not self.port:
                self.port = 80
        elif scheme == "wss":
            if not self.port:
                self.port = 443
        elif scheme in ('ws+unix', 'wss+unix'):
            pass
        else:
            raise ValueError("Invalid scheme: %s" % scheme)

        if parsed.path:
            resource = parsed.path
        else:
            resource = "/"

        if '+unix' in scheme:
            self.unix_socket_path = resource
            resource = '/'

        if parsed.query:
            resource += "?" + parsed.query

        self.scheme = scheme
        self.resource = resource

    @property
    def bind_addr(self):
        """
        Returns the Unix socket path if or a tuple
        ``(host, port)`` depending on the initial
        URL's scheme.
        """
        return self.unix_socket_path or (self.host, self.port)

    def close(self, code=1000, reason=''):
        """
        Initiate the closing handshake with the server.
        """
        if not self.client_terminated:
            self.client_terminated = True
            try:
                self._write(self.stream.close(code=code, reason=reason).single(mask=True))
            except Exception as e:
                # Best effort only - the peer may already be gone
                logger.info('Caught a WSX exception when closing connection to `%s` -> `%s`', self.address_masked, e)

    def connect(self, close_on_handshake_error=True):
        """
        Connects this websocket and starts the upgrade handshake
        with the remote endpoint.

        Raises :exc:`HandshakeError` if the server does not accept
        the upgrade; the connection is closed first unless
        ``close_on_handshake_error`` is False.
        """
        if self.scheme == "wss":

            # This may be specified if needed ..
            check_hostname = os.environ.get('Zato_WSX_TLS_Check_Hostname', NotGiven)

            # .. if there is no such environment variable, assume the host name needs to be checked,
            # .. otherwise, any value at all disables the check.
            if check_hostname is NotGiven:
                check_hostname = True
            else:
                check_hostname = False

            context = ssl.create_default_context()
            context.check_hostname = check_hostname

            self.sock = context.wrap_socket(self.sock, **self.ssl_options)
            self._is_secure = True

        self.sock.connect(self.bind_addr)

        self._write(self.handshake_request)

        # Read until the end of the response headers (renamed from `bytes`,
        # which shadowed the builtin)
        response = b''
        doubleCLRF = b'\r\n\r\n'
        while True:
            chunk = self.sock.recv(128)
            if not chunk:
                break
            response += chunk
            if doubleCLRF in response:
                break

        if not response:
            self.close_connection()
            raise HandshakeError("Invalid response")

        headers, _, body = response.partition(doubleCLRF)
        response_line, _, headers = headers.partition(b'\r\n')

        try:
            self.process_response_line(response_line)
            self.protocols, self.extensions = self.process_handshake_header(headers)
        except HandshakeError:
            # This will be set to True for backward-compatibility with ws4py
            # from before it was added to ext.
            if close_on_handshake_error:
                self.close_connection()
            raise

        self.handshake_ok()
        if body:
            # Anything past the headers already belongs to the frame stream
            self.process(body)

    @property
    def handshake_headers(self):
        """
        List of headers appropriate for the upgrade
        handshake.
        """
        headers = [
            ('Host', '%s:%s' % (self.host, self.port)),
            ('Connection', 'Upgrade'),
            ('Upgrade', 'websocket'),
            ('Sec-WebSocket-Key', self.key.decode('utf-8')),
            ('Sec-WebSocket-Version', str(max(WS_VERSION)))
            ]

        if self.protocols:
            headers.append(('Sec-WebSocket-Protocol', ','.join(self.protocols)))

        if self.extra_headers:
            headers.extend(self.extra_headers)

        if not any(x for x in headers if x[0].lower() == 'origin'):
            # No Origin header supplied by the caller - derive one from the URL
            scheme, url = self.url.split(":", 1)
            parsed = urlsplit(url, scheme="http")
            if parsed.hostname:
                self.host = parsed.hostname
            else:
                self.host = 'localhost'
            # Fix: use self.host (always set just above) instead of
            # parsed.hostname, which may be None and broke the concatenation.
            origin = scheme + '://' + self.host
            if parsed.port:
                origin = origin + ':' + str(parsed.port)
            headers.append(('Origin', origin))

        return headers

    @property
    def handshake_request(self):
        """
        Prepare the request to be sent for the upgrade handshake.
        """
        headers = self.handshake_headers
        request = [("GET %s HTTP/1.1" % self.resource).encode('utf-8')]
        for header, value in headers:
            request.append(("%s: %s" % (header, value)).encode('utf-8'))
        request.append(b'\r\n')

        return b'\r\n'.join(request)

    def process_response_line(self, response_line):
        """
        Ensure that we received a HTTP `101` status code in
        response to our request and if not raises :exc:`HandshakeError`.
        """
        protocol, code, status = response_line.split(b' ', 2)
        if code != b'101':
            raise HandshakeError("Invalid response status: %s %s" % (code, status))

    def process_handshake_header(self, headers):
        """
        Read the upgrade handshake's response headers and
        validate them against :rfc:`6455`.

        Returns a ``(protocols, extensions)`` tuple of lists of the
        subprotocols and extensions the server agreed to.
        """
        protocols = []
        extensions = []

        headers = headers.strip()

        for header_line in headers.split(b'\r\n'):
            header, value = header_line.split(b':', 1)
            header = header.strip().lower()
            value = value.strip().lower()

            if header == b'upgrade' and value != b'websocket':
                raise HandshakeError("Invalid Upgrade header: %s" % value)

            elif header == b'connection' and value != b'upgrade':
                raise HandshakeError("Invalid Connection header: %s" % value)

            elif header == b'sec-websocket-accept':
                # The accept value must be SHA1(key + magic GUID), base64-encoded
                # (RFC 6455 section 4.2.2)
                match = b64encode(sha1(self.key + WS_KEY).digest())
                if value != match.lower():
                    raise HandshakeError("Invalid challenge response: %s" % value)

            elif header == b'sec-websocket-protocol':
                # Fix: this used to be ','.join(value) which raises TypeError
                # on bytes under Python 3 - split the header value instead.
                protocols.extend([x.strip() for x in value.split(b',')])

            elif header == b'sec-websocket-extensions':
                # Fix: same TypeError as above for extensions.
                extensions.extend([x.strip() for x in value.split(b',')])

        return protocols, extensions

    def handshake_ok(self):
        self.opened()
| 13,421
|
Python
|
.py
| 302
| 33.215232
| 130
| 0.555104
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,202
|
geventclient.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/client/geventclient.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
import copy
import gevent
from gevent import Greenlet
from gevent.queue import Queue
from zato.server.ext.ws4py.client import WebSocketBaseClient
__all__ = ['WebSocketClient']
class WebSocketClient(WebSocketBaseClient):
    def __init__(self, server, url, protocols=None, extensions=None, ssl_options=None, headers=None,
                 socket_read_timeout=None, socket_write_timeout=None):
        """
        WebSocket client which runs its
        :meth:`run() <ws4py.websocket.WebSocket.run>` loop inside
        a gevent greenlet once the handshake has succeeded.

        .. code-block:: python

           ws = WebSocketClient('ws://localhost:9000/echo', protocols=['http-only', 'chat'])
           ws.connect()

           ws.send("Hello world")

           def incoming():
               while True:
                   m = ws.receive()
                   if m is not None:
                       print str(m)
                   else:
                       break

           def outgoing():
               for i in range(0, 40, 5):
                   ws.send("*" * i)

           greenlets = [
               gevent.spawn(incoming),
               gevent.spawn(outgoing),
           ]
           gevent.joinall(greenlets)
        """
        WebSocketBaseClient.__init__(self, server, url, protocols, extensions,
                                     ssl_options=ssl_options, headers=headers,
                                     socket_read_timeout=socket_read_timeout,
                                     socket_write_timeout=socket_write_timeout)

        # Buffer of incoming messages, drained by .receive()
        self.messages = Queue()

        # Greenlet driving the websocket's main loop; started only once
        # the upgrade handshake has completed (see handshake_ok).
        self._th = Greenlet(self.run)

    def handshake_ok(self):
        """
        Invoked once the upgrade handshake has completed
        successfully - kicks off the reading greenlet.
        """
        self._th.start()

    def received_message(self, message):
        """
        Store a deep copy of each incoming message in the
        `messages` queue for later consumption via :meth:`receive`.
        """
        self.messages.put(copy.deepcopy(message))

    def closed(self, code, reason=None):
        """
        Puts a :exc:`StopIteration` as a message into the
        `messages` queue.
        """
        # The StopIteration sentinel tells any consumer blocked on the
        # queue that nothing else will ever arrive
        self.messages.put(StopIteration)

    def receive(self):
        """
        Returns messages that were stored into the
        `messages` queue and returns `None` when the
        websocket is terminated or closed.
        """
        # A terminated websocket with an empty queue can never produce
        # another message - bail out instead of blocking forever
        if self.terminated and self.messages.empty():
            return None

        item = self.messages.get()
        return None if item is StopIteration else item
| 2,880
|
Python
|
.py
| 80
| 26.6625
| 100
| 0.597197
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,203
|
threadedclient.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/client/threadedclient.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
import threading
from zato.server.ext.ws4py.client import WebSocketBaseClient
__all__ = ['WebSocketClient']
class WebSocketClient(WebSocketBaseClient):
    def __init__(self, server, url, protocols=None, extensions=None, heartbeat_freq=None,
                 ssl_options=None, headers=None,
                 socket_read_timeout=None,
                 socket_write_timeout=None):
        """
        WebSocket client whose main loop runs in a dedicated thread.

        .. code-block:: python

           from zato.server.ext.ws4py.client.threadedclient import WebSocketClient

           class EchoClient(WebSocketClient):
               def opened(self):
                   for i in range(0, 200, 25):
                       self.send("*" * i)

               def closed(self, code, reason):
                   print(("Closed down", code, reason))

               def received_message(self, m):
                   print("=> %d %s" % (len(m), str(m)))

           try:
               ws = EchoClient('ws://localhost:9000/echo', protocols=['http-only', 'chat'])
               ws.connect()
           except KeyboardInterrupt:
               ws.close()
        """
        WebSocketBaseClient.__init__(self, server, url, protocols, extensions, heartbeat_freq,
                                     ssl_options, headers=headers,
                                     socket_read_timeout=socket_read_timeout,
                                     socket_write_timeout=socket_write_timeout)
        # Daemonized by default so a hung connection never blocks
        # interpreter shutdown; override via the `daemon` property.
        self._th = threading.Thread(target=self.run, name='WebSocketClient', daemon=True)

    @property
    def daemon(self):
        """`True` if the client's thread is set to be a daemon thread."""
        return self._th.daemon

    @daemon.setter
    def daemon(self, flag):
        """Set to `True` if the client's thread should be a daemon."""
        self._th.daemon = flag

    def run_forever(self):
        """Block the calling thread until the websocket has terminated."""
        while not self.terminated:
            # Short join timeout so termination is noticed promptly
            self._th.join(timeout=0.1)

    def handshake_ok(self):
        """
        Invoked once the upgrade handshake has completed
        successfully - starts the client's thread.
        """
        self._th.start()
if __name__ == '__main__':
    from zato.server.ext.ws4py.client.threadedclient import WebSocketClient

    # Manual smoke test against a local echo server on port 9000
    class EchoClient(WebSocketClient):
        def opened(self):
            def data_provider():
                for i in range(0, 200, 25):
                    yield "#" * i
            self.send(data_provider())

            for i in range(0, 200, 25):
                self.send("*" * i)

        def closed(self, code, reason):
            print(("Closed down", code, reason))

        def received_message(self, m):
            print("#%d" % len(m))
            # The longest echoed message signals the end of the test
            if len(m) == 175:
                self.close(reason='bye bye')

    try:
        ws = EchoClient('ws://localhost:9000/ws', protocols=['http-only', 'chat'],
                        headers=[('X-Test', 'hello there')])
        ws.connect()
        ws.run_forever()
    except KeyboardInterrupt:
        ws.close()
| 3,066
|
Python
|
.py
| 82
| 27.060976
| 94
| 0.558556
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,204
|
wsgiutils.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/server/wsgiutils.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
__doc__ = """
This module provides a WSGI application suitable
for a WSGI server such as gevent or wsgiref for instance.
:pep:`333` couldn't foresee a protocol such as
WebSockets but luckily the way the initial
protocol upgrade was designed means that we can
fit the handshake in a WSGI flow.
The handshake validates the request against
some internal or user-provided values and
fails the request if the validation doesn't
complete.
On success, the provided WebSocket subclass
is instantiated and stored into the
`'ws4py.websocket'` environ key so that
the WSGI server can handle it.
The WSGI application returns an empty iterable
since there is little value to return some
content within the response to the handshake.
A server wishing to support WebSocket via ws4py
should:
- Provide the real socket object to ws4py through the
`'ws4py.socket'` environ key. We can't use `'wsgi.input'`
as it may be a wrapper around the socket and we wouldn't
know how to extract the socket from it.
- Look for the `'ws4py.websocket'` key in the environ
when the application has returned and probably attach
it to a :class:`ws4py.manager.WebSocketManager` instance
so that the websocket runs its life.
- Remove the `'ws4py.websocket'` and `'ws4py.socket'`
environ keys once the application has returned.
No need for these keys to persist.
- Not close the underlying socket otherwise, well,
your websocket will also shutdown.
.. warning::
The WSGI application sets the `'Upgrade'` header response
as specified by :rfc:`6455`. This is not tolerated by
:pep:`333` since it's a hop-by-hop header.
We expect most servers won't mind.
"""
import base64
from hashlib import sha1
import logging
import sys
from zato.server.ext.ws4py.websocket import WebSocket
from zato.server.ext.ws4py.exc import HandshakeError
from zato.server.ext.ws4py.compat import unicode, py3k
from zato.server.ext.ws4py import WS_VERSION, WS_KEY, format_addresses
logger = logging.getLogger('zato_web_socket')
__all__ = ['WebSocketWSGIApplication']
class WebSocketWSGIApplication(object):
    def __init__(self, protocols=None, extensions=None, handler_cls=WebSocket):
        """
        WSGI application usable to complete the upgrade handshake
        by validating the requested protocols and extensions as
        well as the websocket version.

        If the upgrade validates, the `handler_cls` class
        is instantiated and stored inside the WSGI `environ`
        under the `'ws4py.websocket'` key to make it
        available to the WSGI handler.
        """
        self.protocols = protocols
        self.extensions = extensions
        self.handler_cls = handler_cls

    def make_websocket(self, sock, protocols, extensions, environ):
        """
        Initialize the `handler_cls` instance with the given
        negotiated sets of protocols and extensions as well as
        the `environ` and `sock`.

        Stores then the instance in the `environ` dict
        under the `'ws4py.websocket'` key.
        """
        websocket = self.handler_cls(sock, protocols, extensions,
                                     environ.copy())
        environ['ws4py.websocket'] = websocket
        return websocket

    def __call__(self, environ, start_response):
        """
        Validate the upgrade request per :rfc:`6455`, emit the
        ``101 Switching Protocols`` response and store the created
        websocket under ``environ['ws4py.websocket']``.

        Raises :exc:`HandshakeError` when any part of the
        validation fails.
        """
        if environ.get('REQUEST_METHOD') != 'GET':
            raise HandshakeError('HTTP method must be a GET')

        for key, expected_value in [('HTTP_UPGRADE', 'websocket'),
                                    ('HTTP_CONNECTION', 'upgrade')]:
            actual_value = environ.get(key, '').lower()
            if not actual_value:
                raise HandshakeError('Header %s is not defined' % key)
            if expected_value not in actual_value:
                raise HandshakeError('Illegal value for header %s: %s' %
                                     (key, actual_value))

        key = environ.get('HTTP_SEC_WEBSOCKET_KEY')
        if key:
            ws_key = base64.b64decode(key.encode('utf-8'))
            if len(ws_key) != 16:
                raise HandshakeError("WebSocket key's length is invalid")
        else:
            # Fix: a missing key used to slip through this check and crash
            # further down with AttributeError when computing the accept
            # value - fail the handshake explicitly instead.
            raise HandshakeError('Header HTTP_SEC_WEBSOCKET_KEY is not defined')

        version = environ.get('HTTP_SEC_WEBSOCKET_VERSION')
        version_is_valid = False
        if version:
            try: version = int(version)
            except: pass
            else: version_is_valid = version in WS_VERSION

        if not version_is_valid:
            environ['websocket.version'] = unicode(version).encode('utf-8')
            raise HandshakeError('Unhandled or missing WebSocket version')

        # Keep only the subprotocols this application also supports
        ws_protocols = []
        protocols = self.protocols or []
        subprotocols = environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL')
        if subprotocols:
            for s in subprotocols.split(','):
                s = s.strip()
                if s in protocols:
                    ws_protocols.append(s)

        # Likewise, keep only the extensions this application supports
        ws_extensions = []
        exts = self.extensions or []
        extensions = environ.get('HTTP_SEC_WEBSOCKET_EXTENSIONS')
        if extensions:
            for ext in extensions.split(','):
                ext = ext.strip()
                if ext in exts:
                    ws_extensions.append(ext)

        # Sec-WebSocket-Accept proves we understood the client's key
        # (RFC 6455 section 4.2.2)
        accept_value = base64.b64encode(sha1(key.encode('utf-8') + WS_KEY).digest())
        if py3k: accept_value = accept_value.decode('utf-8')
        upgrade_headers = [
            ('Upgrade', 'websocket'),
            ('Connection', 'Upgrade'),
            ('Sec-WebSocket-Version', '%s' % version),
            ('Sec-WebSocket-Accept', accept_value),
            ]
        if ws_protocols:
            upgrade_headers.append(('Sec-WebSocket-Protocol', ', '.join(ws_protocols)))
        if ws_extensions:
            upgrade_headers.append(('Sec-WebSocket-Extensions', ','.join(ws_extensions)))

        # Note: 'Upgrade' is a hop-by-hop header that PEP 333 frowns upon,
        # see this module's docstring for the rationale.
        start_response("101 Switching Protocols", upgrade_headers)

        self.make_websocket(environ['ws4py.socket'],
                            ws_protocols,
                            ws_extensions,
                            environ)
        return []
| 6,123
|
Python
|
.py
| 137
| 36.19708
| 89
| 0.648767
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,205
|
wsgirefserver.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/server/wsgirefserver.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
__doc__ = """
Add WebSocket support to the built-in WSGI server
provided by the :py:mod:`wsgiref`. This is clearly not
meant to be a production server so please consider this
only for testing purpose.
Mostly, this module overrides bits and pieces of
the built-in classes so that it supports the WebSocket
workflow.
.. code-block:: python
from wsgiref.simple_server import make_server
from zato.server.ext.ws4py.websocket import EchoWebSocket
from zato.server.ext.ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIRequestHandler
from zato.server.ext.ws4py.server.wsgiutils import WebSocketWSGIApplication
server = make_server('', 9000, server_class=WSGIServer,
handler_class=WebSocketWSGIRequestHandler,
app=WebSocketWSGIApplication(handler_cls=EchoWebSocket))
server.initialize_websockets_manager()
server.serve_forever()
.. note::
For some reason this server may fail against autobahntestsuite.
"""
import logging
import sys
from wsgiref.handlers import SimpleHandler
from wsgiref.simple_server import WSGIRequestHandler, WSGIServer as _WSGIServer
from wsgiref import util
util._hoppish = {}.__contains__
from zato.server.ext.ws4py.manager import WebSocketManager
from zato.server.ext.ws4py import format_addresses
from zato.server.ext.ws4py.server.wsgiutils import WebSocketWSGIApplication
from zato.server.ext.ws4py.compat import get_connection
__all__ = ['WebSocketWSGIHandler', 'WebSocketWSGIRequestHandler',
'WSGIServer']
logger = logging.getLogger('zato_web_socket')
class WebSocketWSGIHandler(SimpleHandler):
    def setup_environ(self):
        """
        Setup the environ dictionary and add the
        `'ws4py.socket'` key. Its associated value
        is the real underlying socket.
        """
        SimpleHandler.setup_environ(self)
        # Expose the raw socket so the WSGI application can detach it
        # and hand it over to ws4py once the handshake completes
        self.environ['ws4py.socket'] = get_connection(self.environ['wsgi.input'])
        self.http_version = self.environ['SERVER_PROTOCOL'].rsplit('/')[-1]

    def finish_response(self):
        """
        Completes the response and performs the following tasks:

        - Remove the `'ws4py.socket'` and `'ws4py.websocket'`
          environ keys.
        - Attach the returned websocket, if any, to the WSGI server
          using its ``link_websocket_to_server`` method.
        """
        # Pop the ws4py keys first so they never outlive the request
        ws = None
        if self.environ:
            self.environ.pop('ws4py.socket', None)
            ws = self.environ.pop('ws4py.websocket', None)

        try:
            SimpleHandler.finish_response(self)
        except:
            # The response failed mid-flight - close the websocket too,
            # then let the original error propagate
            if ws:
                ws.close(1011, reason='Something broke')
            raise
        else:
            if ws:
                self.request_handler.server.link_websocket_to_server(ws)
class WebSocketWSGIRequestHandler(WSGIRequestHandler):
    def handle(self):
        """
        Handle a single HTTP request, dispatching it through
        :class:`WebSocketWSGIHandler` so the ws4py upgrade flow applies.

        The base class offers no hook to swap the WSGI handler class,
        hence this full method override.
        """
        self.raw_requestline = self.rfile.readline()
        if not self.parse_request():
            # An error code has already been sent - nothing more to do
            return

        # This is where a configuration key could choose a different handler
        wsgi_handler = WebSocketWSGIHandler(self.rfile, self.wfile,
                                            self.get_stderr(), self.get_environ())
        wsgi_handler.request_handler = self  # backpointer for logging
        wsgi_handler.run(self.server.get_app())
class WSGIServer(_WSGIServer):
    def initialize_websockets_manager(self):
        """
        Call this to start the underlying websockets
        manager. Make sure to call it once your server
        is created.
        """
        self.manager = WebSocketManager()
        self.manager.start()

    def shutdown_request(self, request):
        """
        The base class would close our socket
        if we didn't override it.
        """
        pass

    def link_websocket_to_server(self, ws):
        """
        Call this from your WSGI handler when a websocket
        has been created.
        """
        self.manager.add(ws)

    def server_close(self):
        """
        Properly initiate closing handshakes on
        all websockets when the WSGI server terminates.
        """
        # The manager only exists if initialize_websockets_manager was called
        if hasattr(self, 'manager'):
            self.manager.close_all()
            self.manager.stop()
            self.manager.join()
            delattr(self, 'manager')
        _WSGIServer.server_close(self)
if __name__ == '__main__':
    from zato.server.ext.ws4py import configure_logger
    configure_logger()

    from wsgiref.simple_server import make_server
    from zato.server.ext.ws4py.websocket import EchoWebSocket

    # Demo: serve an echo websocket on port 9000 until Ctrl-C
    server = make_server('', 9000, server_class=WSGIServer,
                         handler_class=WebSocketWSGIRequestHandler,
                         app=WebSocketWSGIApplication(handler_cls=EchoWebSocket))
    server.initialize_websockets_manager()

    try:
        server.serve_forever()
    except KeyboardInterrupt:
        server.server_close()
| 5,102
|
Python
|
.py
| 127
| 32.456693
| 98
| 0.671648
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,206
|
cherrypyserver.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/server/cherrypyserver.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
__doc__ = """
WebSocket within CherryPy is a tricky bit since CherryPy is
a threaded server which would choke quickly if each thread
of the server were kept attached to a long living connection
that WebSocket expects.
In order to work around this constraint, we take some advantage
of some internals of CherryPy as well as the introspection
Python provides.
Basically, when the WebSocket handshake is complete, we take over
the socket and let CherryPy take back the thread that was
associated with the upgrade request.
These operations require a bit of work at various levels of
the CherryPy framework but this module takes care of them
and from your application's perspective, this is abstracted.
Here are the various utilities provided by this module:
* WebSocketTool: The tool is in charge to perform the
HTTP upgrade and detach the socket from
CherryPy. It runs at various hook points of the
request's processing. Enable that tool at
any path you wish to handle as a WebSocket
handler.
* WebSocketPlugin: The plugin tracks the instantiated web socket handlers.
It also cleans out websocket handlers whose connection
has been closed down. The websocket connection then
runs in its own thread that this plugin manages.
Simple usage example:
.. code-block:: python
:linenos:
import cherrypy
from zato.server.ext.ws4py.server.cherrypyserver import WebSocketPlugin, WebSocketTool
from zato.server.ext.ws4py.websocket import EchoWebSocket
cherrypy.config.update({'server.socket_port': 9000})
WebSocketPlugin(cherrypy.engine).subscribe()
cherrypy.tools.websocket = WebSocketTool()
class Root(object):
@cherrypy.expose
def index(self):
return 'some HTML with a websocket javascript connection'
@cherrypy.expose
def ws(self):
pass
cherrypy.quickstart(Root(), '/', config={'/ws': {'tools.websocket.on': True,
'tools.websocket.handler_cls': EchoWebSocket}})
Note that you can set the handler class on per-path basis,
meaning you could also dynamically change the class based
on other envrionmental settings (is the user authenticated for ex).
"""
import base64
from hashlib import sha1
import inspect
import threading
import cherrypy
from cherrypy import Tool
from cherrypy.process import plugins
from cherrypy.wsgiserver import HTTPConnection, HTTPRequest, KnownLengthRFile
from zato.server.ext.ws4py import WS_KEY, WS_VERSION
from zato.server.ext.ws4py.exc import HandshakeError
from zato.server.ext.ws4py.websocket import WebSocket
from zato.server.ext.ws4py.compat import py3k, get_connection, detach_connection
from zato.server.ext.ws4py.manager import WebSocketManager
__all__ = ['WebSocketTool', 'WebSocketPlugin']
class WebSocketTool(Tool):
def __init__(self):
Tool.__init__(self, 'before_request_body', self.upgrade)
def _setup(self):
conf = self._merged_args()
hooks = cherrypy.serving.request.hooks
p = conf.pop("priority", getattr(self.callable, "priority",
self._priority))
hooks.attach(self._point, self.callable, priority=p, **conf)
hooks.attach('before_finalize', self.complete,
priority=p)
hooks.attach('on_end_resource', self.cleanup_headers,
priority=70)
hooks.attach('on_end_request', self.start_handler,
priority=70)
def upgrade(self, protocols=None, extensions=None, version=WS_VERSION,
handler_cls=WebSocket, heartbeat_freq=None):
"""
Performs the upgrade of the connection to the WebSocket
protocol.
The provided protocols may be a list of WebSocket
protocols supported by the instance of the tool.
When no list is provided and no protocol is either
during the upgrade, then the protocol parameter is
not taken into account. On the other hand,
if the protocol from the handshake isn't part
of the provided list, the upgrade fails immediatly.
"""
request = cherrypy.serving.request
request.process_request_body = False
ws_protocols = None
ws_location = None
ws_version = version
ws_key = None
ws_extensions = []
if request.method != 'GET':
raise HandshakeError('HTTP method must be a GET')
for key, expected_value in [('Upgrade', 'websocket'),
('Connection', 'upgrade')]:
actual_value = request.headers.get(key, '').lower()
if not actual_value:
raise HandshakeError('Header %s is not defined' % key)
if expected_value not in actual_value:
raise HandshakeError('Illegal value for header %s: %s' %
(key, actual_value))
version = request.headers.get('Sec-WebSocket-Version')
supported_versions = ', '.join([str(v) for v in ws_version])
version_is_valid = False
if version:
try: version = int(version)
except: pass
else: version_is_valid = version in ws_version
if not version_is_valid:
cherrypy.response.headers['Sec-WebSocket-Version'] = supported_versions
raise HandshakeError('Unhandled or missing WebSocket version')
key = request.headers.get('Sec-WebSocket-Key')
if key:
ws_key = base64.b64decode(key.encode('utf-8'))
if len(ws_key) != 16:
raise HandshakeError("WebSocket key's length is invalid")
protocols = protocols or []
subprotocols = request.headers.get('Sec-WebSocket-Protocol')
if subprotocols:
ws_protocols = []
for s in subprotocols.split(','):
s = s.strip()
if s in protocols:
ws_protocols.append(s)
exts = extensions or []
extensions = request.headers.get('Sec-WebSocket-Extensions')
if extensions:
for ext in extensions.split(','):
ext = ext.strip()
if ext in exts:
ws_extensions.append(ext)
location = []
include_port = False
if request.scheme == "https":
location.append("wss://")
include_port = request.local.port != 443
else:
location.append("ws://")
include_port = request.local.port != 80
location.append('localhost')
if include_port:
location.append(":%d" % request.local.port)
location.append(request.path_info)
if request.query_string != "":
location.append("?%s" % request.query_string)
ws_location = ''.join(location)
response = cherrypy.serving.response
response.stream = True
response.status = '101 Switching Protocols'
response.headers['Content-Type'] = 'text/plain'
response.headers['Upgrade'] = 'websocket'
response.headers['Connection'] = 'Upgrade'
response.headers['Sec-WebSocket-Version'] = str(version)
response.headers['Sec-WebSocket-Accept'] = base64.b64encode(sha1(key.encode('utf-8') + WS_KEY).digest())
if ws_protocols:
response.headers['Sec-WebSocket-Protocol'] = ', '.join(ws_protocols)
if ws_extensions:
response.headers['Sec-WebSocket-Extensions'] = ','.join(ws_extensions)
addr = (request.remote.ip, request.remote.port)
rfile = request.rfile.rfile
if isinstance(rfile, KnownLengthRFile):
rfile = rfile.rfile
ws_conn = get_connection(rfile)
request.ws_handler = handler_cls(ws_conn, ws_protocols, ws_extensions,
request.wsgi_environ.copy(),
heartbeat_freq=heartbeat_freq)
def complete(self):
"""
Sets some internal flags of CherryPy so that it
doesn't close the socket down.
"""
self._set_internal_flags()
def cleanup_headers(self):
"""
Some clients aren't that smart when it comes to
headers lookup.
"""
response = cherrypy.response
if not response.header_list:
return
headers = response.header_list[:]
for (k, v) in headers:
if k[:7] == 'Sec-Web':
response.header_list.remove((k, v))
response.header_list.append((k.replace('Sec-Websocket', 'Sec-WebSocket'), v))
def start_handler(self):
"""
Runs at the end of the request processing by calling
the opened method of the handler.
"""
request = cherrypy.request
if not hasattr(request, 'ws_handler'):
return
addr = (request.remote.ip, request.remote.port)
ws_handler = request.ws_handler
request.ws_handler = None
delattr(request, 'ws_handler')
# By doing this we detach the socket from
# the CherryPy stack avoiding memory leaks
detach_connection(request.rfile.rfile)
cherrypy.engine.publish('handle-websocket', ws_handler, addr)
def _set_internal_flags(self):
"""
CherryPy has two internal flags that we are interested in
to enable WebSocket within the server. They can't be set via
a public API and considering I'd want to make this extension
as compatible as possible whilst refraining in exposing more
than should be within CherryPy, I prefer performing a bit
of introspection to set those flags. Even by Python standards
such introspection isn't the cleanest but it works well
enough in this case.
This also means that we do that only on WebSocket
connections rather than globally and therefore we do not
harm the rest of the HTTP server.
"""
current = inspect.currentframe()
while True:
if not current:
break
_locals = current.f_locals
if 'self' in _locals:
if type(_locals['self']) == HTTPRequest:
_locals['self'].close_connection = True
if type(_locals['self']) == HTTPConnection:
_locals['self'].linger = True
# HTTPConnection is more inner than
# HTTPRequest so we can leave once
# we're done here
return
_locals = None
current = current.f_back
class WebSocketPlugin(plugins.SimplePlugin):
def __init__(self, bus):
plugins.SimplePlugin.__init__(self, bus)
self.manager = WebSocketManager()
def start(self):
self.bus.log("Starting WebSocket processing")
self.bus.subscribe('stop', self.cleanup)
self.bus.subscribe('handle-websocket', self.handle)
self.bus.subscribe('websocket-broadcast', self.broadcast)
self.manager.start()
def stop(self):
self.bus.log("Terminating WebSocket processing")
self.bus.unsubscribe('stop', self.cleanup)
self.bus.unsubscribe('handle-websocket', self.handle)
self.bus.unsubscribe('websocket-broadcast', self.broadcast)
def handle(self, ws_handler, peer_addr):
"""
Tracks the provided handler.
:param ws_handler: websocket handler instance
:param peer_addr: remote peer address for tracing purpose
"""
self.manager.add(ws_handler)
def cleanup(self):
"""
Terminate all connections and clear the pool. Executed when the engine stops.
"""
self.manager.close_all()
self.manager.stop()
self.manager.join()
def broadcast(self, message, binary=False):
"""
Broadcasts a message to all connected clients known to
the server.
:param message: a message suitable to pass to the send() method
of the connected handler.
:param binary: whether or not the message is a binary one
"""
self.manager.broadcast(message, binary)
if __name__ == '__main__':
import random
cherrypy.config.update({'server.socket_host': '127.0.0.1',
'server.socket_port': 9000})
WebSocketPlugin(cherrypy.engine).subscribe()
cherrypy.tools.websocket = WebSocketTool()
class Root(object):
@cherrypy.expose
@cherrypy.tools.websocket(on=False)
def ws(self):
return """<html>
<head>
<script type='application/javascript' src='https://ajax.googleapis.com/ajax/libs/jquery/1.8.3/jquery.min.js'> </script>
<script type='application/javascript'>
$(document).ready(function() {
var ws = new WebSocket('ws://192.168.0.10:9000/');
ws.onmessage = function (evt) {
$('#chat').val($('#chat').val() + evt.data + '\\n');
};
ws.onopen = function() {
ws.send("Hello there");
};
ws.onclose = function(evt) {
$('#chat').val($('#chat').val() + 'Connection closed by server: ' + evt.code + ' \"' + evt.reason + '\"\\n');
};
$('#chatform').submit(function() {
ws.send('%(username)s: ' + $('#message').val());
$('#message').val("");
return false;
});
});
</script>
</head>
<body>
<form action='/echo' id='chatform' method='get'>
<textarea id='chat' cols='35' rows='10'></textarea>
<br />
<label for='message'>%(username)s: </label><input type='text' id='message' />
<input type='submit' value='Send' />
</form>
</body>
</html>
""" % {'username': "User%d" % random.randint(0, 100)}
@cherrypy.expose
def index(self):
cherrypy.log("Handler created: %s" % repr(cherrypy.request.ws_handler))
cherrypy.quickstart(Root(), '/', config={'/': {'tools.websocket.on': True,
'tools.websocket.handler_cls': EchoWebSocketHandler}})
| 14,495
|
Python
|
.py
| 324
| 34.302469
| 129
| 0.610344
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,207
|
geventserver.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/server/geventserver.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
__doc__ = """
WSGI entities to support WebSocket from within gevent.
Its usage is rather simple:
.. code-block: python
from gevent import monkey; monkey.patch_all()
from zato.server.ext.ws4py.websocket import EchoWebSocket
from zato.server.ext.ws4py.server.geventserver import WSGIServer
from zato.server.ext.ws4py.server.wsgiutils import WebSocketWSGIApplication
server = WSGIServer(('localhost', 9000), WebSocketWSGIApplication(handler_cls=EchoWebSocket))
server.serve_forever()
"""
import logging
import gevent
from gevent.pywsgi import WSGIHandler, WSGIServer as _WSGIServer
from gevent.pool import Pool
from zato.server.ext.ws4py import format_addresses
from zato.server.ext.ws4py.server.wsgiutils import WebSocketWSGIApplication
logger = logging.getLogger('zato_web_socket')
__all__ = ['WebSocketWSGIHandler', 'WSGIServer',
'GEventWebSocketPool']
class WebSocketWSGIHandler(WSGIHandler):
"""
A WSGI handler that will perform the :rfc:`6455`
upgrade and handshake before calling the WSGI application.
If the incoming request doesn't have a `'Upgrade'` header,
the handler will simply fallback to the gevent builtin's handler
and process it as per usual.
"""
def run_application(self):
upgrade_header = self.environ.get('HTTP_UPGRADE', '').lower()
if upgrade_header:
# Build and start the HTTP response
self.environ['ws4py.socket'] = self.socket or self.environ['wsgi.input'].rfile._sock
self.result = self.application(self.environ, self.start_response) or []
self.process_result()
del self.environ['ws4py.socket']
self.socket = None
self.rfile.close()
ws = self.environ.pop('ws4py.websocket', None)
if ws:
ws_greenlet = self.server.pool.track(ws)
# issue #170
# in gevent 1.1 socket will be closed once application returns
# so let's wait for websocket handler to finish
ws_greenlet.join()
else:
gevent.pywsgi.WSGIHandler.run_application(self)
class GEventWebSocketPool(Pool):
"""
Simple pool of bound websockets.
Internally it uses a gevent group to track
the websockets. The server should call the ``clear``
method to initiate the closing handshake when the
server is shutdown.
"""
def track(self, websocket):
return self.spawn(websocket.run)
def clear(self):
for greenlet in list(self):
try:
websocket = greenlet._run.im_self
if websocket:
websocket.close(1001, 'Server is shutting down')
except:
pass
finally:
self.discard(greenlet)
class WSGIServer(_WSGIServer):
handler_class = WebSocketWSGIHandler
def __init__(self, *args, **kwargs):
"""
WSGI server that simply tracks websockets
and send them a proper closing handshake
when the server terminates.
Other than that, the server is the same
as its :class:`gevent.pywsgi.WSGIServer`
base.
"""
_WSGIServer.__init__(self, *args, **kwargs)
self.pool = GEventWebSocketPool()
def stop(self, *args, **kwargs):
self.pool.clear()
_WSGIServer.stop(self, *args, **kwargs)
if __name__ == '__main__':
from zato.server.ext.ws4py import configure_logger
configure_logger()
from zato.server.ext.ws4py.websocket import EchoWebSocket
server = WSGIServer(('127.0.0.1', 9000),
WebSocketWSGIApplication(handler_cls=EchoWebSocket))
server.serve_forever()
| 3,776
|
Python
|
.py
| 92
| 33.054348
| 97
| 0.661379
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,208
|
tulipserver.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/server/tulipserver.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
import base64
from hashlib import sha1
from email.parser import BytesHeaderParser
import io
import asyncio
from zato.server.ext.ws4py import WS_KEY, WS_VERSION
from zato.server.ext.ws4py.exc import HandshakeError
from zato.server.ext.ws4py.websocket import WebSocket
LF = b'\n'
CRLF = b'\r\n'
SPACE = b' '
EMPTY = b''
__all__ = ['WebSocketProtocol']
class WebSocketProtocol(asyncio.StreamReaderProtocol):
def __init__(self, handler_cls):
asyncio.StreamReaderProtocol.__init__(self, asyncio.StreamReader(),
self._pseudo_connected)
self.ws = handler_cls(self)
def _pseudo_connected(self, reader, writer):
pass
def connection_made(self, transport):
"""
A peer is now connected and we receive an instance
of the underlying :class:`asyncio.Transport`.
We :class:`asyncio.StreamReader` is created
and the transport is associated before the
initial HTTP handshake is undertaken.
"""
#self.transport = transport
#self.stream = asyncio.StreamReader()
#self.stream.set_transport(transport)
asyncio.StreamReaderProtocol.connection_made(self, transport)
# Let make it concurrent for others to tag along
f = asyncio.async(self.handle_initial_handshake())
f.add_done_callback(self.terminated)
@property
def writer(self):
return self._stream_writer
@property
def reader(self):
return self._stream_reader
def terminated(self, f):
if f.done() and not f.cancelled():
ex = f.exception()
if ex:
response = [b'HTTP/1.0 400 Bad Request']
response.append(b'Content-Length: 0')
response.append(b'Connection: close')
response.append(b'')
response.append(b'')
self.writer.write(CRLF.join(response))
self.ws.close_connection()
def close(self):
"""
Initiate the websocket closing handshake
which will eventuall lead to the underlying
transport.
"""
self.ws.close()
def timeout(self):
self.ws.close_connection()
if self.ws.started:
self.ws.closed(1002, "Peer connection timed-out")
def connection_lost(self, exc):
"""
The peer connection is now, the closing
handshake won't work so let's not even try.
However let's make the websocket handler
be aware of it by calling its `closed`
method.
"""
if exc is not None:
self.ws.close_connection()
if self.ws.started:
self.ws.closed(1002, "Peer connection was lost")
@asyncio.coroutine
def handle_initial_handshake(self):
"""
Performs the HTTP handshake described in :rfc:`6455`. Note that
this implementation is really basic and it is strongly advised
against using it in production. It would probably break for
most clients. If you want a better support for HTTP, please
use a more reliable HTTP server implemented using asyncio.
"""
request_line = yield from self.next_line()
method, uri, req_protocol = request_line.strip().split(SPACE, 2)
# GET required
if method.upper() != b'GET':
raise HandshakeError('HTTP method must be a GET')
headers = yield from self.read_headers()
if req_protocol == b'HTTP/1.1' and 'Host' not in headers:
raise ValueError("Missing host header")
for key, expected_value in [('Upgrade', 'websocket'),
('Connection', 'upgrade')]:
actual_value = headers.get(key, '').lower()
if not actual_value:
raise HandshakeError('Header %s is not defined' % str(key))
if expected_value not in actual_value:
raise HandshakeError('Illegal value for header %s: %s' %
(key, actual_value))
response_headers = {}
ws_version = WS_VERSION
version = headers.get('Sec-WebSocket-Version')
supported_versions = ', '.join([str(v) for v in ws_version])
version_is_valid = False
if version:
try: version = int(version)
except: pass
else: version_is_valid = version in ws_version
if not version_is_valid:
response_headers['Sec-WebSocket-Version'] = supported_versions
raise HandshakeError('Unhandled or missing WebSocket version')
key = headers.get('Sec-WebSocket-Key')
if key:
ws_key = base64.b64decode(key.encode('utf-8'))
if len(ws_key) != 16:
raise HandshakeError("WebSocket key's length is invalid")
protocols = []
ws_protocols = []
subprotocols = headers.get('Sec-WebSocket-Protocol')
if subprotocols:
for s in subprotocols.split(','):
s = s.strip()
if s in protocols:
ws_protocols.append(s)
exts = []
ws_extensions = []
extensions = headers.get('Sec-WebSocket-Extensions')
if extensions:
for ext in extensions.split(','):
ext = ext.strip()
if ext in exts:
ws_extensions.append(ext)
self.ws.protocols = ws_protocols
self.ws.extensions = ws_extensions
self.ws.headers = headers
response = [req_protocol + b' 101 Switching Protocols']
response.append(b'Upgrade: websocket')
response.append(b'Content-Type: text/plain')
response.append(b'Content-Length: 0')
response.append(b'Connection: Upgrade')
response.append(b'Sec-WebSocket-Version:' + bytes(str(version), 'utf-8'))
response.append(b'Sec-WebSocket-Accept:' + base64.b64encode(sha1(key.encode('utf-8') + WS_KEY).digest()))
if ws_protocols:
response.append(b'Sec-WebSocket-Protocol:' + b', '.join(ws_protocols))
if ws_extensions:
response.append(b'Sec-WebSocket-Extensions:' + b','.join(ws_extensions))
response.append(b'')
response.append(b'')
self.writer.write(CRLF.join(response))
yield from self.handle_websocket()
@asyncio.coroutine
def handle_websocket(self):
"""
Starts the websocket process until the
exchange is completed and terminated.
"""
yield from self.ws.run()
@asyncio.coroutine
def read_headers(self):
"""
Read all HTTP headers from the HTTP request
and returns a dictionary of them.
"""
headers = b''
while True:
line = yield from self.next_line()
headers += line
if line == CRLF:
break
return BytesHeaderParser().parsebytes(headers)
@asyncio.coroutine
def next_line(self):
"""
Reads data until \r\n is met and then return all read
bytes.
"""
line = yield from self.reader.readline()
if not line.endswith(CRLF):
raise ValueError("Missing mandatory trailing CRLF")
return line
if __name__ == '__main__':
from zato.server.ext.ws4py.async_websocket import EchoWebSocket
loop = asyncio.get_event_loop()
def start_server():
proto_factory = lambda: WebSocketProtocol(EchoWebSocket)
return loop.create_server(proto_factory, '', 9007)
s = loop.run_until_complete(start_server())
print('serving on', s.sockets[0].getsockname())
loop.run_forever()
| 7,747
|
Python
|
.py
| 192
| 30.59375
| 113
| 0.60569
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,209
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,210
|
model.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/model.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from dataclasses import dataclass, field, Field, MISSING
from inspect import isclass
from logging import getLogger
from operator import attrgetter
# Bunch
from bunch import Bunch # type: ignore[reportUnknownVariableType]
# SimpleParsing
from simple_parsing.docstring import get_attribute_docstring
# Zato
from zato.common.typing_ import extract_from_union, is_union
from zato.common.marshal_.api import extract_model_class, is_list, Model
# ################################################################################################################################
# ################################################################################################################################
if 0:
from simple_parsing.docstring import AttributeDocString
from zato.common.typing_ import any_, anydict, anylist
from zato.server.service import Service
Service = Service
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
_sio_attrs = (
'input',
'input_required',
'input_optional',
'output',
'output_required',
'output_optional',
)
_singleton = object()
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class FieldTypeInfo:
field_type: 'any_'
field_type_args: 'anylist'
union_with: 'any_'
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class FieldInfo:
name: 'str'
type: 'str' = ''
subtype: 'str' = ''
is_required: 'bool' = False
description: 'str' = ''
ref: 'str' = ''
is_list: 'bool' = False
# ################################################################################################################################
@staticmethod
def get_field_type_info(field:'Field') -> 'FieldTypeInfo':
field_type:'any_' = field.type
if is_union(field_type):
result = extract_from_union(field_type)
field_type_args, field_type, union_with = result
else:
field_type_args = []
union_with = _singleton
info = FieldTypeInfo()
info.field_type = field_type
info.field_type_args = field_type_args
info.union_with = union_with
return info
# ################################################################################################################################
@staticmethod
def from_python_field(model:'Model', field:'Field', api_spec_info:'any_') -> 'FieldInfo':
# Type hints
type_info:'any_'
if not field.type:
raise ValueError('Value missing -> field.type ({})'.format(field))
info = FieldInfo()
info.name = field.name or '<field-no-name>'
info.is_required = field.default is MISSING
info.description = field.__doc__ or ''
# Extract the Python field's docstring, regardless of its location in relation to the field ..
docstring = get_attribute_docstring(model, info.name) # type: AttributeDocString
# .. and assign it to the information object, in this priority.
info.description = (docstring.comment_above or docstring.comment_inline or docstring.docstring_below or '').strip()
field_type_info = FieldInfo.get_field_type_info(field)
field_type = field_type_info.field_type
# If this was a union with a None type, it means that it was actually an optional field
# because optional[Something] is equal to Union[Something, None], in which case
# we set the is_required flag to None, no matter what was set earlier up.
if field_type_info.union_with is type(None): # noqa: E721
info.is_required = False
is_class = isclass(field_type)
if field_type is list:
type_info = api_spec_info.LIST
elif is_list(field_type, is_class):
info.is_list = True
ref = extract_model_class(field_type)
if is_union(ref):
result = extract_from_union(ref)
_, field_type, _ = result
ref = field_type
#
# If we have an element such as anylistnone, the extracted field
# will be actually Python's own internal type pointing to the Any type.
# Under Python 3.8, this will be _SpecialForm. In newer versions,
# it may be potentially ClassVar. Be as it may, it does not have a __name__attribute that could extract.
#
ref_name = getattr(ref, '__name__', None)
if ref_name:
info.ref = '#/components/schemas/{}.{}'.format(ref.__module__, ref_name)
type_info = '', ref_name
else:
type_info = '', ''
elif is_class and issubclass(field_type, dict):
type_info = api_spec_info.DICT
elif is_class and issubclass(field_type, bool):
type_info = api_spec_info.BOOLEAN
elif is_class and issubclass(field_type, int):
type_info = api_spec_info.INTEGER
elif is_class and issubclass(field_type, float):
type_info = api_spec_info.FLOAT
elif is_class and issubclass(field_type, Model):
info.ref = '#/components/schemas/{}.{}'.format(field_type.__module__, field_type.__name__)
type_info = '', field_type.__name__
else:
try:
type_info = api_spec_info.map[field.__class__]
except KeyError:
type_info = api_spec_info.DEFAULT
info.type, info.subtype = type_info
return info
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class APISpecInfo:
name: str
field_list: 'anydict'
request_elem: str
response_elem: str
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class Config:
ns: 'str' = ''
services: 'anylist' = field(default_factory=list)
is_module_level: 'bool' = True
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class DocstringModel:
full: 'str' = ''
full_html: 'str' = ''
summary: 'str' = ''
summary_html: 'str' = ''
description: 'str' = ''
description_html: 'str' = ''
# Keys are tags used, values are documentation for key
by_tag: 'anydict' = field(default_factory=dict)
tags: 'anylist' = field(default_factory=list)
# ################################################################################################################################
# ################################################################################################################################
class SimpleIO:
""" Represents a SimpleIO definition of a particular service.
"""
input: 'anylist'
input_required: 'anylist'
input_optional: 'anylist'
output: 'anylist'
output_required: 'anylist'
output_optional: 'anylist'
request_elem: 'any_'
response_elem: 'any_'
spec_name: 'str'
description: 'SimpleIODescription'
needs_sio_desc: 'bool'
def __init__(
self,
spec_info, # type: APISpecInfo
description, # type: SimpleIODescription
needs_sio_desc, # type: bool
) -> 'None':
self.input = spec_info.field_list.get('input', [])
self.input_required = []
self.input_optional = []
self.output = spec_info.field_list.get('output', [])
self.output_required = []
self.output_optional = []
self.request_elem = spec_info.request_elem
self.response_elem = spec_info.response_elem
self.spec_name = spec_info.name
self.description = description
self.needs_sio_desc = needs_sio_desc
# ################################################################################################################################
def assign_required_optional(self) -> 'None':
item: 'FieldInfo'
for item in self.input:
if item.is_required:
self.input_required.append(item)
else:
self.input_optional.append(item)
for item in self.output:
if item.is_required:
self.output_required.append(item)
else:
self.output_optional.append(item)
# ################################################################################################################################
def sort_elems(self) -> 'None':
self.input = sorted(self.input, key=attrgetter('name'))
self.input_required = sorted(self.input_required, key=attrgetter('name'))
self.input_optional = sorted(self.input_optional, key=attrgetter('name'))
self.output = sorted(self.output, key=attrgetter('name'))
self.output_required = sorted(self.output_required, key=attrgetter('name'))
self.output_optional = sorted(self.output_optional, key=attrgetter('name'))
# ################################################################################################################################
def to_bunch(self) -> 'Bunch':
out = Bunch()
for name in _sio_attrs + ('request_elem', 'response_elem', 'spec_name'):
out[name] = getattr(self, name)
if self.needs_sio_desc:
out.description = self.description.to_bunch()
return out
# ################################################################################################################################
# ################################################################################################################################
class SimpleIODescription:
input: 'anydict'
output: 'anydict'
def __init__(self) -> 'None':
self.input = {}
self.output = {}
# ################################################################################################################################
def to_bunch(self) -> 'Bunch':
out = Bunch()
out.input = self.input
out.output = self.output
return out
# ################################################################################################################################
# ################################################################################################################################
| 12,146
|
Python
|
.py
| 239
| 43.606695
| 130
| 0.422094
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,211
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/parser/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,212
|
service.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/parser/service.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.marshal_.api import extract_model_class, is_list
from zato.common.marshal_.api import Model
from zato.common.marshal_.simpleio import DataClassSimpleIO
from zato.common.typing_ import any_, cast_
from zato.server.apispec.model import APISpecInfo, Config, FieldInfo, SimpleIO
from zato.server.apispec.parser.docstring import DocstringParser
# Zato - Cython
from zato.simpleio import SIO_TYPE_MAP
# ################################################################################################################################
# ################################################################################################################################
_SIO_TYPE_MAP = SIO_TYPE_MAP() # type: ignore
# ################################################################################################################################
# ################################################################################################################################
if 0:
from dataclasses import Field
from zato.common.typing_ import anydict, anylist, optional, strorlist, type_
from zato.server.service import Service
Field = Field
Service = Service
# ################################################################################################################################
# ################################################################################################################################
def build_field_list(model:'Model | str', api_spec_info:'any_') -> 'anylist':
    """ Returns a list of FieldInfo objects describing all the fields of a model class,
    for use with the given API specification format (e.g. Zato or OpenAPI).
    """
    # Response to produce
    field_list = [] # type: anylist
    # Built-in scalars and the catch-all type are not true models ..
    is_builtin = model is any_ or model is int or model is str or model is bool
    # .. so there are no fields to extract and we can return right away.
    if is_builtin:
        return field_list
    # For list models, work with the class of their elements instead
    if is_list(model, True): # type: ignore
        model = extract_model_class(model) # type: ignore
    # Turn each Python-level field, sorted by name, into its specification counterpart
    zato_fields = cast_('any_', model).zato_get_fields()
    for _ignored_name, python_field in sorted(zato_fields.items()):
        field_info = FieldInfo.from_python_field(model, python_field, api_spec_info) # type: ignore
        field_list.append(field_info)
    return field_list
# ################################################################################################################################
# ################################################################################################################################
class ServiceInfo:
    """ Contains information about a service basing on which documentation is generated.
    """
    def __init__(
        self,
        name, # type: str
        service_class, # type: type_[Service]
        simple_io_config, # type: anydict
        tags='public', # type: strorlist
        needs_sio_desc=True # type: bool
    ) -> 'None':
        self.name = name
        self.service_class = service_class
        self.simple_io_config = simple_io_config
        self.config = Config()
        self.simple_io = {} # type: anydict
        self.needs_sio_desc = needs_sio_desc
        # Extracts docstrings from the service class and from its SimpleIO definition
        self.docstring = DocstringParser(service_class, tags)
        # Parse everything as soon as the object is built
        self.parse()
# ################################################################################################################################
    def to_dict(self) -> 'anydict':
        """ Serialises this service's metadata, including its docstring data, to a dict. """
        docs = self.docstring.data
        return {
            'name': self.name,
            'simple_io': self.simple_io,
            'docs': {
                'full': docs.full,
                'full_html': docs.full_html,
                'summary': docs.summary,
                'summary_html': docs.summary_html,
                'description': docs.description,
                'description_html': docs.description_html,
            }
        }
# ################################################################################################################################
    def parse(self) -> 'None':
        """ Extracts the SimpleIO definition first, then the docstring's summary and description. """
        self.parse_simple_io()
        self.docstring.set_summary_desc()
# ################################################################################################################################
    def parse_simple_io(self) -> 'None':
        """ Adds metadata about the service's SimpleIO definition.
        """
        # SimpleIO definition attached to the service class, if any
        sio = getattr(self.service_class, '_sio', None) # type: any_
        if not (sio and isinstance(sio, DataClassSimpleIO)):
            return
        # The description can be reused across all the output data formats
        sio_desc = self.docstring.get_sio_desc(sio)
        for spec_format in _SIO_TYPE_MAP: # type: ignore
            # Describes the API for one major output format, e.g. Zato or OpenAPI
            info = APISpecInfo()
            info.name = spec_format.name
            info.field_list = {}
            info.request_elem = getattr(sio, 'request_elem', '')
            info.response_elem = getattr(sio, 'response_elem', '')
            # Build field lists out of the service's input and output models, if they are declared ..
            for direction in ('input', 'output'):
                model = getattr(sio.user_declaration, direction, None) # type: optional[Model]
                if model:
                    info.field_list[direction] = build_field_list(model, spec_format)
            # .. wrap the whole definition up in a single container ..
            sio_def = SimpleIO(info, sio_desc, self.needs_sio_desc)
            # .. split the input and output elements into required and optional ones ..
            sio_def.assign_required_optional()
            # .. sort every input and output element alphabetically ..
            sio_def.sort_elems()
            # .. and serialise the data structure for external users.
            self.simple_io[info.name] = sio_def.to_bunch()
# ################################################################################################################################
# ################################################################################################################################
| 6,920
|
Python
|
.py
| 123
| 47.674797
| 130
| 0.463566
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,213
|
docstring.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/parser/docstring.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# dataclasses
from dataclasses import dataclass
# docformatter
from docformatter import format_docstring
# markdown
from markdown import markdown
# Zato
from zato.common.api import APISPEC
from zato.common.marshal_.api import Model
from zato.server.apispec.model import DocstringModel, SimpleIODescription
# ################################################################################################################################
# ################################################################################################################################
if 0:
from dataclasses import Field
from zato.common.typing_ import any_, anydict, anylist, anytuple, iterator_, list_, strlist, strorlist, type_
from zato.server.service import Service
Field = Field
Service = Service
# ################################################################################################################################
# ################################################################################################################################
# Tags whose contents must not be made public in generated specifications
tag_internal = ('@classified', '@confidential', '@internal', '@private', '@restricted', '@secret')
# RST/HTML wrapper used to highlight the names of internal tags in the output
tag_html_internal = """
.. raw:: html
<span class="zato-tag-name-highlight">{}</span>
"""
# Banner prepended to the text of any internal tag's segment
not_public = 'INFORMATION IN THIS SECTION IS NOT PUBLIC'
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class _DocstringSegment(Model):
    """ One tagged segment of a service's docstring, e.g. its public part or an @internal one. """
    tag: 'str' = ''         # Tag name without the leading '@' indicator
    summary: 'str' = ''     # The first sentence found in this segment
    description: 'str' = '' # The text following the summary
    full: 'str' = ''        # Summary and description combined
# ################################################################################################################################
# ################################################################################################################################
class DocstringParser:
    """ Parses docstrings of services and of their SimpleIO definitions, splitting them
    into tagged segments and producing plain-text as well as HTML renditions.
    """
    def __init__(
        self,
        service_class, # type: type_[Service]
        tags='public' # type: strorlist
    ) -> 'None':
        self.service_class = service_class
        self.data = DocstringModel()
        # Tags are always stored as a list, even if a single string was given on input
        self.data.tags = tags if isinstance(tags, list) else [tags]
# ################################################################################################################################
    def set_summary_desc(self) -> 'None':
        """ Populates self.data with the summary, description and full text extracted
        from the service class's docstring, in plain-text and HTML versions.
        """
        doc = self.service_class.__doc__ or ''
        # Escape asterisks so Markdown does not treat them as emphasis markers
        doc = doc.replace('*', r'\*')
        segments = self.extract_segments(doc)
        for segment in segments:
            # The very first summary found will set the whole docstring's summary
            if segment.summary:
                if not self.data.summary:
                    self.data.summary = segment.summary
            if segment.description:
                self.data.description += segment.description
            if segment.full:
                self.data.full += segment.full
        # Now that we have visited all the segments, we can also add an HTML version of the data found.
        self.data.full_html = self.to_html(self.data.full)
        self.data.summary_html = self.to_html(self.data.summary)
        self.data.description_html = self.to_html(self.data.description)
# ################################################################################################################################
    def _parse_split_segment(
        self,
        tag, # type: str
        split, # type: anylist
        is_tag_internal, # type: bool
        prefix_with_tag # type: bool
    ) -> '_DocstringSegment':
        """ Builds a _DocstringSegment out of one tag's lines, extracting the summary
        and description and, for internal tags, prepending the not-public banner.
        """
        if is_tag_internal:
            split.insert(0, not_public)
        # For implicit tags (e.g. public), the summary will be under index 0,
        # but for tags named explicitly, index 0 may be an empty element
        # and the summary will be under index 1.
        # NOTE(review): assumes split[1] exists whenever split[0] is empty - confirm upstream guarantees this.
        summary = split[0] or split[1]
        # format_docstring expects an empty line between summary and description
        if len(split) > 1:
            _doc = []
            _doc.append(split[0])
            _doc.append('')
            _doc.extend(split[1:])
            doc = '\n'.join(_doc)
        else:
            doc = ''
        # This gives us the full docstring out of which we need to extract description alone.
        full_docstring = format_docstring('', '"{}"'.format(doc), post_description_blank=False)
        full_docstring = full_docstring.lstrip('"').rstrip('"')
        description = full_docstring.splitlines()
        # If there are multiple lines and the second one is empty this means it is an indicator of a summary to follow.
        if len(description) > 1 and not description[1]:
            description = '\n'.join(description[2:])
        else:
            description = ''
        # Function docformatter.normalize_summary adds a superfluous period at the end of docstring.
        if full_docstring:
            if description and full_docstring[-1] == '.' and full_docstring[-1] != description[-1]:
                full_docstring = full_docstring[:-1]
            if summary and full_docstring[-1] == '.' and full_docstring[-1] != summary[-1]:
                full_docstring = full_docstring[:-1]
        # If we don't have any summary but there is a docstring at all then it must be a single-line one
        # and it becomes our summary.
        if full_docstring and not summary:
            summary = full_docstring
        # If we don't have description but we have summary then summary becomes description and full docstring as well
        if summary and not description:
            description = summary
            full_docstring = summary
        summary = summary.lstrip()
        # This is needed in case we have one of the tags
        # that need a highlight because they contain information
        # that is internal to users generating the specification.
        tag_html = tag
        if is_tag_internal:
            tag_html = tag_html_internal.format(tag)
        else:
            tag_html = tag
        # Non-public tags have their name shown explicitly in front of the text
        if prefix_with_tag:
            description = '\n\n{}\n{}'.format(tag_html, description)
            full_docstring = '\n{}\n\n{}'.format(tag_html, full_docstring)
        out = _DocstringSegment()
        out.tag = tag.replace('@', '', 1)
        out.summary = summary
        out.description = description
        out.full = full_docstring
        return out
# ################################################################################################################################
    def _get_next_split_segment(self, lines:'anylist', tag_indicator:'str'='@') -> 'iterator_[anytuple]':
        """ Yields (tag, lines) tuples, one for each tag found in the input lines.
        Lines preceding any explicit tag belong to the implicit default (public) tag.
        """
        current_lines = [] # type: strlist
        len_lines = len(lines) -1 # type: int # Subtract one because enumerate counts from zero
        # The very first line must contain tag name(s),
        # otherwise we assume that it is the implicit name, called 'public'.
        first_line = lines[0] # type: str
        current_tag = first_line.strip().replace(tag_indicator, '', 1) if \
            first_line.startswith(tag_indicator) else APISPEC.DEFAULT_TAG # type: str
        # Indicates that we are currently processing the very first line,
        # which is needed because if it starts with a tag name
        # then we do not want to immediately yield to our caller.
        in_first_line = True
        for idx, line in enumerate(lines):
            line_stripped = line.strip()
            if line_stripped.startswith(tag_indicator):
                # A new tag starts here - hand over what was collected for the previous one
                if not in_first_line:
                    yield current_tag, current_lines
                current_tag = line_stripped
                current_lines[:] = []
            else:
                in_first_line = False
                current_lines.append(line)
                # The last line was consumed - emit the final segment
                if idx == len_lines:
                    yield current_tag, current_lines
                    break
        else:
            yield current_tag, current_lines
# ################################################################################################################################
    def extract_segments(self, doc:'str') -> 'list_[_DocstringSegment]':
        """ Makes a pass over the docstring to extract all of its tags and their text.
        """
        # Response to produce
        out = [] # type: list_[_DocstringSegment]
        # Nothing to parse
        if not doc:
            return out
        # All lines in the docstring, possibly containing multiple tags
        all_lines = doc.strip().splitlines() # type: anylist
        # Again, nothing to parse
        if not all_lines:
            return out
        # Contains all lines still to be processed - function self._get_next_split_segment will update it in place.
        current_lines = all_lines[:]
        for tag, tag_lines in self._get_next_split_segment(current_lines):
            # All non-public tags are shown explicitly
            prefix_with_tag = tag != 'public'
            # A flag indicating whether we are processing a public or an internal tag,
            # e.g. public vs. @internal or @confidential.
            for name in tag_internal:
                if name in tag:
                    is_tag_internal = True
                    break
            else:
                is_tag_internal = False
            segment = self._parse_split_segment(tag, tag_lines, is_tag_internal, prefix_with_tag)
            # Only segments whose tag was requested on input are returned
            if segment.tag in self.data.tags:
                out.append(segment)
        return out
# ################################################################################################################################
    def parse_docstring(self) -> 'None':
        """ Populates self.data with summary, description and full text from the service's docstring.
        NOTE(review): this mirrors set_summary_desc except that it does not build
        the *_html fields - confirm whether both methods are still needed.
        """
        doc = self.service_class.__doc__ or ''
        # Escape asterisks so Markdown does not treat them as emphasis markers
        doc = doc.replace('*', r'\*')
        segments = self.extract_segments(doc)
        for segment in segments:
            # The very first summary found will set the whole docstring's summary
            if segment.summary:
                if not self.data.summary:
                    self.data.summary = segment.summary
            if segment.description:
                self.data.description += segment.description
            if segment.full:
                self.data.full += segment.full
# ################################################################################################################################
    def get_sio_desc(self, sio:'any_', io_separator:'str'='/', new_elem_marker:'str'='*') -> 'SimpleIODescription':
        """ Parses the docstring of a service's SimpleIO class into per-element
        descriptions of its input and output.
        """
        out = SimpleIODescription()
        doc = sio.service_class.SimpleIO.__doc__ or ''
        # Escape asterisks that are not element markers
        doc = doc.replace('*', r'\*')
        # No description to parse
        if not doc:
            return out
        doc = doc.strip() # type: str # type: ignore[no-redef]
        lines = [] # type: strlist
        # Strip leading whitespace but only from lines containing element names
        for line in doc.splitlines():
            orig_line = line
            line = line.lstrip()
            if line.startswith(new_elem_marker):
                lines.append(line)
            else:
                lines.append(orig_line)
        # Now, replace all the leading whitespace left with endline characters,
        # but instead of replacing them in place, they will be appending to the preceding line.
        # This will contain all lines with whitespace replaced with newlines
        with_new_lines = []
        for idx, line in enumerate(lines):
            # By default, assume that do not need to append the new line
            append_new_line = False
            # An empty line is interpreted as a new line marker
            if not line:
                append_new_line = True
            if line.startswith(' '):
                line = line.lstrip()
            with_new_lines.append(line)
            # Alright, this line started with whitespace which we removed above,
            # so now we need to append the new line character. But first we need to
            # find an index of any previous line that is not empty in case there
            # are multiple empty lines in succession in the input string.
            if append_new_line:
                line_found = False
                line_idx = idx
                # NOTE(review): line_found is never set to True - the loop only exits
                # via the break below; confirm the condition is as intended.
                while not line_found or (idx == 0):
                    line_idx -= 1
                    current_line = with_new_lines[line_idx]
                    if current_line.strip():
                        break
                with_new_lines[line_idx] += '\n'
        # We may still have some empty lines left over which we remove now
        lines = [elem for elem in with_new_lines[:] if elem]
        input_lines = [] # type: strlist
        output_lines = [] # type: strlist
        # If there is no empty line, the docstring will describe either input or output (we do not know yet).
        # If there is only one empty line, it constitutes a separator between input and output.
        # If there is more than one empty line, we need to look up the separator marker instead.
        # If the separator is not found, it again means that the docstring describes either input or output.
        empty_line_count = 0
        # Line that separates input from output in the list of arguments
        input_output_sep_idx = 0 # Initially empty, i.e. set to zero
        # To indicate whether we have found a separator in the docstring
        has_separator = False
        for idx, line in enumerate(lines):
            if not line:
                empty_line_count += 1
                input_output_sep_idx = idx
            if line == io_separator:
                has_separator = True
                input_output_sep_idx = idx
        # No empty line separator = we do not know if it is input or output so we need to populate both structures ..
        if empty_line_count == 0:
            input_lines[:] = lines[:]
            output_lines[:] = lines[:]
        # .. a single empty line separator = we know where input and output are.
        elif empty_line_count == 1:
            input_lines[:] = lines[:input_output_sep_idx]
            output_lines[:] = lines[input_output_sep_idx+1:]
        else:
            # If we have a separator, this is what indicates where input and output are ..
            if has_separator:
                input_lines[:] = lines[:input_output_sep_idx-1]
                output_lines[:] = lines[input_output_sep_idx-1:]
            # .. otherwise, we treat it as a list of arguments and we do not know if it is input or output.
            else:
                input_lines[:] = lines[:]
                output_lines[:] = lines[:]
        # Drop empty lines and any leftover separator lines before parsing
        input_lines = [elem for elem in input_lines if elem and elem != io_separator]
        output_lines = [elem for elem in output_lines if elem and elem != io_separator]
        out.input.update(self._parse_sio_desc_lines(input_lines))
        out.output.update(self._parse_sio_desc_lines(output_lines))
        return out
# ################################################################################################################################
    def _parse_sio_desc_lines(self, lines:'anylist', new_elem_marker:'str'='*') -> 'anydict':
        """ Turns lines such as '* name - description' into a dict mapping each element
        name to its (possibly multi-line) description string.
        """
        out = {}
        current_elem = None
        for line in lines:
            if line.startswith(new_elem_marker):
                # We will need it later below
                orig_line = line
                # Remove whitespace, skip the new element marker and the first string left over will be the element name.
                line_list = [elem for elem in line.split()] # type: strlist
                line_list.remove(new_elem_marker)
                current_elem = line_list[0]
                # We have the element name so we can now remove it from the full line
                to_remove = '{} {} - '.format(new_elem_marker, current_elem)
                after_removal = orig_line.replace(to_remove, '', 1)
                out[current_elem] = [after_removal]
            else:
                # A continuation line - attach it to the element found most recently
                if current_elem:
                    out[current_elem].append(line)
        # Joining all the lines into a single string, preprocessing them along the way.
        for key, value in out.items():
            # We need to strip the trailing new line characters from the last element in the list of lines
            # because it is redundant and our callers would not want to render it anyway.
            last = value[-1]
            last = last.rstrip()
            value[-1] = last
            # Joining the lines now, honouring new line characters. Also, append whitespace
            # but only to elements that are not the last in the list because they end a sentence.
            new_value = []
            len_value = len(value)
            for idx, elem in enumerate(value, 1):
                if idx != len_value and not elem.endswith('\n'):
                    elem += ' '
                new_value.append(elem)
            # Everything is preprocessed so we can create a new string now ..
            new_value = ''.join(new_value) # type: ignore[assignment]
            # .. and set it for that key.
            out[key] = new_value
        return out
# ################################################################################################################################
    def to_html(self, value:'str') -> 'str':
        """ Renders Markdown input as HTML with the outer paragraph markers removed.
        NOTE(review): lstrip/rstrip remove any run of the characters in the given set
        ('<', 'p', '>' and '/'), not the literal tags, so content that begins or ends
        with one of those characters may lose characters - confirm inputs make this safe.
        """
        return markdown(value).lstrip('<p>').rstrip('</p>')
# ################################################################################################################################
# ################################################################################################################################
| 17,944
|
Python
|
.py
| 335
| 42.841791
| 130
| 0.514867
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,214
|
openapi.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/spec/openapi.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Bunch
from bunch import Bunch, bunchify
# Parse
from parse import parse
# PyYAML
from yaml import dump as yaml_dump, Dumper as YAMLDumper
# Zato
from zato.common.api import URL_TYPE
from zato.common.marshal_.api import Model
from zato.common.typing_ import cast_
from zato.common.util.file_system import fs_safe_name
from zato.common.util.import_ import import_string
from zato.server.apispec.parser.service import build_field_list
# Zato - Cython
from zato.simpleio import SIO_TYPE_MAP
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anydict, anydictnone, anylist, dictlist, stranydict, strorlist
from zato.server.apispec.model import FieldInfo
FieldInfo = FieldInfo
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
_SIO_TYPE_MAP = SIO_TYPE_MAP()
# ################################################################################################################################
# ################################################################################################################################
class OpenAPIGenerator:
""" Generates OpenAPI specifications.
"""
def __init__(
self,
data, # type: dictlist
channel_data, # type: dictlist
needs_api_invoke, # type: bool
needs_rest_channels, # type: bool
api_invoke_path # type: strorlist
) -> 'None':
self.data = data
self.channel_data = channel_data
self.needs_api_invoke = needs_api_invoke
self.needs_rest_channels = needs_rest_channels
if api_invoke_path:
api_invoke_path = api_invoke_path if isinstance(api_invoke_path, list) else [api_invoke_path]
else:
api_invoke_path = []
self.api_invoke_path = api_invoke_path
# ################################################################################################################################
def _get_request_name(self, service_name:'str') -> 'str':
return 'request_{}'.format(fs_safe_name(service_name))
# ################################################################################################################################
def _get_response_name(self, service_name:'str') -> 'str':
return 'response_{}'.format(fs_safe_name(service_name))
# ################################################################################################################################
def _add_model_schema(self, model_name:'str', model:'Model', out:'anydict') -> 'None':
# Do not visit the model if we have already seen it
if model_name in out:
return
else:
out[model_name] = {}
# Extract elements from the model object ..
sio_elems = build_field_list(model, _SIO_TYPE_MAP.OPEN_API_V3)
# .. and visit each of them, potentially recursing back into our function.
self._visit_sio_elems(model_name, sio_elems, out)
# ################################################################################################################################
def _visit_sio_elems(self, schema_name:'str', sio_elems:'anylist', out:'anydict') -> 'None':
properties = {} # type: stranydict
out[schema_name]['properties'] = properties
# All the elements of this model that are required
elems_required_names = [elem.name for elem in sio_elems if elem.is_required]
# Go through each SIO element to build an OpenAPI property for it ..
for info in sio_elems:
info = cast_('FieldInfo', info)
# .. this key will always exist ..
property_map = {
'description': info.description
} # type: stranydict
# .. for nested models, ref will exist ..
if info.ref:
# .. out of which we can extract a Python class name ..
model_name = info.ref.replace('#/components/schemas/', '')
# .. list elements will have type/items sub-elements ..
if info.is_list:
property_map['type'] = 'array'
property_map['items'] = {
'$ref': info.ref
}
# .. while non-list elements get the $ref element directly ..
else:
property_map['$ref'] = info.ref
# .. now, a model (class) ..
model = import_string(model_name)
# .. next, we can append this class's definitions to our dictionary of schemas ..
self._add_model_schema(model_name, model, out)
# .. while for simple types, these two will exist ..
else:
property_map['type'] = info.type
if info.type == 'array':
property_map['items'] = {}
elif info.type == 'object':
property_map['additionalProperties'] = {}
# .. now, we can assign the property to its container.
properties[info.name] = property_map
if elems_required_names:
out[schema_name]['required'] = elems_required_names
# ################################################################################################################################
def _get_message_schemas(self, data:'dictlist', is_request:'bool') -> 'Bunch':
if is_request:
name_func = self._get_request_name
msg_name = 'Request'
sio_elem_attr = 'input'
else:
name_func = self._get_response_name
msg_name = 'Response'
sio_elem_attr = 'output'
# All schema objects for input request or response
out = Bunch()
# Go through all the services ..
for item in data:
# .. skip it unless we can support OpenAPI ..
if 'openapi_v3' not in item['simple_io']:
continue
# .. turn its class name into a schema name ..
message_name = name_func(item['name'])
# .. prepare it upfront here ..
out[message_name] = {
'title': '{} object for {}'.format(msg_name, item['name']),
'type': 'object',
}
# .. get all the elements of the model class ..
sio_elems = getattr(item['simple_io']['openapi_v3'], sio_elem_attr)
# .. turn them into an OpenAPI schema ..
self._visit_sio_elems(message_name, sio_elems, out)
# .. and return our result to the caller.
return out
# ################################################################################################################################
def get_rest_channel(self, service_name:'str') -> 'anydictnone':
for channel_item in self.channel_data:
if channel_item['service_name'] == service_name:
if channel_item['transport'] == URL_TYPE.PLAIN_HTTP:
return bunchify(channel_item)
# ################################################################################################################################
def get_path_operation(self, service_name:'str') -> 'str':
service_name_list = service_name.split('.') # E.g. my.api.name.get-client -> ['my', 'api', 'name', 'get-client']
op_name = service_name_list[-1]
if op_name.startswith('get'):
return 'get'
elif op_name.startswith('delete'):
return 'delete'
else:
return 'post'
# ################################################################################################################################
def has_path_elem(self, url_path:'str', elem_name:'str') -> 'bool':
pattern = '{%s}' % elem_name
return pattern in url_path
# ################################################################################################################################
    def generate(self) -> 'str':
        """ Builds an OpenAPI 3.0.3 specification covering all the services in self.data
        and returns it serialized to YAML. The spec always declares HTTP Basic Auth
        as its global security scheme and exposes every service through POST operations.
        """
        # Local aliases
        sec_name = 'BasicAuth'
        # Basic information, always available
        out = Bunch()
        out.openapi = '3.0.3'
        out.info = {
            'title': 'API spec',
            'version': '1.0',
        }
        out.servers = [{'url': 'http://127.0.0.1:17010'}]
        # Responses to refer to in paths
        out.components = Bunch()
        out.components.schemas = Bunch()
        # Security definition ..
        out.components['securitySchemes'] = {}
        out.components['securitySchemes'][sec_name] = {}
        out.components['securitySchemes'][sec_name]['type'] = 'http'
        out.components['securitySchemes'][sec_name]['scheme'] = 'basic'
        # .. apply the definition globally.
        out['security'] = []
        out['security'].append({sec_name:[]})
        # REST paths
        out.paths = Bunch()
        # Schemas for all services - it is possible that not all of them will be output,
        # for instance, if a service is not exposed through any REST channel.
        request_schemas = self._get_message_schemas(self.data, True)
        response_schemas = self._get_message_schemas(self.data, False)
        schemas = {}
        schemas.update(request_schemas)
        schemas.update(response_schemas)
        out.components.schemas.update(schemas)
        for item in self.data:
            # Container for all the URL paths found for this item (service)
            url_paths = []
            # Parameters carried in URL paths, e.g. /user/{username}/{lang_code},
            # all of them will be treated as required and all of them will be string ones.
            channel_params = []
            # Now, collect all the paths that the spec will contain ..
            # .. generic API invoker, e.g. /zato/api/invoke/{service_name} ..
            if self.needs_api_invoke and self.api_invoke_path:
                for path in self.api_invoke_path:
                    url_paths.append(path.format(service_name=item['name']))
            # .. per-service specific REST channels.
            if self.needs_rest_channels:
                rest_channel = self.get_rest_channel(item['name'])
                if rest_channel:
                    # This is always needed, whether path parameters exist or not
                    url_paths.append(rest_channel['url_path'])
                    # Path parameters
                    group_names = rest_channel['match_target_compiled'].group_names
                    if group_names:
                        # Populate details of path parameters
                        for channel_param_name in sorted(group_names):
                            channel_params.append({
                                'name': channel_param_name,
                                'description': '',
                                'in': 'path',
                                'required': True,
                                'schema': {
                                    'type': 'string',
                                    'format': 'string',
                                }
                            })
            # Translate the service name into a normalised form
            service_name_fs = fs_safe_name(item['name'])
            for url_path in url_paths:
                out_path = out.paths.setdefault(url_path, Bunch()) # type: Bunch
                post = out_path.setdefault('post', Bunch()) # type: Bunch
                operation_id = 'post_{}'.format(fs_safe_name(url_path))
                request_name = self._get_request_name(service_name_fs)
                response_name = self._get_response_name(service_name_fs)
                # References into components.schemas populated above
                request_ref = '#/components/schemas/{}'.format(request_name)
                response_ref = '#/components/schemas/{}'.format(response_name)
                request_body = Bunch()
                request_body.required = True
                request_body.content = Bunch()
                request_body.content['application/json'] = Bunch()
                request_body.content['application/json'].schema = Bunch()
                request_body.content['application/json'].schema['$ref'] = request_ref
                # Only a 200 response is ever declared for any operation
                responses = Bunch()
                responses['200'] = Bunch()
                responses['200'].description = ''
                responses['200'].content = Bunch()
                responses['200'].content['application/json'] = Bunch()
                responses['200'].content['application/json'].schema = Bunch()
                responses['200'].content['application/json'].schema['$ref'] = response_ref
                post['operationId'] = operation_id
                post['requestBody'] = request_body
                post['responses'] = responses
                if channel_params:
                    # Whether this "url_path" should receive the channel parameters
                    should_attach = True
                    # The channel parameters dictionary needs to be ignored
                    # if the channel is actually an API invoker, i.e. these parameters
                    # should only be used with channels that are dedicated to a service
                    # because this is where they were extracted from.
                    # Iterate over all the API invokers ..
                    for path_pattern in self.api_invoke_path:
                        # .. if we have a match, it means that "url_path" is actually
                        # .. a path pointing to an API invoker, in which case we ignore it,
                        # .. meaning that we will not attach channel parameters to it.
                        if parse(path_pattern, url_path):
                            should_attach = False
                            break
                    # If we are here, it means that "url_path" is a standalone REST channel,
                    # which means that it will get its path parameters.
                    if should_attach:
                        post['parameters'] = channel_params
        return yaml_dump(out.toDict(), Dumper=YAMLDumper, default_flow_style=False)
# ################################################################################################################################
# ################################################################################################################################
| 15,340
|
Python
|
.py
| 277
| 42.974729
| 130
| 0.465575
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,215
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/spec/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,216
|
wsdl.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/spec/wsdl.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,217
|
core.py
|
zatosource_zato/code/zato-server/src/zato/server/apispec/spec/core.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from fnmatch import fnmatch
# Zato
from zato.server.apispec.parser.service import ServiceInfo
# ################################################################################################################################
# ################################################################################################################################
# The imports below exist only for the benefit of static type checkers -
# the "if 0" guard guarantees that they are never executed at runtime.
if 0:
    from dataclasses import Field
    from zato.common.typing_ import anydict, anylist, anylistnone, dict_, dictlist
    from zato.server.service import Service
    # Self-assignments keep linters from flagging the names as unused
    Field = Field
    Service = Service
# ################################################################################################################################
# ################################################################################################################################
class Generator:
    """ Collects metadata about services from the server's service store,
    in the scope required to generate API documentation and clients.
    """
    def __init__(
        self,
        service_store_services, # type: anydict
        simple_io_config, # type: anydict
        include, # type: anylist
        exclude, # type: anylist
        query='', # type: str
        tags=None, # type: anylistnone
        needs_sio_desc=True # type: bool
    ) -> 'None':
        self.service_store_services = service_store_services
        self.simple_io_config = simple_io_config
        self.include = include or []
        self.exclude = exclude or []
        self.query = query
        self.tags = tags or []
        self.needs_sio_desc = needs_sio_desc
        # Maps service name -> ServiceInfo, filled in by build_service_information
        self.services = {} # type: dict_[str, ServiceInfo]
# ################################################################################################################################
    def get_info(self) -> 'dictlist':
        """ Returns a list of dicts containing metadata about services in the scope required to generate docs and API clients.
        """
        # This is the call that finds all the services in the server's service store
        # and turns them into a data structure that SIO information is applied to in later steps.
        self.build_service_information()
        # Split the optional query into individual tokens to match against
        query_items = [elem.strip() for elem in self.query.strip().split()] if self.query else []
        out = []
        # Add services, in name order - a service must contain every query token to be returned
        for name in sorted(self.services):
            if any(query_item not in name for query_item in query_items):
                continue
            out.append(self.services[name].to_dict())
        return out
# ################################################################################################################################
    def _should_handle(self, name:'str', list_:'anylist') -> 'bool':
        """ Returns True if name matches at least one shell-style pattern in list_. """
        return any(fnmatch(name, match_elem) for match_elem in list_)
# ################################################################################################################################
    def build_service_information(self) -> 'None':
        """ Populates self.services with a ServiceInfo object
        for each service that is included and not excluded.
        """
        for details in self.service_store_services.values():
            name = details['name']
            if self._should_handle(name, self.include) and not self._should_handle(name, self.exclude):
                info = ServiceInfo(name, details['service_class'], self.simple_io_config, self.tags, self.needs_sio_desc)
                self.services[info.name] = info
# ################################################################################################################################
# ################################################################################################################################
| 4,150
|
Python
|
.py
| 80
| 43.5125
| 130
| 0.422668
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,218
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/base/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,219
|
config.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/config.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from logging import getLogger
# Zato
from zato.bunch import Bunch
from zato.common.api import AuditLog, RATE_LIMIT
from zato.common.audit_log import LogContainerConfig
from zato.common.const import SECRETS, ServiceConst
from zato.common.util.api import asbool
from zato.common.util.config import resolve_name
from zato.common.util.sql import elems_with_opaque
from zato.common.util.url_dispatcher import get_match_target
from zato.server.config import ConfigDict
from zato.url_dispatcher import Matcher
# ################################################################################################################################
# ################################################################################################################################
# The imports below exist only for the benefit of static type checkers -
# the "if 0" guard guarantees that they are never executed at runtime.
if 0:
    from zato.common.model.wsx import WSXConnectorConfig
    from zato.common.odb.model import Server as ServerModel
    from zato.common.typing_ import anydict, anydictnone, anyset
    from zato.server.base.parallel import ParallelServer
    # Self-assignment keeps linters from flagging the import as unused
    WSXConnectorConfig = WSXConnectorConfig
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    """ Module-level constants used by the configuration loader. """
    # Default maximum length of messages kept in an audit log container
    Audit_Max_Len_Messages = AuditLog.Default.max_len_messages
    # Security definition types that may carry rate limiting configuration
    Config_Store = ('apikey', 'basic_auth', 'jwt')
    # Rate limiting type and object-type identifiers
    Rate_Limit_Exact = RATE_LIMIT.TYPE.EXACT.id
    Rate_Limit_Sec_Def = RATE_LIMIT.OBJECT_TYPE.SEC_DEF
    Rate_Limit_HTTP_SOAP = RATE_LIMIT.OBJECT_TYPE.HTTP_SOAP
# ################################################################################################################################
# ################################################################################################################################
class ConfigLoader:
""" Loads server's configuration.
"""
# ################################################################################################################################
def set_up_security(self:'ParallelServer', cluster_id:'int') -> 'None':
# API keys
query = self.odb.get_apikey_security_list(cluster_id, True)
self.config.apikey = ConfigDict.from_query('apikey', query, decrypt_func=self.decrypt)
# AWS
query = self.odb.get_aws_security_list(cluster_id, True)
self.config.aws = ConfigDict.from_query('aws', query, decrypt_func=self.decrypt)
# HTTP Basic Auth
query = self.odb.get_basic_auth_list(cluster_id, None, True)
self.config.basic_auth = ConfigDict.from_query('basic_auth', query, decrypt_func=self.decrypt)
# JWT
query = self.odb.get_jwt_list(cluster_id, None, True)
self.config.jwt = ConfigDict.from_query('jwt', query, decrypt_func=self.decrypt)
# NTLM
query = self.odb.get_ntlm_list(cluster_id, True)
self.config.ntlm = ConfigDict.from_query('ntlm', query, decrypt_func=self.decrypt)
# OAuth
query = self.odb.get_oauth_list(cluster_id, True)
self.config.oauth = ConfigDict.from_query('oauth', query, decrypt_func=self.decrypt)
# RBAC - permissions
query = self.odb.get_rbac_permission_list(cluster_id, True)
self.config.rbac_permission = ConfigDict.from_query('rbac_permission', query, decrypt_func=self.decrypt)
# RBAC - roles
query = self.odb.get_rbac_role_list(cluster_id, True)
self.config.rbac_role = ConfigDict.from_query('rbac_role', query, decrypt_func=self.decrypt)
# RBAC - client roles
query = self.odb.get_rbac_client_role_list(cluster_id, True)
self.config.rbac_client_role = ConfigDict.from_query('rbac_client_role', query, decrypt_func=self.decrypt)
# RBAC - role permission
query = self.odb.get_rbac_role_permission_list(cluster_id, True)
self.config.rbac_role_permission = ConfigDict.from_query('rbac_role_permission', query, decrypt_func=self.decrypt)
# TLS CA certs
query = self.odb.get_tls_ca_cert_list(cluster_id, True)
self.config.tls_ca_cert = ConfigDict.from_query('tls_ca_cert', query, decrypt_func=self.decrypt)
# TLS channel security
query = self.odb.get_tls_channel_sec_list(cluster_id, True)
self.config.tls_channel_sec = ConfigDict.from_query('tls_channel_sec', query, decrypt_func=self.decrypt)
# TLS key/cert pairs
query = self.odb.get_tls_key_cert_list(cluster_id, True)
self.config.tls_key_cert = ConfigDict.from_query('tls_key_cert', query, decrypt_func=self.decrypt)
# Vault connections
query = self.odb.get_vault_connection_list(cluster_id, True)
self.config.vault_conn_sec = ConfigDict.from_query('vault_conn_sec', query, decrypt_func=self.decrypt)
# Encrypt all secrets
self._encrypt_secrets()
# ################################################################################################################################
def set_up_pubsub(self:'ParallelServer', cluster_id:'int') -> 'None':
# Pub/sub
self.config.pubsub = Bunch()
# Pub/sub - endpoints
query = self.odb.get_pubsub_endpoint_list(cluster_id, True)
self.config.pubsub_endpoint = ConfigDict.from_query('pubsub_endpoint', query, decrypt_func=self.decrypt)
# Pub/sub - topics
query = self.odb.get_pubsub_topic_list(cluster_id, True)
self.config.pubsub_topic = ConfigDict.from_query('pubsub_topic', query, decrypt_func=self.decrypt)
# Pub/sub - subscriptions
query = self.odb.get_pubsub_subscription_list(cluster_id, True)
self.config.pubsub_subscription = ConfigDict.from_query('pubsub_subscription', query, decrypt_func=self.decrypt)
# ################################################################################################################################
    def set_up_config(
        self:'ParallelServer', # type: ignore
        server:'ServerModel'
    ) -> 'None':
        """ Loads from the ODB the configuration of all connection definitions, channels,
        outgoing connections, security definitions, pub/sub and related objects,
        assigning the resulting ConfigDict objects to self.config. Runs once at startup.
        """
        # Which components are enabled
        self.component_enabled.stats = asbool(self.fs_server_config.component_enabled.stats)
        self.component_enabled.slow_response = asbool(self.fs_server_config.component_enabled.slow_response)
        #
        # Cassandra - start
        #
        query = self.odb.get_cassandra_conn_list(server.cluster.id, True)
        self.config.cassandra_conn = ConfigDict.from_query('cassandra_conn', query, decrypt_func=self.decrypt)
        query = self.odb.get_cassandra_query_list(server.cluster.id, True)
        self.config.cassandra_query = ConfigDict.from_query('cassandra_query', query, decrypt_func=self.decrypt)
        #
        # Cassandra - end
        #
        #
        # Search - start
        #
        query = self.odb.get_search_es_list(server.cluster.id, True)
        self.config.search_es = ConfigDict.from_query('search_es', query, decrypt_func=self.decrypt)
        query = self.odb.get_search_solr_list(server.cluster.id, True)
        self.config.search_solr = ConfigDict.from_query('search_solr', query, decrypt_func=self.decrypt)
        #
        # Search - end
        #
        #
        # SMS - start
        #
        query = self.odb.get_sms_twilio_list(server.cluster.id, True)
        self.config.sms_twilio = ConfigDict.from_query('sms_twilio', query, decrypt_func=self.decrypt)
        #
        # SMS - end
        #
        #
        # Cloud - start
        #
        # AWS S3
        query = self.odb.get_cloud_aws_s3_list(server.cluster.id, True)
        self.config.cloud_aws_s3 = ConfigDict.from_query('cloud_aws_s3', query, decrypt_func=self.decrypt)
        #
        # Cloud - end
        #
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Services
        query = self.odb.get_service_list(server.cluster.id, True)
        self.config.service = ConfigDict.from_query('service_list', query, decrypt_func=self.decrypt)
        #
        # Definitions - start
        #
        # AMQP
        query = self.odb.get_definition_amqp_list(server.cluster.id, True)
        self.config.definition_amqp = ConfigDict.from_query('definition_amqp', query, decrypt_func=self.decrypt)
        # IBM MQ
        query = self.odb.get_definition_wmq_list(server.cluster.id, True)
        self.config.definition_wmq = ConfigDict.from_query('definition_wmq', query, decrypt_func=self.decrypt)
        #
        # Definitions - end
        #
        #
        # Channels - start
        #
        # AMQP
        query = self.odb.get_channel_amqp_list(server.cluster.id, True)
        self.config.channel_amqp = ConfigDict.from_query('channel_amqp', query, decrypt_func=self.decrypt)
        # IBM MQ
        query = self.odb.get_channel_wmq_list(server.cluster.id, True)
        self.config.channel_wmq = ConfigDict.from_query('channel_wmq', query, decrypt_func=self.decrypt)
        #
        # Channels - end
        #
        #
        # Outgoing connections - start
        #
        # AMQP
        query = self.odb.get_out_amqp_list(server.cluster.id, True)
        self.config.out_amqp = ConfigDict.from_query('out_amqp', query, decrypt_func=self.decrypt)
        # Caches
        query = self.odb.get_cache_builtin_list(server.cluster.id, True)
        self.config.cache_builtin = ConfigDict.from_query('cache_builtin', query, decrypt_func=self.decrypt)
        query = self.odb.get_cache_memcached_list(server.cluster.id, True)
        self.config.cache_memcached = ConfigDict.from_query('cache_memcached', query, decrypt_func=self.decrypt)
        # FTP
        query = self.odb.get_out_ftp_list(server.cluster.id, True)
        self.config.out_ftp = ConfigDict.from_query('out_ftp', query, decrypt_func=self.decrypt)
        # IBM MQ
        query = self.odb.get_out_wmq_list(server.cluster.id, True)
        self.config.out_wmq = ConfigDict.from_query('out_wmq', query, decrypt_func=self.decrypt)
        # Odoo
        query = self.odb.get_out_odoo_list(server.cluster.id, True)
        self.config.out_odoo = ConfigDict.from_query('out_odoo', query, decrypt_func=self.decrypt)
        # SAP RFC
        query = self.odb.get_out_sap_list(server.cluster.id, True)
        self.config.out_sap = ConfigDict.from_query('out_sap', query, decrypt_func=self.decrypt)
        # REST
        query = self.odb.get_http_soap_list(server.cluster.id, 'outgoing', 'plain_http', True)
        self.config.out_plain_http = ConfigDict.from_query('out_plain_http', query, decrypt_func=self.decrypt)
        # SFTP
        query = self.odb.get_out_sftp_list(server.cluster.id, True)
        self.config.out_sftp = ConfigDict.from_query('out_sftp', query, decrypt_func=self.decrypt, drop_opaque=True)
        # SOAP
        query = self.odb.get_http_soap_list(server.cluster.id, 'outgoing', 'soap', True)
        self.config.out_soap = ConfigDict.from_query('out_soap', query, decrypt_func=self.decrypt)
        # SQL
        query = self.odb.get_out_sql_list(server.cluster.id, True)
        self.config.out_sql = ConfigDict.from_query('out_sql', query, decrypt_func=self.decrypt)
        # ZMQ channels
        query = self.odb.get_channel_zmq_list(server.cluster.id, True)
        self.config.channel_zmq = ConfigDict.from_query('channel_zmq', query, decrypt_func=self.decrypt)
        # ZMQ outgoing
        query = self.odb.get_out_zmq_list(server.cluster.id, True)
        self.config.out_zmq = ConfigDict.from_query('out_zmq', query, decrypt_func=self.decrypt)
        # WebSocket channels
        query = self.odb.get_channel_web_socket_list(server.cluster.id, True)
        self.config.channel_web_socket = ConfigDict.from_query('channel_web_socket', query, decrypt_func=self.decrypt)
        #
        # Outgoing connections - end
        #
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        #
        # Generic - start
        #
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Connections
        query = self.odb.get_generic_connection_list(server.cluster.id, True)
        self.config.generic_connection = ConfigDict.from_query('generic_connection', query, decrypt_func=self.decrypt)
        #
        # Generic - end
        #
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        #
        # Notifications - start
        #
        # SQL
        query = self.odb.get_notif_sql_list(server.cluster.id, True)
        self.config.notif_sql = ConfigDict.from_query('notif_sql', query, decrypt_func=self.decrypt)
        #
        # Notifications - end
        #
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        #
        # Security - start
        #
        # NOTE(review): this passes server.cluster_id while pub/sub below uses self.cluster_id -
        # presumably the two are always the same value; confirm.
        self.set_up_security(server.cluster_id)
        #
        # Security - end
        #
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # All the HTTP/SOAP channels.
        http_soap = []
        for item in elems_with_opaque(self.odb.get_http_soap_list(server.cluster.id, 'channel')):
            hs_item = {}
            for key in item.keys():
                hs_item[key] = getattr(item, key)
            hs_item['name'] = resolve_name(hs_item['name'])
            # Pre-compile the matcher used by the URL dispatcher at runtime
            hs_item['match_target'] = get_match_target(hs_item, http_methods_allowed_re=self.http_methods_allowed_re)
            hs_item['match_target_compiled'] = Matcher(hs_item['match_target'], hs_item.get('match_slash', ''))
            http_soap.append(hs_item)
        self.config.http_soap = http_soap
        # JSON Pointer
        query = self.odb.get_json_pointer_list(server.cluster.id, True)
        self.config.json_pointer = ConfigDict.from_query('json_pointer', query, decrypt_func=self.decrypt)
        # SimpleIO
        # In preparation for a SIO rewrite, we loaded SIO config from a file
        # but actual code paths require the pre-3.0 format so let's prepare it here.
        self.config.simple_io = ConfigDict('simple_io', Bunch())
        int_exact = self.sio_config.int_config.exact
        int_suffixes = self.sio_config.int_config.suffixes
        bool_prefixes = self.sio_config.bool_config.prefixes
        self.config.simple_io['int_parameters'] = int_exact
        self.config.simple_io['int_parameter_suffixes'] = int_suffixes
        self.config.simple_io['bool_parameter_prefixes'] = bool_prefixes
        # Maintain backward-compatibility with pre-3.1 versions that did not specify any particular encoding
        self.config.simple_io['bytes_to_str'] = {'encoding': self.sio_config.bytes_to_str_encoding or None}
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        #
        # Pub/sub - start
        #
        self.set_up_pubsub(self.cluster_id)
        #
        # Pub/sub - end
        #
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # E-mail - SMTP
        query = self.odb.get_email_smtp_list(server.cluster.id, True)
        self.config.email_smtp = ConfigDict.from_query('email_smtp', query, decrypt_func=self.decrypt)
        # E-mail - IMAP
        query = self.odb.get_email_imap_list(server.cluster.id, True)
        self.config.email_imap = ConfigDict.from_query('email_imap', query, decrypt_func=self.decrypt)
        # .. reusable ..
        _logging_stanza = self.fs_server_config.get('logging', {})
        # HTTP access log should optionally ignore certain requests ..
        access_log_ignore = _logging_stanza.get('http_access_log_ignore')
        if access_log_ignore:
            # A scalar value is promoted to a one-element list
            access_log_ignore = access_log_ignore if isinstance(access_log_ignore, list) else [access_log_ignore]
            self.needs_all_access_log = False
            self.access_log_ignore.update(access_log_ignore)
        # .. same goes for REST log entries that go to the server log ..
        # .. if it does not exist, we need to populate it ourselves ..
        _has_rest_log_ignore = 'rest_log_ignore' in _logging_stanza
        if not _has_rest_log_ignore:
            rest_log_ignore = [ServiceConst.API_Admin_Invoke_Url_Path]
        else:
            rest_log_ignore = _logging_stanza['rest_log_ignore']
            rest_log_ignore = rest_log_ignore if isinstance(rest_log_ignore, list) else [rest_log_ignore]
        # .. now, update the set of channels to ignore the REST log for ..
        self.rest_log_ignore.update(rest_log_ignore)
        # Assign config to worker
        self.worker_store.worker_config = self.config
# ################################################################################################################################
def delete_object_rate_limiting(
self:'ParallelServer', # type: ignore
object_type:'str',
object_name:'str'
) -> 'None':
if self.rate_limiting.has_config(object_type, object_name):
self.rate_limiting.delete(object_type, object_name)
# ################################################################################################################################
def set_up_rate_limiting(
self:'ParallelServer', # type: ignore
) -> 'None':
for config_store_name in ModuleCtx.Config_Store:
config_dict = self.config[config_store_name] # type: ConfigDict
for object_name in config_dict: # type: str
self.set_up_object_rate_limiting(ModuleCtx.Rate_Limit_Sec_Def, object_name, config_store_name)
for item in self.config['http_soap']: # type: dict
# Set up rate limiting only if we know there is configuration for it available
if 'is_rate_limit_active' in item:
self.set_up_object_rate_limiting(ModuleCtx.Rate_Limit_HTTP_SOAP, item['name'], config_=item)
# ################################################################################################################################
    def set_up_object_rate_limiting(
        self:'ParallelServer', # type: ignore
        object_type, # type: str
        object_name, # type: str
        config_store_name='', # type: str
        config_=None, # type: anydictnone
    ) -> 'bool':
        """ Creates, updates or deletes the rate limiting configuration of a single object,
        depending on whether rate limiting is active for it. The configuration is taken
        either from config_ directly or looked up in self.config[config_store_name].
        Returns True if rate limiting is active for the object.
        """
        if not config_:
            config_dict = self.config[config_store_name].get(object_name) # type: ConfigDict
            config = config_dict['config'] # type: anydict
        else:
            config = config_
        is_rate_limit_active = config.get('is_rate_limit_active') or False # type: bool
        if is_rate_limit_active:
            # This is reusable no matter if it is edit or create action
            rate_limit_def = config['rate_limit_def']
            is_exact = config['rate_limit_type'] == ModuleCtx.Rate_Limit_Exact
            # Base dict that will be used as is, if we are to create the rate limiting configuration,
            # or it will be updated with existing configuration, if it already exists.
            rate_limit_config = {
                'id': '{}.{}'.format(object_type, config['id']),
                'is_active': is_rate_limit_active,
                'type_': object_type,
                'name': object_name,
                'parent_type': None,
                'parent_name': None,
            }
            # Do we have such configuration already?
            existing_config = self.rate_limiting.get_config(object_type, object_name)
            # .. if yes, we will be updating it
            if existing_config:
                # Preserve the existing parent assignment across the edit
                rate_limit_config['parent_type'] = existing_config.parent_type
                rate_limit_config['parent_name'] = existing_config.parent_name
                self.rate_limiting.edit(object_type, object_name, rate_limit_config, rate_limit_def, is_exact)
            # .. otherwise, we will be creating a new one
            else:
                self.rate_limiting.create(rate_limit_config, rate_limit_def, is_exact)
        # We are not to have any rate limits, but it is possible that previously we were required to,
        # in which case this needs to be cleaned up.
        else:
            existing_config = self.rate_limiting.get_config(object_type, object_name)
            if existing_config:
                object_info = existing_config.object_info
                self.rate_limiting.delete(object_info.type_, object_info.name)
        return is_rate_limit_active
# ################################################################################################################################
def set_up_object_audit_log(
self:'ParallelServer', # type: ignore
object_type, # type: str
object_id, # type: str
config, # type: WSXConnectorConfig
is_edit # type: bool
) -> 'None':
# Prepare a new configuration object for that log ..
log_config = LogContainerConfig()
log_config.type_ = object_type
log_config.object_id = object_id
if isinstance(config, dict):
config_max_len_messages_sent = config['max_len_messages_sent'] or 0
config_max_len_messages_received = config['max_len_messages_received'] or 0
else:
config_max_len_messages_sent = config.max_len_messages_sent or 0
config_max_len_messages_received = config.max_len_messages_received or 0
log_config.max_len_messages_sent = config_max_len_messages_sent
log_config.max_len_messages_received = config_max_len_messages_received
# .. convert both from kilobytes to bytes (we use kB = 1,000 bytes rather than KB = 1,024 bytes) ..
log_config.max_bytes_per_message_sent = int(config_max_len_messages_sent) * 1000
log_config.max_bytes_per_message_received = int(config_max_len_messages_received) * 1000
# .. and now we can create our audit log container
func = self.audit_log.edit_container if is_edit else self.audit_log.create_container
func(log_config)
# ################################################################################################################################
def set_up_object_audit_log_by_config(
self:'ParallelServer', # type: ignore
object_type, # type: str
object_id, # type: str
config, # type: WSXConnectorConfig
is_edit # type: bool
) -> 'None':
if getattr(config, 'is_audit_log_sent_active', False) or getattr(config, 'is_audit_log_received_active', False):
# These may be string objects
config.max_len_messages_sent = int(config.max_len_messages_sent or ModuleCtx.Audit_Max_Len_Messages)
config.max_len_messages_received = int(config.max_len_messages_received or ModuleCtx.Audit_Max_Len_Messages)
self.set_up_object_audit_log(object_type, object_id, config, is_edit)
# ################################################################################################################################
def _after_init_accepted(
self: 'ParallelServer', # type: ignore
locally_deployed # type: anyset
) -> 'None':
# Deploy missing services found on other servers
if locally_deployed:
self.deploy_missing_services(locally_deployed)
# Signal to ODB that we are done with deploying everything
self.odb.on_deployment_finished()
# Populate default pub/sub endpoint data
default_internal_pubsub_endpoint = self.odb.get_default_internal_pubsub_endpoint()
self.default_internal_pubsub_endpoint_id = default_internal_pubsub_endpoint.id
# Default content type
self.json_content_type = self.fs_server_config.content_type.json
# ################################################################################################################################
def get_config_odb_data(self, parallel_server:'ParallelServer') -> 'Bunch':
""" Returns configuration with regards to ODB data.
"""
odb_data = Bunch()
odb_data.db_name = parallel_server.odb_data['db_name']
odb_data.extra = parallel_server.odb_data['extra']
odb_data.engine = parallel_server.odb_data['engine']
odb_data.token = parallel_server.fs_server_config.main.token
odb_data.is_odb = True
if odb_data.engine != 'sqlite':
odb_data.password = parallel_server.odb_data['password']
odb_data.host = parallel_server.odb_data['host']
odb_data.port = parallel_server.odb_data['port']
odb_data.engine = parallel_server.odb_data['engine']
odb_data.pool_size = parallel_server.odb_data['pool_size']
odb_data.username = parallel_server.odb_data['username']
# Note that we don't read is_active off of anywhere - ODB always must
# be active and it's not a regular connection pool anyway.
odb_data.is_active = True
return odb_data
# ################################################################################################################################
def _encrypt_secrets(
    self: 'ParallelServer' # type: ignore
) -> 'None':
    """ All passwords are always encrypted so we need to look up any that are not,
    for instance, because it is a cluster newly migrated from 2.0 to 3.0, and encrypt them now in ODB.
    """
    # Security definition types whose secrets may require encryption
    sec_config_dict_types = (
        'apikey', 'aws', 'basic_auth', 'jwt', 'ntlm', 'oauth', 'tls_key_cert', 'vault_conn_sec'
    )

    # Global lock to make sure only one server attempts to do it at a time
    with self.zato_lock_manager('zato_encrypt_secrets'):

        # An SQL session shared by all updates
        with closing(self.odb.session()) as session:

            # Iterate over all security definitions
            for sec_config_dict_type in sec_config_dict_types:
                config_dicts = getattr(self.config, sec_config_dict_type)
                for config in config_dicts.values():
                    config = config['config']

                    # Continue to encryption only if needed and not already encrypted
                    if config.get('_encryption_needed'):
                        if not config['_encrypted_in_odb']:

                            # Each definition type has a dedicated migration method on ODB,
                            # looked up dynamically by its type name.
                            odb_func = getattr(self.odb, '_migrate_30_encrypt_sec_{}'.format(sec_config_dict_type))

                            # Encrypt all params that are applicable
                            for secret_param in SECRETS.PARAMS:
                                if secret_param in config:
                                    data = config[secret_param]
                                    if data:
                                        encrypted = self.encrypt(data)
                                        odb_func(session, config['id'], secret_param, encrypted)

                        # Clean up config afterwards - the markers are transient flags only
                        config.pop('_encryption_needed', None)
                        config.pop('_encrypted_in_odb', None)

            # Commit to SQL now that all updates are made
            session.commit()
# ################################################################################################################################
def _after_init_non_accepted(self, server:'ParallelServer') -> 'None':
raise NotImplementedError("This Zato version doesn't support join states other than ACCEPTED")
# ################################################################################################################################
# ################################################################################################################################
| 28,715
|
Python
|
.py
| 494
| 48.267206
| 130
| 0.54856
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,220
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
import os
from copy import deepcopy
from datetime import datetime, timedelta
from logging import DEBUG, INFO, WARN
from pathlib import Path
from platform import system as platform_system
from random import seed as random_seed
from tempfile import mkstemp
from traceback import format_exc
from uuid import uuid4
# gevent
from gevent import sleep
from gevent.lock import RLock
# Needed for Cassandra
import gevent.monkey # type: ignore
gevent.monkey # type: ignore
# Paste
from paste.util.converters import asbool
# Zato
from zato.broker import BrokerMessageReceiver
from zato.broker.client import BrokerClient
from zato.bunch import Bunch
from zato.common.api import API_Key, DATA_FORMAT, default_internal_modules, EnvFile, EnvVariable, GENERIC, HotDeploy, IPC, \
KVDB as CommonKVDB, RATE_LIMIT, SERVER_STARTUP, SEC_DEF_TYPE, SERVER_UP_STATUS, ZatoKVDB as CommonZatoKVDB, \
ZATO_ODB_POOL_NAME
from zato.common.audit import audit_pii
from zato.common.audit_log import AuditLog
from zato.common.bearer_token import BearerTokenManager
from zato.common.broker_message import HOT_DEPLOY, MESSAGE_TYPE
from zato.common.const import SECRETS
from zato.common.events.common import Default as EventsDefault
from zato.common.facade import SecurityFacade
from zato.common.ipc.api import IPCAPI
from zato.common.json_internal import dumps, loads
from zato.common.kv_data import KVDataAPI
from zato.common.kvdb.api import KVDB
from zato.common.marshal_.api import MarshalAPI
from zato.common.oauth import OAuthStore, OAuthTokenClient
from zato.common.odb.api import PoolStore
from zato.common.odb.post_process import ODBPostProcess
from zato.common.pubsub import SkipDelivery
from zato.common.rate_limiting import RateLimiting
from zato.common.typing_ import cast_, intnone, optional
from zato.common.util.api import absolutize, get_config_from_file, get_kvdb_config_for_log, get_user_config_name, \
fs_safe_name, hot_deploy, invoke_startup_services as _invoke_startup_services, make_list_from_string_list, new_cid, \
register_diag_handlers, save_ipc_pid_port, spawn_greenlet, StaticConfig
from zato.common.util.env import populate_environment_from_file
from zato.common.util.file_transfer import path_string_list_to_list
from zato.common.util.hot_deploy_ import extract_pickup_from_items
from zato.common.util.json_ import BasicParser
from zato.common.util.platform_ import is_posix
from zato.common.util.posix_ipc_ import ConnectorConfigIPC, ServerStartupIPC
from zato.common.util.time_ import TimeUtil
from zato.common.util.tcp import wait_until_port_taken
from zato.distlock import LockManager
from zato.server.base.parallel.config import ConfigLoader
from zato.server.base.parallel.http import HTTPHandler
from zato.server.base.parallel.subprocess_.api import CurrentState as SubprocessCurrentState, \
StartConfig as SubprocessStartConfig
from zato.server.base.parallel.subprocess_.ftp import FTPIPC
from zato.server.base.parallel.subprocess_.ibm_mq import IBMMQIPC
from zato.server.base.parallel.subprocess_.zato_events import ZatoEventsIPC
from zato.server.base.parallel.subprocess_.outconn_sftp import SFTPIPC
from zato.server.base.worker import WorkerStore
from zato.server.config import ConfigStore
from zato.server.connection.kvdb.api import KVDB as ZatoKVDB
from zato.server.connection.pool_wrapper import ConnectionPoolWrapper
from zato.server.connection.stats import ServiceStatsClient
from zato.server.connection.server.rpc.api import ConfigCtx as _ServerRPC_ConfigCtx, ServerRPC
from zato.server.connection.server.rpc.config import ODBConfigSource
from zato.server.groups.base import GroupsManager
from zato.server.groups.ctx import SecurityGroupsCtxBuilder
from zato.server.sso import SSOTool
# ################################################################################################################################
# ################################################################################################################################
# The imports below are needed at type-checking time only - the `if 0` guard means
# none of this executes at runtime, yet IDEs and static type checkers can still
# resolve the names used in string annotations throughout this module.
if 0:
    from bunch import Bunch as bunch_
    from zato.common.crypto.api import ServerCryptoManager
    from zato.common.ipc.client import IPCResponse
    from zato.common.odb.api import ODBManager
    from zato.common.odb.model import Cluster as ClusterModel
    from zato.common.typing_ import any_, anydict, anylist, anyset, callable_, dictlist, intset, listorstr, strdict, strbytes, \
        strlist, strorlistnone, strnone, strorlist, strset
    from zato.server.connection.cache import Cache, CacheAPI
    from zato.server.connection.connector.subprocess_.ipc import SubprocessIPC
    from zato.server.ext.zunicorn.arbiter import Arbiter
    from zato.server.ext.zunicorn.workers.ggevent import GeventWorker
    from zato.server.service.store import ServiceStore
    from zato.simpleio import SIOServerConfig
    from zato.server.generic.api.outconn.wsx.common import WSXCtx
    from zato.server.startup_callable import StartupCallableTool
    from zato.sso.api import SSOAPI

    # Re-bind the names so tools treat the imports above as used
    # NOTE(review): presumably here to silence unused-import warnings - confirm
    bunch_ = bunch_
    ODBManager = ODBManager
    ServerCryptoManager = ServerCryptoManager
    ServiceStore = ServiceStore
    SIOServerConfig = SIOServerConfig # type: ignore
    SSOAPI = SSOAPI # type: ignore
    StartupCallableTool = StartupCallableTool
    SubprocessIPC = SubprocessIPC
# ################################################################################################################################
# ################################################################################################################################
# General module logger and a dedicated one for key-value DB activity
logger = logging.getLogger(__name__)
kvdb_logger = logging.getLogger('zato_kvdb')

# ################################################################################################################################
# ################################################################################################################################

# A decimal megabyte (10^6 bytes)
megabyte = 10 ** 6

# ################################################################################################################################
# ################################################################################################################################

# Default timeout for IPC invocations
_ipc_timeout = IPC.Default.Timeout
# ################################################################################################################################
# ################################################################################################################################
class ParallelServer(BrokerMessageReceiver, ConfigLoader, HTTPHandler):
    """ Main server process.
    """
    # Core connections and configuration stores - these class-level annotations
    # are populated at runtime, mostly during server startup.
    odb: 'ODBManager'
    kvdb: 'KVDB'
    config: 'ConfigStore'
    crypto_manager: 'ServerCryptoManager'
    sql_pool_store: 'PoolStore'
    kv_data_api: 'KVDataAPI'

    # WSGI entry point, assigned at runtime
    on_wsgi_request: 'any_'

    cluster: 'ClusterModel'
    worker_store: 'WorkerStore'
    service_store: 'ServiceStore'
    rpc: 'ServerRPC'
    sso_api: 'SSOAPI'
    rate_limiting: 'RateLimiting'
    broker_client: 'BrokerClient'
    zato_lock_manager: 'LockManager'
    startup_callable_tool: 'StartupCallableTool'
    oauth_store: 'OAuthStore'
    bearer_token_manager: 'BearerTokenManager'
    security_facade: 'SecurityFacade'

    # If set, the server stops itself after this much time - TODO confirm unit against callers
    stop_after: 'intnone'

    # Directory to auto-deploy code and configuration from on startup, if any
    deploy_auto_from: 'str' = ''

    groups_manager: 'GroupsManager'
    security_groups_ctx_builder: 'SecurityGroupsCtxBuilder'
def __init__(self) -> 'None':
    """ Sets initial, mostly placeholder, values for all instance attributes.
    Real values are assigned later during the server startup sequence.
    """
    self.logger = logger
    self.host = ''
    self.port = -1
    self.use_tls = False
    self.is_starting_first = '<not-set>'
    self.odb_data = Bunch()
    self.repo_location = ''
    self.user_conf_location:'strlist' = []
    self.user_conf_location_extra:'strset' = set()
    self.soap11_content_type = ''
    self.soap12_content_type = ''
    self.plain_xml_content_type = ''
    self.json_content_type = ''
    self.service_modules = []
    self.service_sources = [] # Set in a config file
    self.base_dir = ''
    self.logs_dir = ''
    self.tls_dir = ''
    self.static_dir = ''
    self.json_schema_dir = 'server-'
    self.sftp_channel_dir = 'server-'
    self.hot_deploy_config = Bunch()
    self.fs_server_config = None # type: any_
    self.fs_sql_config = Bunch()
    self.pickup_config = Bunch()
    self.logging_config = Bunch()
    self.logging_conf_path = 'server-'
    self.sio_config = cast_('SIOServerConfig', None)
    self.sso_config = Bunch()
    self.connector_server_grace_time = None
    self.id = -1
    self.name = ''
    self.worker_id = ''
    self.worker_pid = -1
    self.cluster_id = -1
    self.cluster_name = ''
    self.startup_jobs = {}
    self.deployment_lock_expires = -1
    self.deployment_lock_timeout = -1
    self.deployment_key = ''
    self.has_gevent = True
    self.request_dispatcher_dispatch = cast_('callable_', None)
    self.delivery_store = None
    self.static_config = Bunch()
    self.component_enabled = Bunch()
    self.client_address_headers = ['HTTP_X_ZATO_FORWARDED_FOR', 'HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR']
    self.return_tracebacks = False
    self.default_error_message = ''
    self.time_util = TimeUtil()
    self.preferred_address = ''
    self.crypto_use_tls = False
    self.pid = -1
    self.sync_internal = False
    self.ipc_api = IPCAPI(self)
    self.fifo_response_buffer_size = -1
    self.is_first_worker = False
    self.process_idx = -1
    self.shmem_size = -1.0
    self.server_startup_ipc = ServerStartupIPC()
    self.connector_config_ipc = ConnectorConfigIPC()
    self.is_sso_enabled = False
    self.audit_pii = audit_pii
    self.has_fg = False
    self.env_file = ''
    self.env_variables_from_files:'strlist' = []
    self.default_internal_pubsub_endpoint_id = 0
    self.jwt_secret = b''
    self._hash_secret_method = ''
    self._hash_secret_rounds = -1
    self._hash_secret_salt_size = -1
    self.sso_tool = SSOTool(self)
    self.platform_system = platform_system().lower()
    self.has_posix_ipc = is_posix
    self.user_config = Bunch()
    self.stderr_path = ''
    self.work_dir = 'ParallelServer-work_dir'
    self.events_dir = 'ParallelServer-events_dir'
    self.kvdb_dir = 'ParallelServer-kvdb_dir'
    self.marshal_api = MarshalAPI()
    self.env_manager = None # This is taken from util/zato_environment.py:EnvironmentManager
    self.enforce_service_invokes = False
    self.json_parser = BasicParser()
    self.api_key_header = 'Zato-Default-Not-Set-API-Key-Header'
    self.api_key_header_wsgi = 'HTTP_' + self.api_key_header.upper().replace('-', '_')
    self.needs_x_zato_cid = False

    # A server-wide publication counter, indicating which one the current publication is,
    # increased after each successful publication.
    self.pub_counter = 1

    # A lock to guard the publication counter.
    self.pub_counter_lock = RLock()

    # Transient API for in-RAM messages
    self.zato_kvdb = ZatoKVDB()

    # In-RAM statistics
    self.slow_responses = self.zato_kvdb.internal_create_list_repo(CommonZatoKVDB.SlowResponsesName)
    self.usage_samples = self.zato_kvdb.internal_create_list_repo(CommonZatoKVDB.UsageSamplesName)
    self.current_usage = self.zato_kvdb.internal_create_number_repo(CommonZatoKVDB.CurrentUsageName)
    self.pub_sub_metadata = self.zato_kvdb.internal_create_object_repo(CommonZatoKVDB.PubSubMetadataName)

    # Statistics client - host/port are placeholders until configured
    self.stats_client = ServiceStatsClient()
    self._stats_host = '<ParallelServer-_stats_host>'
    self._stats_port = -1

    # Audit log
    self.audit_log = AuditLog()

    # Current state of subprocess-based connectors
    self.subproc_current_state = SubprocessCurrentState()

    # Our arbiter may potentially call the cleanup procedure multiple times
    # and this will be set to True the first time around.
    self._is_process_closing = False

    # Internal caches - not to be used by user services
    self.internal_cache_patterns = {}
    self.internal_cache_lock_patterns = RLock()

    # Allows users store arbitrary data across service invocations
    self.user_ctx = Bunch()
    self.user_ctx_lock = RLock()

    # Connectors
    self.connector_ftp = FTPIPC(self)
    self.connector_ibm_mq = IBMMQIPC(self)
    self.connector_sftp = SFTPIPC(self)
    self.connector_events = ZatoEventsIPC(self)

    # HTTP methods allowed as a Python list
    self.http_methods_allowed = []

    # As above, but as a regular expression pattern
    self.http_methods_allowed_re = ''

    # Access-log configuration - flags are cached here so hot paths avoid
    # repeated isEnabledFor calls.
    self.access_logger = logging.getLogger('zato_access_log')
    self.access_logger_log = self.access_logger._log
    self.needs_access_log = self.access_logger.isEnabledFor(INFO)
    self.needs_all_access_log = True
    self.access_log_ignore = set()
    self.rest_log_ignore = set()
    self.has_pubsub_audit_log = logging.getLogger('zato_pubsub_audit').isEnabledFor(DEBUG)
    self.is_enabled_for_warn = logging.getLogger('zato').isEnabledFor(WARN)
    self.is_admin_enabled_for_info = logging.getLogger('zato_admin').isEnabledFor(INFO)

    # A wrapper for outgoing WSX connections
    self.wsx_connection_pool_wrapper = ConnectionPoolWrapper(self, GENERIC.CONNECTION.TYPE.OUTCONN_WSX)

    # The main config store
    self.config = ConfigStore()
# ################################################################################################################################
def set_ipc_password(self, password:'str') -> 'None':
    """ Decrypts the input password and assigns it to the IPC API.
    """
    self.ipc_api.password = self.decrypt(password)
# ################################################################################################################################
def get_default_internal_pubsub_endpoint_id(self) -> 'int':
    """ Returns the ID of the default internal pub/sub endpoint.
    The value is 0 until self._after_init_accepted populates it with the real one.
    """
    return self.default_internal_pubsub_endpoint_id
# ################################################################################################################################
def deploy_missing_services(self, locally_deployed:'anylist') -> 'None':
    """ Deploys services that exist on other servers but not on ours.

    The locally_deployed list are all the services that we could import based on our current
    understanding of the contents of the cluster. However, it's possible that we have
    been shut down for a long time and during that time other servers deployed services
    we don't know anything about. They are not stored locally because we were down.
    Hence we need to check out if there are any other servers in the cluster and if so,
    grab their list of services, compare it with what we have deployed and deploy
    any that are missing.
    """
    # Continue only if there is more than one running server in the cluster.
    other_servers = self.odb.get_servers()

    if other_servers:
        other_server = other_servers[0] # Index 0 is as random as any other because the list is not sorted.
        missing = self.odb.get_missing_services(other_server, {item.name for item in locally_deployed})

        if missing:
            logger.info('Found extra services to deploy: %s', ', '.join(sorted(item.name for item in missing)))

            # (file_name, source_path) -> a list of services it contains
            modules:'strdict' = {}

            # Coalesce all service modules - it is possible that each one has multiple services
            # so we do want to deploy the same module over for each service found.
            for _ignored_service_id, name, source_path, source in missing:
                file_name = os.path.basename(source_path)

                # Module names are unique so they can serve as keys
                key = file_name

                if key not in modules:

                    # Create the temporary file only once per module - mkstemp returns
                    # an already-open OS-level descriptor which we must close ourselves,
                    # hence os.fdopen in a context manager below.
                    fd, tmp_full_path = mkstemp(suffix='-'+ file_name)

                    modules[key] = {
                        'tmp_full_path': tmp_full_path,
                        'services': [name] # We can append initial name already in this 'if' branch
                    }

                    # Save the source code only once here
                    with os.fdopen(fd, 'wb') as f:
                        _ = f.write(source)

                else:
                    modules[key]['services'].append(name)

            # Create a deployment package in ODB out of which all the services will be picked up ..
            for file_name, values in modules.items():
                msg:'bunch_' = Bunch()
                msg.action = HOT_DEPLOY.CREATE_SERVICE.value
                msg.msg_type = MESSAGE_TYPE.TO_PARALLEL_ALL
                msg.package_id = hot_deploy(self, file_name, values['tmp_full_path'], notify=False)

                # .. and tell the worker to actually deploy all the services the package contains.
                # gevent.spawn(self.worker_store.on_broker_msg_HOT_DEPLOY_CREATE_SERVICE, msg)
                self.worker_store.on_broker_msg_HOT_DEPLOY_CREATE_SERVICE(msg)

                logger.info('Deployed extra services found: %s', sorted(values['services']))
# ################################################################################################################################
def maybe_on_first_worker(self, server:'ParallelServer') -> 'anyset':
    """ This method will execute code with a distributed lock held. We need a lock because we can have multiple worker
    processes fighting over the right to redeploy services. The first worker to obtain the lock will actually perform
    the redeployment and set a flag meaning that for this particular deployment key (and remember that each server restart
    means a new deployment key) the services have been already deployed. Further workers will check that the flag exists
    and will skip the deployment altogether.

    Returns the set of services deployed locally by this worker.
    """
    def import_initial_services_jobs() -> 'anyset':

        # All non-internal services that we have deployed
        locally_deployed = []

        # Internal modules with that are potentially to be deployed
        internal_service_modules = []

        # This was added between 3.0 and 3.1, which is why it is optional
        deploy_internal = self.fs_server_config.get('deploy_internal', default_internal_modules)

        # Above, we potentially got the list of internal modules to be deployed as they were defined in server.conf.
        # However, if someone creates an environment and then we add a new module, this module will not necessarily
        # exist in server.conf. This is why we need to add any such missing ones explicitly below.
        for internal_module, is_enabled in default_internal_modules.items():
            if internal_module not in deploy_internal:
                deploy_internal[internal_module] = is_enabled

        # All internal modules were found, now we can build a list of what is to be enabled.
        for module_name, is_enabled in deploy_internal.items():
            if is_enabled:
                internal_service_modules.append(module_name)

        # Deploy internal services first ..
        locally_deployed.extend(self.service_store.import_internal_services(
            internal_service_modules, self.base_dir, self.sync_internal, cast_('bool', self.is_starting_first)))

        # .. followed by user-defined ones.
        logger.info('Deploying user-defined services (%s)', self.name)

        user_defined_deployed:'anylist' = self.service_store.import_services_from_anywhere(
            self.service_modules + self.service_sources, self.base_dir).to_process

        locally_deployed.extend(user_defined_deployed)
        len_user_defined_deployed = len(user_defined_deployed)

        suffix = '' if len_user_defined_deployed == 1 else 's'

        # This will be always logged ..
        logger.info('Deployed %d user-defined service%s (%s)', len_user_defined_deployed, suffix, self.name)

        # .. whereas details are optional.
        if asbool(os.environ.get('Zato_Log_User_Services_Deployed', False)):
            for item in sorted(elem.name for elem in user_defined_deployed):
                logger.info('Deployed user service: %s', item)

        return set(locally_deployed)

    # Both keys are scoped to this cluster's token and the current deployment key,
    # so every server restart starts the protocol anew.
    lock_name = '{}{}:{}'.format(
        CommonKVDB.LOCK_SERVER_STARTING, self.fs_server_config.main.token, self.deployment_key)
    already_deployed_flag = '{}{}:{}'.format(
        CommonKVDB.LOCK_SERVER_ALREADY_DEPLOYED, self.fs_server_config.main.token, self.deployment_key)

    logger.debug('Using lock_name: `%s`', lock_name)

    with self.zato_lock_manager(lock_name, ttl=self.deployment_lock_expires, block=self.deployment_lock_timeout):
        if self.kv_data_api.get(already_deployed_flag):
            # There has been already the first worker who's done everything there is to be done so we may just return.
            self.is_starting_first = False
            logger.debug('Not attempting to obtain the lock_name:`%s`', lock_name)

            # Simply deploy services, including any missing ones, the first worker has already cleared out the ODB
            locally_deployed = import_initial_services_jobs()

            return locally_deployed
        else:
            # We are this server's first worker so we need to re-populate
            # the database and create the flag indicating we're done.
            self.is_starting_first = True
            logger.debug('Got lock_name:`%s`, ttl:`%s`', lock_name, self.deployment_lock_expires)

            # .. Remove all the deployed services from the DB ..
            self.odb.drop_deployed_services(server.id)

            # .. deploy them back including any missing ones found on other servers.
            locally_deployed = import_initial_services_jobs()

            # Add the flag to Redis indicating that this server has already
            # deployed its services. Note that by default the expiration
            # time is more than a century in the future. It will be cleared out
            # next time the server will be started.
            self.kv_data_api.set(
                already_deployed_flag,
                dumps({'create_time_utc':datetime.utcnow().isoformat()}),
                self.deployment_lock_expires,
            )

            return locally_deployed
# ################################################################################################################################
def get_full_name(self) -> 'str':
    """ Returns this server's full name in the form of server@cluster.
    """
    return f'{self.name}@{self.cluster_name}'
# ################################################################################################################################
def add_pickup_conf_from_env(self) -> 'None':
    """ Adds hot-deployment locations pointed to by environment variables,
    matching both exact variable names and well-known name prefixes.
    """
    # Exact environment variable names recognized ..
    exact_names = ['Zato_Project_Root', 'Zato_Hot_Deploy_Dir', 'ZATO_HOT_DEPLOY_DIR']

    # .. and recognized name prefixes.
    prefixes = ['Zato_Project_Root_', 'Zato_Hot_Deploy_Dir_']

    # First, check the exact names ..
    for name in exact_names:
        path = os.environ.get(name, '')
        if path:
            self.add_pickup_conf_from_local_path(path, name)

    # .. then, for each prefix, check every environment variable against it.
    for prefix in prefixes:
        for name, path in os.environ.items():
            if name.startswith(prefix):
                self.add_pickup_conf_from_local_path(path, name)
# ################################################################################################################################
def add_pickup_conf_from_auto_deploy(self) -> 'None':
    """ Makes the 'code' subdirectory of the auto-deployment location hot-deployable.
    """
    code_dir = os.path.join(self.deploy_auto_from, 'code')
    self.add_pickup_conf_from_local_path(code_dir, 'AutoDeploy')
# ################################################################################################################################
def add_pickup_conf_from_local_path(self, paths:'str', source:'str', path_patterns:'strorlistnone'=None) -> 'None':
    """ Adds hot-deployment configuration based on one or more colon-separated local paths,
    together with any user-config files found underneath them.
    """
    # Bunch
    from bunch import bunchify

    # Local variables
    path_patterns = path_patterns or HotDeploy.Default_Patterns
    path_patterns = path_patterns if isinstance(path_patterns, list) else [path_patterns] # type: ignore

    # We have hot-deployment configuration to process ..
    if paths:

        # .. log what we are about to do ..
        msg = f'Processing hot-deployment configuration paths `{paths!r}` (source -> {source})'
        logger.info(msg)

        # .. support multiple entries ..
        paths = make_list_from_string_list(paths, ':') # type: ignore

        # .. add the actual configuration ..
        for path in paths:

            # .. make sure the path is actually given on input, e.g. it is not None or '' ..
            if not path:
                msg = f'Ignoring empty hot-deployment configuration path `{path}` (source -> {source})'
                logger.info(msg)
                continue

            # .. log what we are about to do ..
            msg = f'Adding hot-deployment configuration from `{path}` (source -> {source})'
            logger.info(msg)

            # .. stay on the safe side because, here, we do not know where it will be used ..
            _fs_safe_name = fs_safe_name(path)

            # .. use this prefix to indicate that it is a directory to hot-deploy from ..
            key_name = '{}.{}'.format(HotDeploy.UserPrefix, _fs_safe_name)

            # .. store the configuration for later use now ..
            pickup_from = {
                'pickup_from': path
            }
            self.pickup_config[key_name] = bunchify(pickup_from)

            # .. go through all the path patterns that point to user configuration (including enmasse) ..
            for path_pattern in path_patterns:

                # .. get a list of matching paths (searched recursively) ..
                user_conf_paths = Path(path).rglob(path_pattern)
                user_conf_paths = list(user_conf_paths)

                # .. go through all the paths that matched ..
                for user_conf_path in user_conf_paths:

                    # .. and add each of them to hot-deployment.
                    self._add_user_conf_from_path(str(user_conf_path), source)
# ################################################################################################################################
def add_user_conf_from_env(self) -> 'None':
    """ Adds user-config directories pointed to by environment variables.
    Both the mixed-case and upper-case variable names are recognized.
    """
    # Each of these environment variables may point to user-config directories.
    # Note that the loop below consults both of them, so no separate
    # pre-lookup is needed (a previous version read the variables up front
    # and then discarded the result).
    env_keys = ['Zato_User_Conf_Dir', 'ZATO_USER_CONF_DIR']

    # Go through all the possible environment keys ..
    for key in env_keys:

        # .. if we have user-config details to process ..
        if paths := os.environ.get(key, ''):

            # .. support multiple entries ..
            paths = paths.split(':')
            paths = [elem.strip() for elem in paths]

            # .. and add the actual configuration.
            for path in paths:
                source = f'env. variable found -> {key}'
                self._add_user_conf_from_path(path, source)
# ################################################################################################################################
def _add_user_conf_from_path(self, path:'str', source:'str') -> 'None':
    """ Registers a single local path as a source of user configuration files.
    """
    # Bunch
    from bunch import bunchify

    # Only files with these extensions are taken into account
    suffixes = ['ini', 'conf', 'yaml', 'yml']
    patterns = ['*.' + elem for elem in suffixes]
    patterns_str = ', '.join(patterns)

    # Log what we are about to do ..
    msg = f'Adding user-config from `{path}` ({source})'
    logger.info(msg)

    # .. if this is an existing directory with at least one matching file,
    # .. remember the path so its contents are loaded on startup ..
    if os.path.exists(path) and os.path.isdir(path):
        for file_item in os.listdir(path):
            if file_item.endswith(tuple(suffixes)):
                self.user_conf_location_extra.add(path)

    # .. stay on the safe side because, here, we do not know where it will be used ..
    _fs_safe_name = fs_safe_name(path)

    # .. use this prefix to indicate that it is a directory to deploy user configuration from ..
    key_name = '{}.{}'.format(HotDeploy.UserConfPrefix, _fs_safe_name)

    # .. enmasse files have a dedicated service, other files use the generic user-config one ..
    if 'enmasse' in path:
        service = 'zato.pickup.update-enmasse'
    else:
        service = 'zato.pickup.update-user-conf'

    # .. and store the configuration for later use now.
    self.pickup_config[key_name] = bunchify({
        'pickup_from': path,
        'patterns': patterns_str,
        'parse_on_pickup': False,
        'delete_after_pickup': False,
        'services': service,
    })
# ################################################################################################################################
def add_pickup_conf_from_env_variables(self) -> 'None':
    """ Collects pickup configuration from environment variables -
    first code hot-deployment locations, then user-config directories.
    """
    self.add_pickup_conf_from_env()
    self.add_user_conf_from_env()
# ################################################################################################################################
def add_pickup_conf_for_env_file(self) -> 'None':
    """ If a file with environment variables is configured, listens for changes to its contents
    by adding its grandparent directory to hot-deployment.
    """
    # Nothing to do if there is no env. file at all
    if not self.env_file:
        return

    # We need an absolute path to be able to watch the file reliably
    if not os.path.isabs(self.env_file):
        logger.info(f'Env. file is not an absolute path, hot-deployment will not be enabled -> `{self.env_file}')
        return

    # Extract the directory the file is in ..
    parent_dir = Path(os.path.dirname(self.env_file))
    parent_dir_name = parent_dir.name

    # .. and extract its own parent as well because this is needed in the call below ..
    grand_parent_dir = os.path.dirname(parent_dir)

    # .. and add it to hot-deployment, using the file's directory name as the match pattern.
    self.add_pickup_conf_from_local_path(grand_parent_dir, 'EnvFile', parent_dir_name)
# ################################################################################################################################
def update_environment_variables_from_file(self, file_path:'str') -> 'None':
    """ Reloads environment variables from a file, first removing the ones loaded previously.
    """
    # The variables loaded from files previously are to be deleted ..
    previous = deepcopy(self.env_variables_from_files)

    # .. load the new batch, deleting the old one along the way,
    # .. and remember the result for the next call.
    self.env_variables_from_files = populate_environment_from_file(file_path, to_delete=previous, use_print=False)
# ################################################################################################################################
def add_wsx_gateway_service_allowed(self) -> 'None':
    """ Extends the list of services allowed for WSX gateways with entries
    from the Zato_WSX_Gateway_Service_Allowed environment variable.
    """
    env_value = os.environ.get('Zato_WSX_Gateway_Service_Allowed', '')
    if env_value:

        # Make sure the underlying configuration list exists ..
        pubsub_config = self.fs_server_config.pubsub
        pubsub_config.wsx_gateway_service_allowed = pubsub_config.wsx_gateway_service_allowed or []

        # .. turn the comma-separated value into a clean list ..
        extra = [elem.strip() for elem in env_value.split(',') if elem]

        # .. and append it to the configuration.
        _ = pubsub_config.wsx_gateway_service_allowed.extend(extra)
# ################################################################################################################################
def _after_init_common(self, server:'ParallelServer') -> 'anyset':
    """ Initializes parts of the server that don't depend on whether the server's been allowed to join the cluster or not.

    Returns the set of services deployed locally (from self.maybe_on_first_worker).
    """
    def _normalize_service_source_path(name:'str') -> 'str':
        # Relative entries are resolved against the server's base directory
        if not os.path.isabs(name):
            name = os.path.normpath(os.path.join(self.base_dir, name))
        return name
    # Patterns to match during deployment
    self.service_store.patterns_matcher.read_config(self.fs_server_config.deploy_patterns_allowed)
    # Static config files
    self.static_config = StaticConfig(self.static_dir)
    # SSO e-mail templates
    self.static_config.read_directory(os.path.join(self.static_dir, 'sso', 'email'))
    # Key-value DB
    kvdb_config:'bunch_' = get_kvdb_config_for_log(self.fs_server_config.kvdb)
    kvdb_logger.info('Worker config `%s`', kvdb_config)
    self.kvdb.config = self.fs_server_config.kvdb
    self.kvdb.server = self
    self.kvdb.decrypt_func = self.crypto_manager.decrypt
    kvdb_logger.info('Worker config `%s`', kvdb_config)
    if self.fs_server_config.kvdb.host:
        self.kvdb.init()
    # Whether to add X-Zato-CID to outgoing responses
    needs_x_zato_cid = self.fs_server_config.misc.get('needs_x_zato_cid') or False
    self.needs_x_zato_cid = needs_x_zato_cid
    # New in 3.1, it may be missing in the config file
    if not self.fs_server_config.misc.get('sftp_genkey_command'):
        self.fs_server_config.misc.sftp_genkey_command = 'dropbearkey'
    # New in 3.2, may be missing in the config file
    allow_internal:'listorstr' = self.fs_server_config.misc.get('service_invoker_allow_internal', [])
    allow_internal = allow_internal if isinstance(allow_internal, list) else [allow_internal]
    self.fs_server_config.misc.service_invoker_allow_internal = allow_internal
    # Service sources from server.conf - read within a context manager so the file
    # is closed deterministically instead of relying on garbage collection.
    service_sources_path = os.path.join(self.repo_location, self.fs_server_config.main.service_sources)
    with open(service_sources_path) as service_sources_file:
        for name in service_sources_file:
            name = name.strip()
            if name and not name.startswith('#'):
                name = _normalize_service_source_path(name)
                self.service_sources.append(name)
    # Look up pickup configuration among environment variables
    # and add anything found to self.pickup_config.
    self.add_pickup_conf_from_env_variables()
    # Look up pickup configuration based on what should be auto-deployed on startup.
    if self.deploy_auto_from:
        self.add_pickup_conf_from_auto_deploy()
    # If we have a file with environment variables on input,
    # pick up changes to this file too.
    if self.env_file:
        self.add_pickup_conf_for_env_file()
    # Append additional services that can be invoked through WebSocket gateways.
    self.add_wsx_gateway_service_allowed()
    # Service sources from user-defined hot-deployment configuration ..
    for pickup_from in extract_pickup_from_items(self.base_dir, self.pickup_config, HotDeploy.Source_Directory):
        # .. log what we are about to do ..
        if isinstance(pickup_from, list):
            for project in pickup_from:
                for item in project.pickup_from_path:
                    logger.info('Adding hot-deployment directory `%s` (HotDeploy.UserPrefix->Project)', item)
        else:
            logger.info('Adding hot-deployment directory `%s` (HotDeploy.UserPrefix->Path)', pickup_from)
        # .. and do append it for later use ..
        self.service_sources.append(pickup_from)
    # Read all the user config files that are already available on startup
    self.read_user_config()
    # Convert size of FIFO response buffers to megabytes
    self.fifo_response_buffer_size = int(float(self.fs_server_config.misc.fifo_response_buffer_size) * megabyte)
    locally_deployed = self.maybe_on_first_worker(server)
    return locally_deployed
# ################################################################################################################################
def _read_user_config_from_directory(self, dir_name:'str') -> 'None':
    """ Loads user configuration from every recognized config file in the given directory.
    """
    # Only these suffixes are read here. Note that enmasse files (.yaml / .yml)
    # are deliberately skipped, even though directories with such files
    # may appear among self.user_conf_location_extra.
    suffixes = ('.ini', '.conf')
    for file_name in os.listdir(dir_name):
        # Skip files that actually contain environment variables
        if file_name == EnvFile.Default:
            continue
        # Skip files whose suffix we do not recognize
        if not file_name.lower().endswith(suffixes):
            continue
        full_path = os.path.join(dir_name, file_name)
        config_name = get_user_config_name(file_name)
        conf = get_config_from_file(full_path, file_name)
        # This key plays no role in this type of configuration
        _:'any_' = conf.pop('user_config_items', None)
        self.user_config[config_name] = conf
        logger.info('Read user config `%s` from `%s` (dir:%s)', config_name, file_name, dir_name)
# ################################################################################################################################
def read_user_config(self):
    """ Reads user config files from the default location as well as from
    any extra directories pointed to by ZATO_USER_CONF_DIR.
    """
    dir_name:'str'
    # The default location is processed first, extra directories second
    for location in (self.user_conf_location, self.user_conf_location_extra):
        for dir_name in location:
            self._read_user_config_from_directory(dir_name)
# ################################################################################################################################
def set_up_user_config_location(self) -> 'strlist':
    """ Returns the list of user-config directories based on the user_conf pickup entry.
    """
    pickup_from:'str' = self.pickup_config.get('user_conf', {}).get('pickup_from', '')
    return path_string_list_to_list(self.base_dir, pickup_from)
# ################################################################################################################################
def set_up_odb(self) -> 'None':
    """ Creates the server's SQLAlchemy connection pool and attaches it to self.odb.
    """
    odb_data = self.config.odb_data
    odb_data['fs_sql_config'] = self.fs_sql_config
    # Storing the configuration in the pool store is what creates the actual pool
    self.sql_pool_store[ZATO_ODB_POOL_NAME] = odb_data
    self.odb.pool = self.sql_pool_store[ZATO_ODB_POOL_NAME].pool
    self.odb.token = odb_data.token.decode('utf8')
    self.odb.decrypt_func = self.decrypt
# ################################################################################################################################
def build_server_rpc(self) -> 'ServerRPC':
    """ Builds and returns the publicly available server-to-server RPC client.
    """
    # The ODB is where our configuration comes from ..
    source = ODBConfigSource(self.odb, self.cluster_name, self.name, self.decrypt)
    # .. combine it with runtime configuration ..
    ctx = _ServerRPC_ConfigCtx(source, self)
    # .. and build the RPC client on top of both.
    return ServerRPC(ctx)
# ################################################################################################################################
def _run_stats_client(self, events_tcp_port:'int') -> 'None':
self.stats_client.init('127.0.0.1', events_tcp_port)
self.stats_client.run()
# ################################################################################################################################
def handle_enmasse_auto_from(self) -> 'None':
    """ Runs enmasse asynchronously with each file found in the auto-deploy enmasse directory.
    """
    # Zato
    from zato.server.commands import CommandsFacade
    commands = CommandsFacade()
    commands.init(self)
    # Full path to the directory with enmasse files ..
    enmasse_dir = Path(os.path.join(self.deploy_auto_from, 'enmasse'))
    # .. run enmasse with each file found there, which is equivalent to:
    # enmasse --import --replace-odb-objects --input ./zato-export.yml /path/to/server/
    for file_path in sorted(enmasse_dir.iterdir()):
        _ = commands.run_enmasse_async(file_path)
# ################################################################################################################################
def log_environment_details(self):
    """ Logs the contents of the environment-details JSON file produced by
    quickstart-style (e.g. Docker) environments, if the file exists.

    Logging is on by default - it is skipped only when the EnvVariable.Log_Env_Details
    environment variable is explicitly set to a false value.
    """
    # Note: the previous form `if log_details := os.environ.get(...) or True:` was
    # always truthy, making the outer condition dead code - simplified here
    # without changing behavior.
    log_details = os.environ.get(EnvVariable.Log_Env_Details) or True
    if asbool(log_details):
        # The details file may legitimately be missing - only log when it exists
        path = os.path.join('~', 'env', 'details', 'all-zato-env-details.json')
        path = os.path.expanduser(path)
        if os.path.exists(path):
            with open(path) as f:
                data = f.read()
            self.logger.info(f'Environment details:\n{data}')
# ################################################################################################################################
@staticmethod
def start_server(parallel_server:'ParallelServer', zato_deployment_key:'str'='') -> 'None':
    """ Starts a server process - binds it to the ODB, loads configuration, sets up
    IPC and subprocess connectors, deploys services and invokes startup services.
    """
    # Easier to type
    self = parallel_server
    # This cannot be done in __init__ because each sub-process obviously has its own PID
    self.pid = os.getpid()
    # This also cannot be done in __init__ which doesn't have this variable yet
    self.process_idx = int(os.environ['ZATO_SERVER_WORKER_IDX'])
    self.is_first_worker = self.process_idx == 0
    # Used later on
    use_tls = asbool(self.fs_server_config.crypto.use_tls)
    # This changed in 3.2 so we need to take both into account
    self.work_dir = self.fs_server_config.main.get('work_dir') or self.fs_server_config.hot_deploy.get('work_dir')
    self.work_dir = os.path.normpath(os.path.join(self.repo_location, self.work_dir))
    # Make sure the directories for events exists
    events_dir_v1 = os.path.join(self.work_dir, 'events', 'v1')
    for name in 'v1', 'v2':
        full_path = os.path.join(self.work_dir, 'events', name)
        if not os.path.exists(full_path):
            os.makedirs(full_path, mode=0o770, exist_ok=True)
    # Set for later use - this is the version that we currently employ and we know that it exists.
    self.events_dir = events_dir_v1
    # Will be None if we are not running in background.
    if not zato_deployment_key:
        zato_deployment_key = '{}.{}'.format(datetime.utcnow().isoformat(), uuid4().hex)
    # Each time a server starts a new deployment key is generated to uniquely
    # identify this particular time the server is running.
    self.deployment_key = zato_deployment_key
    # This is to handle SIGURG signals.
    if is_posix:
        register_diag_handlers()
    # Configure paths and load data pertaining to Zato KVDB
    self.set_up_zato_kvdb()
    # Find out if we are on a platform that can handle our posix_ipc
    _skip_platform:'listorstr' = self.fs_server_config.misc.get('posix_ipc_skip_platform')
    _skip_platform = _skip_platform if isinstance(_skip_platform, list) else [_skip_platform]
    _skip_platform = [elem for elem in _skip_platform if elem]
    self.fs_server_config.misc.posix_ipc_skip_platform = _skip_platform
    # Create all POSIX IPC objects now that we have the deployment key,
    # but only if our platform allows it.
    if self.has_posix_ipc:
        self.shmem_size = int(float(self.fs_server_config.shmem.size) * 10**6) # Convert to megabytes as integer
        self.server_startup_ipc.create(self.deployment_key, self.shmem_size)
        self.connector_config_ipc.create(self.deployment_key, self.shmem_size)
    else:
        self.server_startup_ipc = None
        self.connector_config_ipc = None
    # Store the ODB configuration, create an ODB connection pool and have self.odb use it
    self.config.odb_data = self.get_config_odb_data(self)
    self.set_up_odb()
    # Now try grabbing the basic server's data from the ODB. No point
    # in doing anything else if we can't get past this point.
    server:'any_' = self.odb.fetch_server(self.config.odb_data)
    if not server:
        raise Exception('Server does not exist in the ODB')
    # Set up the server-wide default lock manager
    odb_data:'bunch_' = self.config.odb_data
    if is_posix:
        backend_type:'str' = 'fcntl' if odb_data.engine == 'sqlite' else odb_data.engine
    else:
        backend_type = 'zato-pass-through'
    self.zato_lock_manager = LockManager(backend_type, 'zato', self.odb.session)
    # Just to make sure distributed locking is configured correctly
    with self.zato_lock_manager(uuid4().hex):
        pass
    # Basic metadata
    self.id = server.id
    self.name = server.name
    self.cluster = self.odb.cluster
    self.cluster_id = self.cluster.id
    self.cluster_name = self.cluster.name
    self.worker_id = '{}.{}.{}.{}'.format(self.cluster_id, self.id, self.worker_pid, new_cid())
    # SQL post-processing
    ODBPostProcess(self.odb.session(), None, self.cluster_id).run()
    # Set up SQL-based key/value API
    self.kv_data_api = KVDataAPI(cast_('int', self.cluster_id), self.odb)
    # Looked up upfront here and assigned to services in their store
    self.enforce_service_invokes = asbool(self.fs_server_config.misc.enforce_service_invokes)
    # For server-to-server RPC
    self.rpc = self.build_server_rpc()
    logger.info(
        'Preferred address of `%s@%s` (pid: %s) is `http%s://%s:%s`',
        self.name, self.cluster_name, self.pid, 's' if use_tls else '', self.preferred_address, self.port)
    # Configure which HTTP methods can be invoked via REST or SOAP channels
    methods_allowed = self.fs_server_config.http.methods_allowed
    methods_allowed:'strorlist' = methods_allowed if isinstance(methods_allowed, list) else [methods_allowed]
    self.http_methods_allowed.extend(methods_allowed)
    # As above, as a regular expression to be used in pattern matching
    http_methods_allowed_re = '|'.join(self.http_methods_allowed)
    self.http_methods_allowed_re = '({})'.format(http_methods_allowed_re)
    # Reads in all configuration from ODB
    self.worker_store = WorkerStore(self.config, self)
    self.worker_store.invoke_matcher.read_config(self.fs_server_config.invoke_patterns_allowed)
    self.worker_store.target_matcher.read_config(self.fs_server_config.invoke_target_patterns_allowed)
    self.set_up_config(server) # type: ignore
    # Normalize hot-deploy configuration
    self.hot_deploy_config = Bunch()
    self.hot_deploy_config.pickup_dir = absolutize(self.fs_server_config.hot_deploy.pickup_dir, self.repo_location)
    self.hot_deploy_config.work_dir = self.work_dir
    self.hot_deploy_config.backup_history = int(self.fs_server_config.hot_deploy.backup_history)
    self.hot_deploy_config.backup_format = self.fs_server_config.hot_deploy.backup_format
    # The first name was used prior to v3.2, note pick_up vs. pickup.
    # Fixed: this previously tested the truthiness of the literal string itself
    # (always True) instead of checking whether the key exists in the configuration.
    if 'delete_after_pick_up' in self.fs_server_config.hot_deploy:
        delete_after_pickup = self.fs_server_config.hot_deploy.get('delete_after_pick_up')
    else:
        delete_after_pickup = self.fs_server_config.hot_deploy.get('delete_after_pickup')
    self.hot_deploy_config.delete_after_pickup = delete_after_pickup
    # Added in 3.1, hence optional
    max_batch_size = int(self.fs_server_config.hot_deploy.get('max_batch_size', 1000))
    # Turn it into megabytes
    max_batch_size = max_batch_size * 1000
    # Finally, assign it to ServiceStore
    self.service_store.max_batch_size = max_batch_size
    # Rate limiting
    self.rate_limiting = RateLimiting()
    self.rate_limiting.cluster_id = cast_('int', self.cluster_id)
    self.rate_limiting.global_lock_func = self.zato_lock_manager
    self.rate_limiting.sql_session_func = self.odb.session
    # Set up rate limiting for ConfigDict-based objects, which includes everything except for:
    # * services - configured in ServiceStore
    # * SSO - configured in the next call
    self.set_up_rate_limiting()
    # Rate limiting for SSO
    self.set_up_sso_rate_limiting()
    # API keys configuration
    self.set_up_api_key_config()
    # Some parts of the worker store's configuration are required during the deployment of services
    # which is why we are doing it here, before worker_store.init() is called.
    self.worker_store.early_init()
    # Deploys services
    locally_deployed = self._after_init_common(server) # type: ignore
    # Build objects responsible for groups
    self.groups_manager = GroupsManager(self)
    self.security_groups_ctx_builder = SecurityGroupsCtxBuilder(self)
    # Initializes worker store, including connectors
    self.worker_store.init()
    self.request_dispatcher_dispatch = self.worker_store.request_dispatcher.dispatch
    # Configure remaining parts of SSO
    self.configure_sso()
    # Security facade wrapper
    self.security_facade = SecurityFacade(self)
    # Configure the store to obtain OAuth tokens through
    self.set_up_oauth_store()
    # Bearer tokens (OAuth)
    self.bearer_token_manager = BearerTokenManager(self)
    # Cannot be done in __init__ because self.sso_config is not available there yet
    salt_size:'int' = self.sso_config.hash_secret.salt_size
    self.crypto_manager.add_hash_scheme('zato.default', self.sso_config.hash_secret.rounds, salt_size)
    # Support pre-3.x hot-deployment directories
    for name in ('current_work_dir', 'backup_work_dir', 'last_backup_work_dir', 'delete_after_pickup'):
        # New in 2.0
        if name == 'delete_after_pickup':
            # For backward compatibility, we need to support both names
            old_name = 'delete_after_pick_up'
            if old_name in self.fs_server_config.hot_deploy:
                _name = old_name
            else:
                _name = name
            value = asbool(self.fs_server_config.hot_deploy.get(_name, True))
            self.hot_deploy_config[name] = value
        else:
            self.hot_deploy_config[name] = os.path.normpath(os.path.join(
                self.hot_deploy_config.work_dir, self.fs_server_config.hot_deploy[name]))
    self.broker_client = BrokerClient(
        server_rpc=self.rpc, zato_client=None, scheduler_config=self.fs_server_config.scheduler)
    self.worker_store.set_broker_client(self.broker_client)
    self._after_init_accepted(locally_deployed)
    self.odb.server_up_down(
        server.token, SERVER_UP_STATUS.RUNNING, True, self.host, self.port, self.preferred_address, use_tls)
    # These flags are needed if we are the first worker or not
    has_ibm_mq = bool(self.worker_store.worker_config.definition_wmq.keys()) \
        and self.fs_server_config.component_enabled.ibm_mq
    has_sftp = bool(self.worker_store.worker_config.out_sftp.keys())
    has_stats = self.fs_server_config.component_enabled.stats
    subprocess_start_config = SubprocessStartConfig()
    subprocess_start_config.has_ibm_mq = has_ibm_mq
    subprocess_start_config.has_sftp = has_sftp
    subprocess_start_config.has_stats = has_stats
    # Directories for SSH keys used by SFTP channels
    self.sftp_channel_dir = os.path.join(self.repo_location, 'sftp', 'channel')
    # This is the first process
    if self.is_starting_first:
        logger.info('First worker of `%s` is %s', self.name, self.pid)
        self.startup_callable_tool.invoke(SERVER_STARTUP.PHASE.IN_PROCESS_FIRST, kwargs={
            'server': self,
        })
        # Clean up any old WSX connections possibly registered for this server
        # which may be still lingering around, for instance, if the server was previously
        # shut down forcibly and did not have an opportunity to run self.cleanup_on_stop
        self.cleanup_wsx()
        # Startup services
        self.invoke_startup_services()
        # Local file-based configuration to apply
        try:
            self.apply_local_config()
        except Exception as e:
            logger.info('Exception while applying local config -> %s', e)
        # Subprocess-based connectors
        if self.has_posix_ipc:
            self.init_subprocess_connectors(subprocess_start_config)
        # SFTP channels are new in 3.1 and the directories may not exist
        if not os.path.exists(self.sftp_channel_dir):
            os.makedirs(self.sftp_channel_dir)
    # These are subsequent processes
    else:
        self.startup_callable_tool.invoke(SERVER_STARTUP.PHASE.IN_PROCESS_OTHER, kwargs={
            'server': self,
        })
        if self.has_posix_ipc:
            self._populate_connector_config(subprocess_start_config)
    # Stops the environment after N seconds
    if self.stop_after:
        _ = spawn_greenlet(self._stop_after_timeout)
    # Per-process IPC tasks
    self.init_ipc()
    if is_posix:
        connector_config_ipc = cast_('ConnectorConfigIPC', self.connector_config_ipc)
        if self.component_enabled['stats']:
            # Statistics
            events_config = cast_('anydict', connector_config_ipc.get_config(ZatoEventsIPC.ipc_config_name, as_dict=True))
            events_tcp_port = events_config['port']
            self._run_stats_client(events_tcp_port)
    # Invoke startup callables
    self.startup_callable_tool.invoke(SERVER_STARTUP.PHASE.AFTER_STARTED, kwargs={
        'server': self,
    })
    # The server is started so we can deploy what we were told to handle on startup,
    # assuming that we are the first process in this server.
    if self.is_starting_first:
        if self.deploy_auto_from:
            self.handle_enmasse_auto_from()
    # Optionally, if we appear to be a Docker quickstart environment, log all details about the environment.
    self.log_environment_details()
    logger.info('Started `%s@%s` (pid: %s)', server.name, server.cluster.name, self.pid)
# ################################################################################################################################
def set_scheduler_address(self, scheduler_address:'str') -> 'None':
    """ Passes the scheduler's address on to our broker client.
    """
    broker_client = self.broker_client
    broker_client.set_scheduler_address(scheduler_address)
# ################################################################################################################################
def init_ipc(self) -> 'None':
    """ Starts this process's IPC server and records which TCP port it listens on.

    Credentials come from the environment - the password is decrypted before use.
    The bind port is derived from the configured start port plus this process's index,
    so each worker listens on its own port.
    """
    # Name of the environment key that points to our password ..
    _ipc_password_key = IPC.Credentials.Password_Key
    # .. which we can extract ..
    ipc_password = os.environ[_ipc_password_key]
    # .. and decrypt it ..
    ipc_password = self.decrypt(ipc_password)
    # .. this is the same for all processes ..
    bind_host = self.fs_server_config.main.get('ipc_host') or '0.0.0.0'
    # .. this is set to a different value for each process ..
    bind_port = (self.fs_server_config.main.get('ipc_port_start') or IPC.Default.TCP_Port_Start) + self.process_idx
    # .. now, the IPC server can be started (in a greenlet, so this call does not block) ..
    _:'any_' = spawn_greenlet(self.ipc_api.start_server,
        self.pid,
        self.base_dir,
        bind_host=bind_host,
        bind_port=bind_port,
        username=IPC.Credentials.Username,
        password=ipc_password,
        callback_func=self.on_ipc_invoke_callback,
    )
    # .. we can now store the information about what IPC port to use with this PID.
    save_ipc_pid_port(self.cluster_name, self.name, self.pid, bind_port)
# ################################################################################################################################
def _stop_after_timeout(self) -> 'None':
    """ Waits until self.stop_after seconds have elapsed and then sends SIGKILL (9)
    to every Zato- or gunicorn-related process found on the system, finishing
    with our own process. Used to shut down time-limited environments.
    """
    # psutil
    import psutil
    now = datetime.utcnow()
    stop_at = now + timedelta(seconds=cast_('int', self.stop_after))
    # Busy-wait in one-second steps until the deadline is reached
    while now < stop_at:
        logger.info(f'Now is {now}; waiting to stop until {stop_at}')
        now = datetime.utcnow()
        sleep(1)
    logger.info(f'Stopping Zato after {self.stop_after}s')
    # All the pids that we will stop
    to_stop:'intset' = set()
    # Details of each process
    details = {}
    # Our own PID
    our_pid = os.getpid()
    # If a pid has any of these names in its name or command line,
    # we consider it a process that will be stopped.
    to_include = ['zato', 'gunicorn']
    for proc in list(psutil.process_iter(['pid', 'name'])):
        proc_name = proc.name()
        proc_cmd_line = ' '.join(proc.cmdline())
        for item in to_include:
            if (item in proc_name) or (item in proc_cmd_line):
                to_stop.add(proc.pid)
                details[proc.pid] = f'{proc_name}; {proc_cmd_line}'
                logger.info('Found PID: %s; Name: %s; Cmd. line: %s', proc.pid, proc_name, proc_cmd_line)
                break
    logger.info('Pids collected: %s; our PID: %s', to_stop, our_pid)
    # Remove our PID so that we do not stop ourselves too early
    to_stop.remove(our_pid)
    # Now, we can stop all the other processes
    for pid in to_stop:
        logger.info('Stopping PID %s (%s)', pid, details[pid])
        os.kill(pid, 9)
    # Finally, we can stop ourselves
    os.kill(our_pid, 9)
# ################################################################################################################################
def _populate_connector_config(self, config:'SubprocessStartConfig') -> 'None':
    """ Called when we are not the first worker and, if any connector is enabled,
    we need to get its configuration through IPC and populate our own accordingly.
    """
    # Map each connector's IPC config name to whether that connector is enabled
    ipc_config_name_to_enabled = {
        IBMMQIPC.ipc_config_name: config.has_ibm_mq,
        SFTPIPC.ipc_config_name: config.has_sftp,
        ZatoEventsIPC.ipc_config_name: config.has_stats,
    }
    for ipc_config_name, is_enabled in ipc_config_name_to_enabled.items():
        if is_enabled:
            # The response, if there is one, is a JSON string with the connector's details
            response = self.connector_config_ipc.get_config(ipc_config_name)
            if response:
                response = cast_('str', response)
                response = loads(response)
                # E.g. 'zato-ibm-mq' -> 'ibm_mq' -> the 'connector_ibm_mq' attribute of self
                connector_suffix = ipc_config_name.replace('zato-', '').replace('-', '_')
                connector_attr = 'connector_{}'.format(connector_suffix)
                connector = getattr(self, connector_attr) # type: SubprocessIPC
                # All we need from the first worker is the connector's TCP port
                connector.ipc_tcp_port = response['port']
# ################################################################################################################################
def init_subprocess_connectors(self, config:'SubprocessStartConfig') -> 'None':
    """ Sets up subprocess-based connectors (IBM MQ, SFTP and the Zato events connector),
    starting only the ones enabled in the input config.
    """
    # Common - the first TCP port that subprocess connectors may bind to
    ipc_tcp_start_port = int(self.fs_server_config.misc.get('ipc_tcp_start_port', 34567))
    # IBM MQ
    if config.has_ibm_mq:
        # Will block for a few seconds at most, until is_ok is returned
        # which indicates that a connector started or not.
        try:
            if self.connector_ibm_mq.start_ibm_mq_connector(ipc_tcp_start_port):
                self.connector_ibm_mq.create_initial_wmq_definitions(self.worker_store.worker_config.definition_wmq)
                self.connector_ibm_mq.create_initial_wmq_outconns(self.worker_store.worker_config.out_wmq)
                self.connector_ibm_mq.create_initial_wmq_channels(self.worker_store.worker_config.channel_wmq)
        except Exception as e:
            logger.warning('Could not create initial IBM MQ objects, e:`%s`', e)
        else:
            # Only mark the connector as running if no exception was raised above
            self.subproc_current_state.is_ibm_mq_running = True
    # SFTP
    if config.has_sftp and self.connector_sftp.start_sftp_connector(ipc_tcp_start_port):
        self.connector_sftp.create_initial_sftp_outconns(self.worker_store.worker_config.out_sftp)
        self.subproc_current_state.is_sftp_running = True
    # Prepare Zato events configuration
    events_config = self.fs_server_config.get('events') or {}
    # This is optional in server.conf ..
    fs_data_path = events_config.get('fs_data_path') or ''
    fs_data_path = fs_data_path or EventsDefault.fs_data_path
    # An absolute path = someone chose it explicitly, we leave it is as it is.
    if os.path.isabs(fs_data_path):
        pass
    # .. otherwise, build a full path.
    else:
        fs_data_path = os.path.join(self.work_dir, fs_data_path, self.events_dir, 'zato.events')
        fs_data_path = os.path.abspath(fs_data_path)
        fs_data_path = os.path.normpath(fs_data_path)
    # Options forwarded to the events connector's subprocess
    extra_options_kwargs = {
        'fs_data_path': fs_data_path,
        'sync_threshold': EventsDefault.sync_threshold,
        'sync_interval': EventsDefault.sync_interval,
    }
    if self.component_enabled['stats']:
        _ = self.connector_events.start_zato_events_connector(ipc_tcp_start_port, extra_options_kwargs=extra_options_kwargs)
        # Wait until the events connector started - this will let other parts
        # of the server assume that it is always available.
        _ = wait_until_port_taken(self.connector_events.ipc_tcp_port, timeout=5)
# ################################################################################################################################
def set_up_sso_rate_limiting(self) -> 'None':
    """ Creates rate limiting definitions for each SSO user that has one configured.
    """
    for info in self.odb.get_sso_user_rate_limiting_info():
        info = cast_('any_', info)
        self._create_sso_user_rate_limiting(info.user_id, True, info.rate_limit_def)
# ################################################################################################################################
def set_up_api_key_config(self):
    """ Establishes the name of the HTTP header that carries API keys,
    preferring the environment variable over the built-in default.
    """
    header = os.environ.get(API_Key.Env_Key) or API_Key.Default_Header
    # Keep both the plain header name and its WSGI representation for later use
    self.api_key_header = header
    self.api_key_header_wsgi = 'HTTP_' + header.upper().replace('-', '_')
# ################################################################################################################################
def _create_sso_user_rate_limiting(
    self,
    user_id:'str',
    is_active:'bool',
    rate_limit_def:'str',
) -> 'None':
    """ Registers a rate limiting definition for a single SSO user.
    """
    object_info = {
        'id': user_id,
        'type_': RATE_LIMIT.OBJECT_TYPE.SSO_USER,
        'name': user_id,
        'is_active': is_active,
        'parent_type': None,
        'parent_name': None,
    }
    self.rate_limiting.create(object_info, rate_limit_def, True)
# ################################################################################################################################
def _get_sso_session(self) -> 'any_':
""" Returns a session function suitable for SSO operations.
"""
pool_name:'str' = self.sso_config.sql.name
if pool_name:
try:
pool:'any_' = self.worker_store.sql_pool_store.get(pool_name)
except KeyError:
pool = None
if not pool:
raise Exception('SSO pool `{}` not found or inactive'.format(pool_name))
else:
session_func = pool.session
else:
session_func = self.odb.session
return session_func()
# ################################################################################################################################
def configure_sso(self) -> 'None':
    """ Finishes the SSO API's configuration, assuming SSO is enabled at all.
    """
    if not self.is_sso_enabled:
        return
    self.sso_api.post_configure(self._get_sso_session, self.odb.is_sqlite)
# ################################################################################################################################
def invoke_startup_services(self) -> 'None':
    """ Runs the startup services configured for either the first worker or any worker.
    """
    if self.is_starting_first:
        stanza = 'startup_services_first_worker'
    else:
        stanza = 'startup_services_any_worker'
    _invoke_startup_services('Parallel', stanza,
        self.fs_server_config, self.repo_location, self.broker_client, None,
        is_sso_enabled=self.is_sso_enabled)
# ################################################################################################################################
def _set_ide_password(self, ide_username:'str', ide_password:'str') -> 'None':
    """ Changes the Basic Auth password of the given IDE user.
    """
    request = {
        'name': ide_username,
        'is_active': True,
        'type_': SEC_DEF_TYPE.BASIC_AUTH,
        'password1': ide_password,
        'password2': ide_password,
    }
    _ = self.invoke('zato.security.basic-auth.change-password', request)
# ################################################################################################################################
def apply_local_config(self) -> 'None':
    """ Applies configuration from env.json in a potential quickstart environment
    directory one level above the server's base directory, if the file exists.
    """
    # A quickstart environment directory we are potentially in
    env_dir = os.path.abspath(os.path.join(self.base_dir, '..'))
    # A configuration file that may potentially exist
    env_json = os.path.join(env_dir, 'env.json')
    # Nothing to do if there is no such file
    if not os.path.exists(env_json):
        return
    # Log what we are about to do
    self.logger.info('Found local config file -> %s', env_json)
    with open(env_json) as f:
        conf = loads(f.read())
    ide_username = conf.get('ide_username')
    ide_password = conf.get('ide_password')
    # Only act if both credentials were given on input
    if ide_username and ide_password:
        self.logger.info('Setting password for IDE user `%s`', ide_username)
        self._set_ide_password(ide_username, ide_password)
# ################################################################################################################################
def get_default_cache(self) -> 'CacheAPI':
    """ Returns the server's default cache.
    """
    default_cache = self.worker_store.cache_api.default
    return cast_('CacheAPI', default_cache)
# ################################################################################################################################
def get_cache(self, cache_type:'str', cache_name:'str') -> 'Cache':
    """ Returns a cache object of the given type and name.
    """
    cache_api = self.worker_store.cache_api
    return cache_api.get_cache(cache_type, cache_name)
# ################################################################################################################################
def get_from_cache(self, cache_type:'str', cache_name:'str', key:'str') -> 'any_':
    """ Returns a value from the input cache by key, or None if there is no such key.
    """
    target_cache = self.worker_store.cache_api.get_cache(cache_type, cache_name)
    return target_cache.get(key) # type: ignore
# ################################################################################################################################
def set_in_cache(self, cache_type:'str', cache_name:'str', key:'str', value:'any_') -> 'any_':
    """ Sets a value in the cache selected by the input parameters.
    """
    target_cache = self.worker_store.cache_api.get_cache(cache_type, cache_name)
    return target_cache.set(key, value) # type: ignore
# ################################################################################################################################
def _remove_response_root_elem(self, data:'strdict') -> 'strdict':
keys = list(data.keys())
if len(keys) == 1:
root = keys[0]
if root.startswith('zato_') or root == 'response':
data = data[root]
return data
# ################################################################################################################################
def _remove_response_elem(self, data:'strdict | anylist') -> 'strdict | anylist':
if isinstance(data, dict):
data = self._remove_response_root_elem(data)
else:
for idx, item in enumerate(data):
item = self._remove_response_root_elem(item)
data[idx] = item
return data
# ################################################################################################################################
def invoke_all_pids(self, service:'str', request:'any_', timeout:'int'=5, *args:'any_', **kwargs:'any_') -> 'dictlist':
    """ Invokes a given service in each of processes current server has.

    Returns a list of per-PID response dicts. Errors are logged and the
    list collected so far is returned instead of raising.
    """
    # A list of dict responses, one for each PID
    out:'dictlist' = []

    try:
        # Get all current PIDs
        all_pids_response = self.invoke('zato.info.get-worker-pids', serialize=False)
        pids = all_pids_response['pids']

        # Use current PID if none were received (this is required on Mac)
        pids = pids or [self.pid]

        # Invoke each of them
        for pid in pids:
            pid_response = self.invoke_by_pid(service, request, pid, timeout=timeout, *args, **kwargs)

            if pid_response.data is not None:

                # If this is an internal service, we want to remove its root-level response element.
                if service.startswith('zato'):
                    pid_response.data = self._remove_response_elem(pid_response.data)

                out.append(pid_response.data)

    except Exception:
        # Best-effort - log the error and fall through to return what was collected.
        # Unlike the previous `finally: return out` construct, this does not
        # swallow SystemExit/KeyboardInterrupt or exceptions raised while handling.
        logger.warning('PID invocation error `%s`', format_exc())

    return out
# ################################################################################################################################
def invoke_by_pid(
    self,
    service, # type: str
    request, # type: any_
    target_pid, # type: int
    timeout=_ipc_timeout, # type: int
    **kwargs # type:any_
) -> 'IPCResponse':
    """ Invokes a service in another worker process of this server, selected
    by the latter's PID, over the server's IPC mechanism.

    Note that **kwargs is accepted for signature compatibility but is not
    forwarded to the IPC call.
    """
    return self.ipc_api.invoke_by_pid(self.use_tls, service, request, self.cluster_name, self.name, target_pid, timeout)
# ################################################################################################################################
def invoke(self, service:'str', request:'any_'=None, *args:'any_', **kwargs:'any_') -> 'any_':
    """ Invokes a service either in our own worker or, if PID is given on input, in another process of this server.
    """
    # An explicit 'pid' keyword argument routes the invocation to another process
    target_pid = kwargs.pop('pid', None)
    if target_pid and target_pid != self.pid:

        # This cannot be used by self.invoke_by_pid
        data_format = kwargs.pop('data_format', None)

        # NOTE(review): *args is forwarded positionally after target_pid, which
        # would land in invoke_by_pid's timeout parameter - confirm callers never
        # pass positional args together with pid.
        data = self.invoke_by_pid(service, request, target_pid, *args, **kwargs)

        # Serialize to JSON only if that format was explicitly requested
        return dumps(data) if data_format == DATA_FORMAT.JSON else data
    else:
        # Local invocation in our own worker, defaulting to dict format
        response = self.worker_store.invoke(
            service, request,
            data_format=kwargs.pop('data_format', DATA_FORMAT.DICT),
            serialize=kwargs.pop('serialize', True),
            *args, **kwargs)
        return response
# ################################################################################################################################
def invoke_wsx_adapter(self, service_name:'str', ctx:'WSXCtx') -> 'None':
    """ Hands this server and the adapter service's name over to the given
    WebSocket context object, which performs the actual invocation.
    """
    ctx.invoke_service(self, service_name)
# ################################################################################################################################
def on_ipc_invoke_callback(self, msg:'bunch_') -> 'anydict':
    """ Invoked for each incoming IPC request - runs the requested service and
    unwraps an optional top-level 'response' element from dict results.
    """
    result:'any_' = self.invoke(msg['service'], msg['data'])
    if isinstance(result, dict) and 'response' in result:
        result = result['response']
    return result # type: ignore
# ################################################################################################################################
def publish(self, *args:'any_', **kwargs:'any_') -> 'any_':
    """ Delegates publication of a pub/sub message to the worker store's pubsub tool.
    """
    pubsub = self.worker_store.pubsub
    return pubsub.publish(*args, **kwargs)
# ################################################################################################################################
def invoke_async(self, service:'str', request:'any_', callback:'callable_', *args:'any_', **kwargs:'any_') -> 'any_':
    """ Schedules a service invocation in background, with callback invoked once it completes.
    """
    return self.worker_store.invoke(service, request, is_async=True, callback=callback, *args, **kwargs)
# ################################################################################################################################
def publish_pickup(self, topic_name:'str', request:'any_', *args:'any_', **kwargs:'any_') -> 'None':
    """ Publishes a previously picked up file to a named topic.
    """
    # Metadata describing where and when the file was picked up
    meta = {
        'pickup_ts_utc': request['ts_utc'],
        'stanza': request.get('stanza'),
        'full_path': request['full_path'],
        'file_name': request['file_name'],
    }

    # The actual file contents
    payload = {'raw': request['raw_data']}

    # Publish it all through the internal pub/sub endpoint
    _ = self.invoke('zato.pubsub.publish.publish', {
        'topic_name': topic_name,
        'endpoint_id': self.default_internal_pubsub_endpoint_id,
        'has_gd': False,
        'data': dumps({'meta': meta, 'data': payload}),
    })
# ################################################################################################################################
def deliver_pubsub_msg(self, msg:'any_') -> 'None':
    """ A callback method invoked by pub/sub delivery tasks for each message that is to be delivered.
    """
    # Look up the subscription and its topic through the message's sub_key
    subscription = self.worker_store.pubsub.subscriptions_by_sub_key[msg.sub_key]
    topic = self.worker_store.pubsub.topics[subscription.config['topic_id']] # type: ignore

    # If the topic has a before-delivery hook service, let it decide whether to skip this message ..
    if topic.before_delivery_hook_service_invoker:
        response:'any_' = topic.before_delivery_hook_service_invoker(topic, msg)
        if response['skip_msg']:
            # .. raising SkipDelivery signals the caller to drop the message.
            raise SkipDelivery(msg.pub_msg_id)

    # Hand the message over to the actual delivery service
    _ = self.invoke('zato.pubsub.delivery.deliver-message', {'msg':msg, 'subscription':subscription})
# ################################################################################################################################
def encrypt(self, data:'any_', prefix:'str'=SECRETS.PREFIX, *, needs_str:'bool'=True) -> 'strnone':
    """ Encrypts data with the server's CryptoManager and prepends the given prefix.
    Returns None for empty input.
    """
    if not data:
        return None
    raw = data.encode('utf8') if isinstance(data, str) else data
    encrypted = self.crypto_manager.encrypt(raw, needs_str=needs_str)
    return '{}{}'.format(prefix, encrypted)
# ################################################################################################################################
def hash_secret(self, data:'str', name:'str'='zato.default') -> 'str':
    """ Hashes a secret using the named hashing scheme of the server's CryptoManager.
    """
    crypto_manager = self.crypto_manager
    return crypto_manager.hash_secret(data, name)
# ################################################################################################################################
def verify_hash(self, given:'str', expected:'str', name:'str'='zato.default') -> 'bool':
    """ Checks whether the given value matches the expected hash under the named scheme.
    """
    crypto_manager = self.crypto_manager
    return crypto_manager.verify_hash(given, expected, name)
# ################################################################################################################################
def decrypt(self, data:'strbytes', _prefix:'str'=SECRETS.PREFIX, _marker:'str'=SECRETS.Encrypted_Indicator) -> 'str':
    """ Returns data decrypted using server's CryptoManager.
    """
    # Normalize bytes input to str first
    if isinstance(data, bytes):
        data = data.decode('utf8')

    # Decrypt only values that carry the encryption prefix or marker ..
    if data and data.startswith((_prefix, _marker)):
        # NOTE(review): only _prefix is stripped here - if data starts with
        # _marker instead, the marker is passed through to decrypt_no_prefix.
        # Confirm the underlying CryptoManager handles that form.
        return self.decrypt_no_prefix(data.replace(_prefix, '', 1))
    else:
        return data # Already decrypted, return as is
# ################################################################################################################################
def decrypt_no_prefix(self, data:'str') -> 'str':
    """ Decrypts data that has already had its secret prefix removed.
    """
    crypto_manager = self.crypto_manager
    return crypto_manager.decrypt(data)
# ################################################################################################################################
def incr_pub_counter(self):
    """ Atomically increments the server-wide publication counter.
    """
    with self.pub_counter_lock:
        self.pub_counter = self.pub_counter + 1
# ################################################################################################################################
def get_pub_counter(self):
    """ Returns the current value of the publication counter, read under its lock.
    """
    with self.pub_counter_lock:
        return self.pub_counter
# ################################################################################################################################
def set_up_zato_kvdb(self) -> 'None':
    """ Ensures the on-disk KVDB directory exists and loads its data.
    """
    self.kvdb_dir = os.path.join(self.work_dir, 'kvdb', 'v10')

    # exist_ok makes a prior existence check redundant (and avoids the
    # check-then-create race the previous `if not os.path.exists` had).
    os.makedirs(self.kvdb_dir, exist_ok=True)

    self.load_zato_kvdb_data()
# ################################################################################################################################
def set_up_oauth_store(self) -> 'None':
    """ Builds the server-wide OAuth token store and populates it with all
    the OAuth definitions currently configured.
    """
    # Create the base object ..
    self.oauth_store = OAuthStore(
        self.worker_store.oauth_get_by_id,
        OAuthTokenClient.obtain_from_config
    )

    # .. and populate it with initial data now.
    for item_id in self.worker_store.oauth_get_all_id_list():
        self.oauth_store.create(item_id)
# ################################################################################################################################
def load_zato_kvdb_data(self) -> 'None':
    """ Points each KVDB container at its on-disk path and then loads its data.
    Only now are the full paths known, because they depend on self.kvdb_dir.
    """
    containers = (
        (self.slow_responses, CommonZatoKVDB.SlowResponsesPath),
        (self.usage_samples, CommonZatoKVDB.UsageSamplesPath),
        (self.current_usage, CommonZatoKVDB.CurrentUsagePath),
        (self.pub_sub_metadata, CommonZatoKVDB.PubSubMetadataPath),
    )

    #
    # First, set all the data paths ..
    #
    for container, path in containers:
        container.set_data_path(os.path.join(self.kvdb_dir, path))

    #
    # .. and only then load all the data.
    #
    for container, _ignored in containers:
        container.load_data()
# ################################################################################################################################
def save_zato_main_proc_state(self) -> 'None':
    """ Persists all in-memory KVDB containers to disk.
    """
    for container in (self.slow_responses, self.usage_samples, self.current_usage, self.pub_sub_metadata):
        container.save_data()
# ################################################################################################################################
@staticmethod
def post_fork(arbiter:'Arbiter', worker:'any_') -> 'None':
    """ A Gunicorn hook which initializes the worker.
    """
    # Each subprocess needs to have the random number generator re-seeded.
    random_seed()

    # This is our parallel server
    server = worker.app.zato_wsgi_app # type: ParallelServer

    # Run user-defined startup callables registered for this phase
    server.startup_callable_tool.invoke(SERVER_STARTUP.PHASE.BEFORE_POST_FORK, kwargs={
        'arbiter': arbiter,
        'worker': worker,
    })

    # Make the worker's PID available on the server object
    worker.app.zato_wsgi_app.worker_pid = worker.pid

    # Start the actual server, using the deployment key the arbiter set in on_starting
    ParallelServer.start_server(server, arbiter.zato_deployment_key)
# ################################################################################################################################
@staticmethod
def on_starting(arbiter:'Arbiter') -> 'None':
""" A Gunicorn hook for setting the deployment key for this particular
set of server processes. It needs to be added to the arbiter because
we want for each worker to be (re-)started to see the same key.
"""
arbiter.zato_deployment_key = '{}.{}'.format(datetime.utcnow().isoformat(), uuid4().hex)
# ################################################################################################################################
@staticmethod
def worker_exit(arbiter:'Arbiter', worker:'GeventWorker') -> 'None':
# Invoke cleanup procedures
app:'ParallelServer' = worker.app.zato_wsgi_app
app.cleanup_on_stop()
# ################################################################################################################################
@staticmethod
def before_pid_kill(arbiter:'Arbiter', worker:'GeventWorker') -> 'None':
    # A Gunicorn hook invoked just before a worker process is killed.
    # Deliberately a no-op - defined so the hook can be registered uniformly.
    pass
# ################################################################################################################################
def cleanup_wsx(self, needs_pid:'bool'=False) -> 'None':
    """ Delete persistent information about WSX clients currently registered with the server.
    """
    service = 'zato.channel.web-socket.client.delete-by-server'

    # Invoke the cleanup service only if it is actually deployed on this server
    if self.service_store.is_deployed(service):
        self.invoke(service, {'needs_pid': needs_pid})
# ################################################################################################################################
def cleanup_on_stop(self) -> 'None':
    """ A shutdown cleanup procedure.
    """
    # Tell the ODB we've gone through a clean shutdown but only if this is
    # the main process going down (Arbiter) not one of Gunicorn workers.
    # We know it's the main process because its ODB's session has never
    # been initialized.
    if not self.odb.session_initialized:

        # Build an ODB session just for the purpose of recording the shutdown
        self.config.odb_data = self.get_config_odb_data(self)
        self.config.odb_data['fs_sql_config'] = self.fs_sql_config
        self.set_up_odb()

        self.odb.init_session(ZATO_ODB_POOL_NAME, self.config.odb_data, self.odb.pool, False)

        # Mark the server as cleanly stopped and release the session
        self.odb.server_up_down(self.odb.token, SERVER_UP_STATUS.CLEAN_DOWN)
        self.odb.close()

    # Per-worker cleanup
    else:

        # Store Zato KVDB data on disk
        self.save_zato_main_proc_state()

        # Set the flag to True only the first time we are called, otherwise simply return
        if self._is_process_closing:
            return
        else:
            self._is_process_closing = True

        # Close SQL pools
        self.sql_pool_store.cleanup_on_stop()

        # Close all POSIX IPC structures
        if self.has_posix_ipc:
            self.server_startup_ipc.close()
            self.connector_config_ipc.close()

        # WSX connections for this server cleanup
        self.cleanup_wsx(True)

        logger.info('Stopping server process (%s:%s) (%s)', self.name, self.pid, os.getpid())

        # Terminate this worker process outright once cleanup is done
        import sys
        sys.exit(3) # Same as arbiter's WORKER_BOOT_ERROR
# ################################################################################################################################
def notify_new_package(self, package_id:'int') -> 'None':
    """ Publishes a message on the broker so all the servers (this one including
    can deploy a new package).
    """
    msg = {'package_id': package_id, 'action': HOT_DEPLOY.CREATE_SERVICE.value} # type: ignore
    self.broker_client.publish(msg)
# ################################################################################################################################
# ################################################################################################################################
# Shortcut API methods
def api_service_store_get_service_name_by_id(self, *args:'any_', **kwargs:'any_') -> 'any_':
    # Shortcut to self.service_store.get_service_name_by_id
    return self.service_store.get_service_name_by_id(*args, **kwargs)

def api_worker_store_basic_auth_get_by_id(self, *args:'any_', **kwargs:'any_') -> 'any_':
    # Shortcut to self.worker_store.basic_auth_get_by_id
    return self.worker_store.basic_auth_get_by_id(*args, **kwargs)

def api_worker_store_reconnect_generic(self, *args:'any_', **kwargs:'any_') -> 'any_':
    # Shortcut to self.worker_store.reconnect_generic
    return self.worker_store.reconnect_generic(*args, **kwargs) # type: ignore

def is_active_outconn_wsx(self, conn_id:'str') -> 'bool':
    # True if the generic (WSX) connection of this ID is currently active
    is_active:'bool' = self.worker_store.is_active_generic_conn(conn_id)
    return is_active

def is_service_wsx_adapter(self, *args:'any_', **kwargs:'any_') -> 'any_':
    # Shortcut to self.service_store.is_service_wsx_adapter
    return self.service_store.is_service_wsx_adapter(*args, **kwargs)

def on_wsx_outconn_stopped_running(self, conn_id:'str') -> 'None':
    """ This does not do anything by default but tests can overwrite it with custom functionality.
    """

def on_wsx_outconn_connected(self, conn_id:'str') -> 'None':
    """ This does not do anything by default but tests can overwrite it with custom functionality.
    """
# ################################################################################################################################
# ################################################################################################################################
servernone = optional[ParallelServer]
# ################################################################################################################################
# ################################################################################################################################
| 89,199
|
Python
|
.py
| 1,512
| 48.85582
| 130
| 0.551619
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,221
|
http.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/http.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from logging import getLogger, INFO
from traceback import format_exc
# pytz
from pytz import UTC
# tzlocal
from tzlocal import get_localzone
# Zato
from zato.common.api import NO_REMOTE_ADDRESS
from zato.common.util.api import new_cid
# ################################################################################################################################
# ################################################################################################################################
if 0:
from pytz.tzinfo import BaseTzInfo
from zato.common.typing_ import any_, callable_, list_, stranydict
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato_rest')
# ################################################################################################################################
# ################################################################################################################################
Access_Log_Date_Time_Format = '%d/%b/%Y:%H:%M:%S %z'
_has_log_info = logger.isEnabledFor(INFO)
# ################################################################################################################################
# ################################################################################################################################
class HTTPHandler:
    """ Handles incoming HTTP requests.
    """
    def on_wsgi_request(
        self:'ParallelServer', # type: ignore
        wsgi_environ,      # type: stranydict
        start_response,    # type: callable_
        _new_cid=new_cid,  # type: callable_
        _local_zone=get_localzone(), # type: BaseTzInfo
        _utcnow=datetime.utcnow,     # type: callable_
        _INFO=INFO, # type: int
        _UTC=UTC,   # type: any_
        _Access_Log_Date_Time_Format=Access_Log_Date_Time_Format, # type: str
        _no_remote_address=NO_REMOTE_ADDRESS, # type: str
        **kwargs:'any_'
    ) -> 'list_[bytes]':
        """ Handles incoming HTTP requests, returning the response payload
        as a one-element list of bytes, per the WSGI protocol.
        """
        # This is reusable
        user_agent = wsgi_environ.get('HTTP_USER_AGENT', '(None)')

        # We need a correlation ID first ..
        cid = kwargs.get('cid', _new_cid(needs_padding=True))

        # .. this is a timestamp of when the request was received ..
        request_ts_utc = _utcnow()

        # .. basic context details ..
        wsgi_environ['zato.local_tz'] = _local_zone
        wsgi_environ['zato.request_timestamp_utc'] = request_ts_utc
        wsgi_environ['zato.request_timestamp'] = request_ts_local = request_ts_utc.replace(tzinfo=_UTC).astimezone(_local_zone)

        # .. this is always needed ..
        wsgi_environ['zato.http.response.headers'] = {}

        # .. but returning X-Zato-CID is optional ..
        if self.needs_x_zato_cid:
            wsgi_environ['zato.http.response.headers']['X-Zato-CID'] = cid

        # .. try to extract a remote address from the first matching header,
        # .. keeping the "no address" placeholder if none of them is present
        # .. (previously a missing header could overwrite the placeholder with None) ..
        remote_addr = _no_remote_address
        for name in self.client_address_headers:
            value = wsgi_environ.get(name)
            if value:
                remote_addr = value
                break

        # .. do assign the potentially extracted address for later use ..
        wsgi_environ['zato.http.remote_addr'] = remote_addr

        # .. try to handle the request now ..
        try:

            # .. this is the call that obtains a response ..
            payload = self.request_dispatcher_dispatch(
                cid,
                request_ts_utc,
                wsgi_environ,
                self.worker_store,
                user_agent,
                remote_addr,
            ) or b''

        # .. any exception at this point must be a server-side error ..
        except Exception:
            error_msg = '`%s` Exception caught `%s`' % (cid, format_exc())
            logger.error(error_msg)

            # NOTE(review): the exception is re-raised below, so the status and
            # payload assigned here never reach the client through this method -
            # note also that the status is bytes whereas the success path uses str.
            # Confirm whether the re-raise is intended.
            wsgi_environ['zato.http.response.status'] = b'500 Internal Server Error'
            payload = error_msg if self.return_tracebacks else self.default_error_message
            raise

        channel_item = wsgi_environ.get('zato.channel_item')
        if channel_item:
            # For access log
            channel_name = channel_item.get('name', '-')
        else:
            # 404 because we could not find the channel, or
            # 405 because this was an invalid HTTP method
            channel_name = '-'

        start_response(wsgi_environ['zato.http.response.status'], wsgi_environ['zato.http.response.headers'].items())

        # WSGI requires a bytes payload
        if isinstance(payload, str):
            payload = payload.encode('utf-8')

        # .. this is reusable ..
        status_code = wsgi_environ['zato.http.response.status'].split()[0]
        response_size = len(payload)

        # .. this goes to the access log ..
        if self.needs_access_log:

            # .. either log all HTTP requests or make sure that current path ..
            # .. is not in a list of paths to ignore ..
            if self.needs_all_access_log or wsgi_environ['PATH_INFO'] not in self.access_log_ignore:

                self.access_logger_log(
                    _INFO,
                    '',
                    None, # type: ignore
                    None,
                    {
                        'remote_ip': remote_addr,
                        'cid_resp_time': '%s/%s' % (cid, (_utcnow() - request_ts_utc).total_seconds()),
                        'channel_name': channel_name,
                        'req_timestamp_utc': request_ts_utc.strftime(_Access_Log_Date_Time_Format),
                        'req_timestamp': request_ts_local.strftime(_Access_Log_Date_Time_Format),
                        'method': wsgi_environ['REQUEST_METHOD'],
                        'path': wsgi_environ['PATH_INFO'],
                        'http_version': wsgi_environ['SERVER_PROTOCOL'],
                        'status_code': status_code,
                        'response_size': response_size,
                        'user_agent': user_agent,
                    })

        # .. this goes to the server log ..
        if _has_log_info:
            if not wsgi_environ['PATH_INFO'] in self.rest_log_ignore:

                # .. how long it took to produce the response ..
                delta = _utcnow() - request_ts_utc

                # .. log information about what we are returning
                # .. (the arrow below was previously mojibake - '‚Üê') ..
                msg = f'REST cha ← cid={cid}; {status_code} time={delta}; len={response_size}'
                logger.info(msg)

        # Now, return the response to our caller.
        return [payload]
# ################################################################################################################################
# ################################################################################################################################
| 7,235
|
Python
|
.py
| 139
| 41.654676
| 130
| 0.469396
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,222
|
api.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/subprocess_/api.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
class StartConfig:
    """ Flags indicating which subprocess-based connectors should be started.
    """
    __slots__ = 'has_ibm_mq', 'has_sftp', 'has_stats'

    def __init__(self, has_ibm_mq=False, has_sftp=False, has_stats=False):
        for name, value in zip(self.__slots__, (has_ibm_mq, has_sftp, has_stats)):
            setattr(self, name, value)
# ################################################################################################################################
# ################################################################################################################################
class CurrentState:
    """ Represents current runtime state of subprocess-based connectors.
    """
    __slots__ = 'is_ibm_mq_running', 'is_sftp_running', 'is_stats_running'

    def __init__(self):
        # Nothing is running until a connector is actually started
        for name in self.__slots__:
            setattr(self, name, False)
# ################################################################################################################################
# ################################################################################################################################
| 1,520
|
Python
|
.py
| 25
| 56.56
| 130
| 0.302826
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,223
|
ibm_mq.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/subprocess_/ibm_mq.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from binascii import unhexlify
from http.client import RemoteDisconnected
# Requests
from requests.exceptions import ConnectionError
# urllib3
from urllib3.exceptions import ProtocolError
# Zato
from zato.common.api import IPC, WebSphereMQCallData
from zato.common.broker_message import CHANNEL, DEFINITION, OUTGOING
from zato.common.exception import ConnectorClosedException
from zato.server.connection.connector.subprocess_.ipc import SubprocessIPC
# ################################################################################################################################
# ################################################################################################################################
_connector_not_reachable = (ConnectionError, ProtocolError, RemoteDisconnected, ConnectorClosedException)
# ################################################################################################################################
# ################################################################################################################################
class IBMMQIPC(SubprocessIPC):
    """ Implements communication with an IBM MQ connector for a given server.
    """
    check_enabled = 'ibm_mq'
    connector_name = 'IBM MQ'
    callback_suffix = 'wmq'
    ipc_config_name = 'zato-ibm-mq'
    auth_username = IPC.CONNECTOR.USERNAME.IBM_MQ
    pidfile_suffix = 'ibm-mq'
    connector_module = 'zato.server.connection.connector.subprocess_.impl.ibm_mq'

    action_definition_create = DEFINITION.WMQ_CREATE
    action_outgoing_create = OUTGOING.WMQ_CREATE
    action_channel_create = CHANNEL.WMQ_CREATE
    action_send = OUTGOING.WMQ_SEND
    action_ping = DEFINITION.WMQ_PING

# ################################################################################################################################

    # Public API methods

# ################################################################################################################################

    def start_ibm_mq_connector(self, *args, **kwargs):
        # Starts the subprocess-based IBM MQ connector for this server.
        return self.start_connector(*args, **kwargs)

# ################################################################################################################################

    def invoke_wmq_connector(self, *args, **kwargs):
        # Sends a request to the already-started IBM MQ connector.
        return self.invoke_connector(*args, **kwargs)

# ################################################################################################################################

    def send_wmq_message(self, *args, **kwargs):
        """ Sends a message through the IBM MQ connector, returning its call metadata.
        Raises ConnectorClosedException, chained to the original error, if the
        connector process cannot be reached.
        """
        # Catch only the exceptions that indicate an unreachable connector -
        # everything else propagates unchanged (previously this caught Exception
        # and re-dispatched via isinstance, losing the exception chain).
        try:
            out = self.send_message(*args, **kwargs)
        except _connector_not_reachable as e:
            raise ConnectorClosedException(e, 'IBM MQ connector not reachable') from e
        else:
            # msg_id and correlation_id arrive hex-encoded - decode and strip padding
            return WebSphereMQCallData(unhexlify(out['msg_id']).strip(), unhexlify(out['correlation_id']).strip())

# ################################################################################################################################

    def ping_wmq(self, *args, **kwargs):
        # Pings the IBM MQ connector to confirm it is responsive.
        return self.ping(*args, **kwargs)

# ################################################################################################################################

    def create_initial_wmq_definitions(self, config_dict):
        """ Creates initial IBM MQ definitions, describing each by its
        name, host, port and queue manager.
        """
        def text_func(config):
            return '{} {}:{} (queue manager:{})'.format(config['name'], config['host'], config['port'], config['queue_manager'])

        return self.create_initial_definitions(config_dict, text_func)

# ################################################################################################################################

    def create_initial_wmq_outconns(self, config_dict):
        """ Creates initial IBM MQ outgoing connections, naming each after its config entry.
        """
        def text_func(config):
            return config['name']

        return self.create_initial_outconns(config_dict, text_func)

# ################################################################################################################################

    def create_initial_wmq_channels(self, config_dict):
        """ Creates initial IBM MQ channels, naming each after its config entry.
        """
        def text_func(config):
            return config['name']

        return self.create_initial_channels(config_dict, text_func)
# ################################################################################################################################
| 4,519
|
Python
|
.py
| 75
| 54.893333
| 130
| 0.435531
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,224
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/subprocess_/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,225
|
zato_events.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/subprocess_/zato_events.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Zato
from zato.server.connection.connector.subprocess_.ipc import SubprocessIPC
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class ZatoEventsIPC(SubprocessIPC):
    """ Implements communication with a Zato events connector for a given server.
    """
    check_enabled = False
    connector_name = 'Zato events'
    callback_suffix = 'zato_events'
    ipc_config_name = 'zato-events'
    auth_username = None
    pidfile_suffix = 'zato-events'
    connector_module = 'zato.server.connection.connector.subprocess_.impl.events.container'

# ################################################################################################################################

    def get_credentials(self):
        # This connector does not use authentication - return placeholder values.
        return '<ZatoEventsIPC-no-username>', '<ZatoEventsIPC-no-password>'

# ################################################################################################################################

    def _ping_connector(self, ignored_address, ignored_auth, should_warn):
        # Checks whether the connector's TCP port accepts connections,
        # returning True on success and None (falsy) otherwise.
        # NOTE(review): should_warn is accepted for interface compatibility
        # but is not consulted here - a warning is always logged on failure.

        # stdlib
        import socket

        # Zato
        from zato.common.util.tcp import wait_until_port_taken

        # Wait a few seconds to ensure the connector started
        wait_until_port_taken(self.ipc_tcp_port, timeout=5)

        # Try a plain TCP connection to the connector's port ..
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: # type: socket
            try:
                s.settimeout(1)
                s.connect(('127.0.0.1', self.ipc_tcp_port))
            except Exception as e:
                # .. log a warning on failure ..
                logger.warning('IPC ping failed. Could not connect to 127.0.0.1:%s; e=%s', self.ipc_tcp_port, e.args)
            else:
                # .. or report success.
                return True

# ################################################################################################################################

    # Public API methods

# ################################################################################################################################

    def start_zato_events_connector(self, *args, **kwargs):
        # Starts the subprocess-based Zato events connector for this server.
        return self.start_connector(*args, **kwargs)

# ################################################################################################################################

    def invoke_zato_events_connector(self, *args, **kwargs):
        # Sends a request to the already-started Zato events connector.
        return self.invoke_connector(*args, **kwargs)
# ################################################################################################################################
| 3,108
|
Python
|
.py
| 52
| 53.846154
| 130
| 0.382916
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,226
|
ftp.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/subprocess_/ftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.api import IPC
from zato.common.broker_message import CHANNEL
from zato.server.connection.connector.subprocess_.ipc import SubprocessIPC
# ################################################################################################################################
# ################################################################################################################################
class FTPIPC(SubprocessIPC):
    """ Provides IPC with a subprocess-based FTP connector attached to this server. """

    # Identification and configuration of the connector subprocess
    connector_name = 'FTP'
    callback_suffix = 'ftp'
    ipc_config_name = 'zato-ftp'

    # NOTE(review): this reuses the SFTP IPC username rather than an FTP-specific one
    # — presumably deliberate, but confirm against IPC.CONNECTOR.USERNAME's definition.
    auth_username = IPC.CONNECTOR.USERNAME.SFTP

    pidfile_suffix = 'ftp'
    connector_module = 'zato.server.connection.connector.subprocess_.impl.ftp'

    # Broker actions this connector responds to
    action_channel_create = CHANNEL.FTP_CREATE
    action_ping = CHANNEL.FTP_PING

# ################################################################################################################################

    def start_ftp_connector(self, *args, **kwargs):
        """ Starts the underlying connector in its own subprocess. """
        return self.start_connector(*args, **kwargs)

# ################################################################################################################################

    def invoke_ftp_connector(self, *args, **kwargs):
        """ Sends a message to the already started connector. """
        return self.invoke_connector(*args, **kwargs)

# ################################################################################################################################

    def ping_ftp(self, *args, **kwargs):
        """ Pings the connector to confirm it is up and responding. """
        return self.ping(*args, **kwargs)

# ################################################################################################################################

    def create_initial_ftp_channels(self, config_dict):
        """ Creates FTP channels out of the configuration given on input,
        using each channel's 'name' key as its display text.
        """
        return self.create_initial_channels(config_dict, lambda config: config['name'])
# ################################################################################################################################
# ################################################################################################################################
| 2,535
|
Python
|
.py
| 41
| 57.731707
| 130
| 0.376566
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,227
|
outconn_sftp.py
|
zatosource_zato/code/zato-server/src/zato/server/base/parallel/subprocess_/outconn_sftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.api import IPC
from zato.common.broker_message import OUTGOING
from zato.server.connection.connector.subprocess_.ipc import SubprocessIPC
# ################################################################################################################################
# ################################################################################################################################
class SFTPIPC(SubprocessIPC):
    """ Provides IPC with a subprocess-based SFTP connector attached to this server. """

    # Identification and configuration of the connector subprocess
    connector_name = 'SFTP'
    callback_suffix = 'sftp'
    ipc_config_name = 'zato-sftp'
    auth_username = IPC.CONNECTOR.USERNAME.SFTP
    pidfile_suffix = 'sftp'
    connector_module = 'zato.server.connection.connector.subprocess_.impl.outconn_sftp'

    # Broker actions this connector responds to
    action_outgoing_create = OUTGOING.SFTP_CREATE
    action_send = OUTGOING.SFTP_EXECUTE
    action_ping = OUTGOING.SFTP_PING

# ################################################################################################################################

    def start_sftp_connector(self, *args, **kwargs):
        """ Starts the underlying connector in its own subprocess. """
        return self.start_connector(*args, **kwargs)

# ################################################################################################################################

    def invoke_sftp_connector(self, *args, **kwargs):
        """ Sends a message to the already started connector. """
        return self.invoke_connector(*args, **kwargs)

# ################################################################################################################################

    def ping_sftp(self, *args, **kwargs):
        """ Pings the connector to confirm it is up and responding. """
        return self.ping(*args, **kwargs)

# ################################################################################################################################

    def create_initial_sftp_outconns(self, config_dict):
        """ Creates SFTP outgoing connections out of the configuration given on input,
        using each connection's 'name' key as its display text.
        """
        return self.create_initial_outconns(config_dict, lambda config: config['name'])
# ################################################################################################################################
# ################################################################################################################################
| 2,600
|
Python
|
.py
| 42
| 57.785714
| 130
| 0.387948
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,228
|
wmq.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/wmq.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.base.worker import WorkerStore
# ################################################################################################################################
# ################################################################################################################################
class WebSphereMQ(WorkerImpl):
    """ IBM MQ-related functionality for worker objects.

    All broker messages are forwarded, unmodified, to the IBM MQ connector process.
    """
    def _on_broker_msg_invoke_wmq_connector(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Forwards the broker message to the IBM MQ connector — only the first worker
        does so, which ensures the connector is invoked exactly once per server.
        """
        if not self.server.is_first_worker:
            return
        self.server.connector_ibm_mq.invoke_wmq_connector(msg)

    # Definition changes are delegated to the connector ..
    on_broker_msg_DEFINITION_WMQ_CREATE = _on_broker_msg_invoke_wmq_connector
    on_broker_msg_DEFINITION_WMQ_EDIT = _on_broker_msg_invoke_wmq_connector
    on_broker_msg_DEFINITION_WMQ_DELETE = _on_broker_msg_invoke_wmq_connector
    on_broker_msg_DEFINITION_WMQ_CHANGE_PASSWORD = _on_broker_msg_invoke_wmq_connector

    # .. as are outgoing connections ..
    on_broker_msg_OUTGOING_WMQ_CREATE = _on_broker_msg_invoke_wmq_connector
    on_broker_msg_OUTGOING_WMQ_EDIT = _on_broker_msg_invoke_wmq_connector
    on_broker_msg_OUTGOING_WMQ_DELETE = _on_broker_msg_invoke_wmq_connector

    # .. and channels.
    on_broker_msg_CHANNEL_WMQ_CREATE = _on_broker_msg_invoke_wmq_connector
    on_broker_msg_CHANNEL_WMQ_EDIT = _on_broker_msg_invoke_wmq_connector
    on_broker_msg_CHANNEL_WMQ_DELETE = _on_broker_msg_invoke_wmq_connector
# ################################################################################################################################
# ################################################################################################################################
| 2,302
|
Python
|
.py
| 38
| 56.342105
| 130
| 0.471815
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,229
|
sms_twilio.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/sms_twilio.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.base.worker import WorkerStore
# ################################################################################################################################
# ################################################################################################################################
class SMSTwilio(WorkerImpl):
    """ SMS Twilio-related functionality for worker objects. """

    def on_broker_msg_SMS_TWILIO_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Creates a new Twilio connection, decrypting its auth token first. """
        msg.auth_token = self.server.decrypt(msg.auth_token)
        self.sms_twilio_api.create(msg.name, msg)

# ################################################################################################################################

    def on_broker_msg_SMS_TWILIO_EDIT(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates an existing Twilio connection, possibly under a new name. """

        # It may be a rename, in which case the connection is still stored under its previous name.
        del_name = msg.get('old_name') or msg['name']

        msg.auth_token = self.server.decrypt(msg.auth_token)
        self.sms_twilio_api.edit(del_name, msg)

# ################################################################################################################################

    def on_broker_msg_SMS_TWILIO_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Deletes a Twilio connection by its name. """
        self.sms_twilio_api.delete(msg.name)
# ################################################################################################################################
| 2,102
|
Python
|
.py
| 40
| 47.4
| 130
| 0.358887
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,230
|
file_transfer.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/file_transfer.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Bunch
from bunch import bunchify
# Zato
from zato.common.api import FILE_TRANSFER
from zato.common.typing_ import cast_
from zato.common.util.file_transfer import parse_extra_into_list
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.typing_ import tuple_
from zato.server.base.worker import WorkerStore
from zato.server.base.worker.generic import Generic
# ################################################################################################################################
# ################################################################################################################################
class FileTransfer(WorkerImpl):
    """ Handles broker messages related to file transfer.

    File transfer channels may be associated with scheduler jobs. The private methods below
    keep a channel's ID in sync with a job's 'extra' field, which holds a semicolon-separated
    list of the IDs of channels that the job is to run.
    """
    def __init__(self):
        super(FileTransfer, self).__init__()

# ################################################################################################################################

    def _file_transfer_get_scheduler_job_by_id(self, job_id:'int') -> 'Bunch':
        """ Returns a scheduler job's definition, as a Bunch, looked up by the job's ID. """

        # This returns a SimpleIO payload object ..
        response = self.server.invoke('zato.scheduler.job.get-by-id', {
            'cluster_id': self.server.cluster_id,
            'id': job_id,
        }, needs_response=False)

        # .. this returns a dict ..
        response = response.getvalue()

        # .. and this returns a Bunch.
        return bunchify(response)

# ################################################################################################################################

    def _file_transfer_get_scheduler_job_list(self):
        """ Returns a list of all scheduler jobs that run the file transfer service. """

        # This returns a SimpleIO payload object ..
        response = self.server.invoke('zato.scheduler.job.get-list', {
            'cluster_id': self.server.cluster_id,
            'service_name': FILE_TRANSFER.SCHEDULER_SERVICE,
        }, needs_response=False)

        # .. this returns a dict with a single key ..
        response = response.getvalue()

        # .. and we return the key's list data only.
        return response['zato_scheduler_job_get_list_response']

# ################################################################################################################################

    def _file_transfer_save_scheduler_job(self, data:'Bunch') -> 'None':
        """ Persists a (possibly modified) scheduler job definition back in ODB. """
        data['cluster_id'] = self.server.cluster_id
        data['service'] = data.service_name
        self.server.invoke('zato.scheduler.job.edit', data)

# ################################################################################################################################

    def _file_transfer_modify_scheduler_job(
        self,
        job,          # type: Bunch | None
        job_id,       # type: int
        channel_id,   # type: int
        add_or_remove # type: bool
    ) -> 'None':
        """ Finds a job along with its extra data and either adds or removes a file transfer channel for it.

        If 'job' is given, 'job_id' is ignored; 'add_or_remove' is True to add the channel, False to remove it.
        """

        # We store IDs as string objects but we compare them as integers
        channel_id = int(channel_id)

        # Get a scheduler's job by its id if we were not given a job on input
        job = job or self._file_transfer_get_scheduler_job_by_id(job_id)

        # This is where we keep information about the channels to run
        extra = job.extra if isinstance(job.extra, str) else job.extra.decode('utf8')

        # Holds all channel IDs to run
        extra_set = set()

        # If it exists at all ..
        if extra:

            # .. it will be a semicolon-separated list of IDs ..
            # NOTE(review): job.extra (possibly still bytes) is parsed here rather than
            # the decoded 'extra' above — presumably parse_extra_into_list handles both; confirm.
            extra = parse_extra_into_list(job.extra)

            # .. turn the list into a set ..
            extra_set.update(extra)

        # .. now, we can just add or remove our own key, no matter if extra existed or not ..
        if add_or_remove:
            extra_set.add(channel_id)
        else:
            try:
                extra_set.remove(channel_id)
            except KeyError:
                # This is fine, apparently the channel was not assigned to extra before
                pass

        # .. serialise the set back to a semicolon-separated list ..
        extra = '; '.join(sorted(str(elem) for elem in extra_set))

        # .. assign it to our job dict ..
        job['extra'] = extra

        # .. and save it back in ODB.
        self._file_transfer_save_scheduler_job(job)

# ################################################################################################################################

    def _create_file_transfer_channel(self, msg:'Bunch') -> 'None':
        """ Associates a newly created channel with a scheduler job, if one was given. """

        # Our caller in generic.py has already created the channel object
        # so we only need to associate ourselves with a scheduler's job, if any.
        if msg.scheduler_job_id:
            self._file_transfer_modify_scheduler_job(None, msg.scheduler_job_id, msg.id, True)

# ################################################################################################################################

    def _disassociate_channel_from_scheduler_jobs(self, msg:'Bunch') -> 'None':
        """ Removes the channel from the 'extra' list of every file transfer scheduler job. """
        for item in self._file_transfer_get_scheduler_job_list():
            item = bunchify(item)
            # job_id is unused when a job object is given, hence the None cast
            self._file_transfer_modify_scheduler_job(item, cast_('int', None), msg.id, False)

# ################################################################################################################################

    def _edit_file_transfer_channel(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates an existing channel, re-pointing its scheduler job association as needed. """

        # If we have a scheduler job on input ..
        if msg.scheduler_job_id:
            self._file_transfer_modify_scheduler_job(None, msg.scheduler_job_id, msg.id, True)

        # .. otherwise, without a job ID on input, we still need to look up
        # all scheduler jobs and disassociate our channel from any of the existing jobs ..
        else:
            self._disassociate_channel_from_scheduler_jobs(msg)

        # .. finally, we can edit the channel itself.
        self.file_transfer_api.edit(msg)

# ################################################################################################################################

    def _delete_file_transfer_channel(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Detaches a channel being deleted from any scheduler jobs that reference it. """

        # Our caller in generic.py has already deleted the channel object
        # so we only need to disassociate ourselves from a scheduler's job, if any.
        self._disassociate_channel_from_scheduler_jobs(msg)

# ################################################################################################################################

    def get_file_transfer_channel_by_id(
        self:'Generic', # type: ignore
        channel_id, # type: int
    ) -> 'tuple_':
        """ Returns connection information for a file transfer channel, by the channel's ID. """
        return self._find_conn_info(channel_id)
# ################################################################################################################################
# ################################################################################################################################
| 7,491
|
Python
|
.py
| 133
| 48.639098
| 130
| 0.464906
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,231
|
cache_builtin.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/cache_builtin.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from base64 import b64decode
# Zato
from zato.common.api import CACHE
from zato.server.base.worker.common import WorkerImpl
# Python 2/3 compatibility
from zato.common.py23_ import pickle_loads
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.base.worker import WorkerStore
# ################################################################################################################################
# ################################################################################################################################
class CacheBuiltin(WorkerImpl):
    """ Handles asynchronous updates to built-in caches.

    Each on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_* callback applies, to the local cache,
    a change that originated in another worker process. Messages published by our own worker
    are ignored because the local cache was already updated when the change was made.
    """
    def on_broker_msg_CACHE_BUILTIN_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Creates a new built-in cache definition. """
        self.cache_api.create(msg)

# ################################################################################################################################

    def on_broker_msg_CACHE_BUILTIN_EDIT(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates an existing built-in cache definition. """
        self.cache_api.edit(msg)

# ################################################################################################################################

    def on_broker_msg_CACHE_BUILTIN_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Deletes a built-in cache definition. """
        self.cache_api.delete(msg)

# ################################################################################################################################

    def _unpickle_msg(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Deserialises, in place, whichever of the message's key and value was pickled. """
        if msg['is_key_pickled']:
            msg['key'] = pickle_loads(msg['key'])

        if msg['is_value_pickled']:
            msg['value'] = pickle_loads(b64decode(msg['value']))

# ################################################################################################################################

    def _needs_sync(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'bool':
        """ Returns True if the message originated in another worker and should be applied locally,
        first unpickling its key/value if either was pickled.

        Factored out so that every STATE_CHANGED handler applies exactly the same checks —
        previously, DELETE_BY_SUFFIX unpickled its message unconditionally, unlike its siblings.
        """
        if msg.source_worker_id == self.server.worker_id:
            return False
        if msg['is_value_pickled'] or msg['is_key_pickled']:
            self._unpickle_msg(msg)
        return True

# ################################################################################################################################

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_SET(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_set(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_SET_BY_PREFIX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_set_by_prefix(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_SET_BY_SUFFIX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_set_by_suffix(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_SET_BY_REGEX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_set_by_regex(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_SET_CONTAINS(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_set_contains(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_SET_NOT_CONTAINS(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_set_not_contains(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_SET_CONTAINS_ALL(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_set_contains_all(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_SET_CONTAINS_ANY(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_set_contains_any(CACHE.TYPE.BUILTIN, msg)

# ################################################################################################################################

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_delete(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_DELETE_BY_PREFIX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_delete_by_prefix(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_DELETE_BY_SUFFIX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        # Previously this handler called _unpickle_msg unconditionally — it now
        # uses the same pickled-flags guard as every other STATE_CHANGED handler.
        if self._needs_sync(msg):
            self.cache_api.sync_after_delete_by_suffix(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_DELETE_BY_REGEX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_delete_by_regex(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_DELETE_CONTAINS(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_delete_contains(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_DELETE_NOT_CONTAINS(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_delete_not_contains(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_DELETE_CONTAINS_ALL(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_delete_contains_all(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_DELETE_CONTAINS_ANY(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_delete_contains_any(CACHE.TYPE.BUILTIN, msg)

# ################################################################################################################################

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_EXPIRE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_expire(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_EXPIRE_BY_PREFIX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_expire_by_prefix(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_EXPIRE_BY_SUFFIX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_expire_by_suffix(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_EXPIRE_BY_REGEX(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_expire_by_regex(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_expire_contains(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_EXPIRE_NOT_CONTAINS(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_expire_not_contains(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS_ALL(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_expire_contains_all(CACHE.TYPE.BUILTIN, msg)

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS_ANY(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        if self._needs_sync(msg):
            self.cache_api.sync_after_expire_contains_any(CACHE.TYPE.BUILTIN, msg)

# ################################################################################################################################

    def on_broker_msg_CACHE_BUILTIN_STATE_CHANGED_CLEAR(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        # CLEAR messages carry no key/value, hence no unpickling (and no _needs_sync) here.
        if msg.source_worker_id != self.server.worker_id:
            self.cache_api.sync_after_clear(CACHE.TYPE.BUILTIN, msg)
# ################################################################################################################################
| 12,634
|
Python
|
.py
| 250
| 41.62
| 130
| 0.536597
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,232
|
cache_memcached.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/cache_memcached.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.base.worker import WorkerStore
# ################################################################################################################################
# ################################################################################################################################
class CacheMemcached(WorkerImpl):
    """ Handles asynchronous updates to Memcached-based caches.

    Unlike built-in caches, no cross-worker state synchronisation is needed here —
    only definition changes are handled, each delegated to the cache API.
    """
    def on_broker_msg_CACHE_MEMCACHED_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Creates a new Memcached cache definition. """
        self.cache_api.create(msg)

# ################################################################################################################################

    def on_broker_msg_CACHE_MEMCACHED_EDIT(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates an existing Memcached cache definition. """
        self.cache_api.edit(msg)

# ################################################################################################################################

    def on_broker_msg_CACHE_MEMCACHED_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Deletes a Memcached cache definition. """
        self.cache_api.delete(msg)
# ################################################################################################################################
# ################################################################################################################################
| 1,965
|
Python
|
.py
| 36
| 50.138889
| 130
| 0.300469
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,233
|
sso.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/sso.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.api import RATE_LIMIT
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.base.worker import WorkerStore
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    """ Module-level constants for SSO broker-message handlers. """
    # The rate-limiting object type under which SSO user definitions are stored
    Rate_Limit_Type = RATE_LIMIT.OBJECT_TYPE.SSO_USER
# ################################################################################################################################
# ################################################################################################################################
class SSO(WorkerImpl):
    """ Callbacks for messages related to SSO. """

    def on_broker_msg_SSO_USER_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Sets up rate limiting for a newly created SSO user. """
        self.server._create_sso_user_rate_limiting(msg.user_id, msg.is_rate_limit_active, msg.rate_limit_def)

# ################################################################################################################################

    def on_broker_msg_SSO_USER_EDIT(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates an SSO user's rate-limiting configuration — but only if one was ever defined. """
        if not self.server.rate_limiting.has_config(ModuleCtx.Rate_Limit_Type, msg.user_id):
            return

        object_dict = {
            'id': msg.user_id,
            'type_': ModuleCtx.Rate_Limit_Type,
            'name': msg.user_id,
            'is_active': msg.is_rate_limit_active,
            'parent_type': None,
            'parent_name': None,
        }
        self.server.rate_limiting.edit(ModuleCtx.Rate_Limit_Type, msg.user_id, object_dict, msg.rate_limit_def, True)

# ################################################################################################################################

    def on_broker_msg_SSO_LINK_AUTH_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Links a security definition to an SSO user; the auth type is namespaced with a 'zato.' prefix. """
        self.server.sso_api.user.on_broker_msg_SSO_LINK_AUTH_CREATE('zato.{}'.format(msg.auth_type), msg.auth_id, msg.user_id)

# ################################################################################################################################

    def on_broker_msg_SSO_LINK_AUTH_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Unlinks a security definition from an SSO user. """
        self.server.sso_api.user.on_broker_msg_SSO_LINK_AUTH_DELETE(msg.auth_type, msg.auth_id)
# ################################################################################################################################
| 3,126
|
Python
|
.py
| 54
| 51.62963
| 130
| 0.362565
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,234
|
generic.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/generic.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Bunch
from bunch import Bunch
# Zato
from zato.common.api import GENERIC as COMMON_GENERIC, LDAP, ZATO_NONE
from zato.common.broker_message import GENERIC as GENERIC_BROKER_MSG
from zato.common.const import SECRETS
from zato.common.util.api import as_bool, parse_simple_type
from zato.common.util.config import replace_query_string_items_in_dict
from zato.distlock import PassThrough as PassThroughLock
from zato.server.base.worker.common import WorkerImpl
from zato.server.generic.connection import GenericConnection
# ################################################################################################################################
# ################################################################################################################################
if 0:
from logging import Logger
from zato.common.typing_ import any_, callable_, stranydict, strnone, tuple_
from zato.server.connection.queue import Wrapper
Wrapper = Wrapper
# ################################################################################################################################
# ################################################################################################################################
# Connection-type constants for the type-specific code paths below
_type_outconn_wsx = COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_WSX
_type_channel_file_transfer = COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER

# String prefixes that mark a value as an encrypted secret which must be decrypted before use
_secret_prefixes = (SECRETS.Encrypted_Indicator, SECRETS.PREFIX)
# ################################################################################################################################
# ################################################################################################################################
class Generic(WorkerImpl):
    """ Handles broker messages destined for generic objects, such as connections.
    """
    # These attributes are provided at runtime by WorkerStore, which this mixin is combined into
    logger: 'Logger'
    generic_conn_api: 'stranydict'
    _generic_conn_handler: 'stranydict'
    _get_generic_impl_func: 'callable_'
    _delete_file_transfer_channel: 'callable_'
    _edit_file_transfer_channel: 'callable_'
    _create_file_transfer_channel: 'callable_'

# ################################################################################################################################

    def _find_conn_info(self, item_id:'int', item_name:'str'='') -> 'tuple_':
        """ Looks up a connection by its ID or, if no ID was given, by its name.
        Returns a two-element tuple of (conn_dict, per-type config dict) or (None, None) if not found.
        """
        # Prefer the ID if one was given, otherwise fall back to the name
        if item_id:
            search_key = 'id'
            search_value = item_id
        else:
            search_key = 'name'
            search_value = item_name

        found_conn_dict = None
        found_name = None

        # Scan every connection of every generic type until a match is found
        for _ignored_conn_type, value in self.generic_conn_api.items():
            for _ignored_conn_name, conn_dict in value.items():
                if conn_dict[search_key] == search_value:
                    # NOTE(review): the second element returned here is the whole per-type
                    # dict of connections, not a name, despite the 'found_name' fallback below
                    # suggesting otherwise - callers unpack it as 'conn_value'.
                    return conn_dict, value

        return found_conn_dict, found_name

# ################################################################################################################################

    def get_conn_dict_by_id(self, conn_id:'int') -> 'dict | None':
        """ Returns the configuration dict of a connection given its ID, or None if there is no such connection. """
        conn_dict, _ = self._find_conn_info(conn_id)
        return conn_dict

# ################################################################################################################################

    def is_active_generic_conn(self, conn_id:'int') -> 'bool':
        """ Returns True if the connection with the given ID exists and is active, False otherwise. """

        # Try to find such a connection ..
        conn_dict = self.get_conn_dict_by_id(conn_id)

        # .. if it exists, we can check if it is active ..
        if conn_dict:
            return conn_dict['is_active']

        # .. otherwise, assume that it is not.
        else:
            return False

# ################################################################################################################################

    def _delete_generic_connection(self, msg:'stranydict') -> 'None':
        """ Deletes a generic connection and removes it from the configuration.
        Raises an exception if no connection matches the input message.
        """
        conn_dict, conn_value = self._find_conn_info(msg['id'], msg['name'])
        if not conn_dict:
            raise Exception('Could not find configuration matching input message `{}`'.format(msg))
        else:
            # Delete the connection object ..
            conn = conn_dict.conn # type: Wrapper

            # .. provide the reason code if the connection type supports it ..
            has_delete_reasons = getattr(conn, 'has_delete_reasons', None)
            if has_delete_reasons:
                conn.delete(reason=COMMON_GENERIC.DeleteReason)
            else:
                conn.delete()

            # .. and delete the connection from the configuration object.
            conn_name = conn_dict['name']
            _ = conn_value.pop(conn_name, None)

        # Run a special path for file transfer channels
        if msg['type_'] == _type_channel_file_transfer:
            self._delete_file_transfer_channel(msg)

# ################################################################################################################################

    def _create_generic_connection(
        self,
        msg:'stranydict',
        needs_roundtrip:'bool'=False,
        skip:'any_'=None,
        raise_exc:'bool'=True,
        is_starting:'bool'=False
    ) -> 'None':
        """ Creates a new generic connection wrapper out of the input configuration message
        and stores it in the per-type configuration container.
        """

        # This roundtrip is needed to re-format msg in the format the underlying .from_bunch expects
        # in case this is a broker message rather than a startup one.
        if needs_roundtrip:
            conn = GenericConnection.from_dict(msg, skip)
            msg = conn.to_sql_dict(True)

        item = GenericConnection.from_bunch(msg)
        item_dict = item.to_dict(True) # type: stranydict

        # Carry over any input keys the roundtrip did not preserve, except for the broker action code
        for key in msg:
            if key not in item_dict:
                if key != 'action':
                    item_dict[key] = msg[key]

        item_dict['queue_build_cap'] = self.server.fs_server_config.misc.queue_build_cap
        item_dict['auth_url'] = msg.get('address')

        # Normalize the contents of the configuration message
        self.generic_normalize_config(item_dict)

        config_attr = self.generic_conn_api.get(item.type_)

        if config_attr is None:
            self.logger.info('No config attr found for generic connection `%s`', item.type_)
            return

        wrapper = self._generic_conn_handler[item.type_]

        msg_name = msg['name'] # type: str

        # It is possible that some of the input keys point to secrets
        # and other data that will be encrypted. In such a case,
        # decrypt them all here upfront.
        for key, value in item_dict.items():
            if isinstance(value, str):
                if value.startswith(_secret_prefixes):
                    value = self.server.decrypt(value)
                    item_dict[key] = value

        # Mask out all the relevant attributes
        replace_query_string_items_in_dict(self.server, item_dict)

        config_attr[msg_name] = item_dict
        conn_wrapper = wrapper(item_dict, self.server)
        config_attr[msg_name].conn = conn_wrapper
        config_attr[msg_name].conn.build_wrapper()

        if not is_starting:

            # Run a special path for file transfer channels
            if msg['type_'] == _type_channel_file_transfer:
                self._create_file_transfer_channel(msg)

# ################################################################################################################################

    def _edit_generic_connection(self, msg:'stranydict', skip:'any_'=None, secret:'strnone'=None) -> 'None':
        """ Updates a generic connection by deleting it and creating it anew, preserving its secret. """

        # Special-case file transfer channels
        if msg['type_'] == _type_channel_file_transfer:
            self._edit_file_transfer_channel(msg)
            return

        # If we do not have a secret on input, we need to look it up in the incoming message.
        # If it is still not there, assume that we are going to reuse the same secret
        # that we already have defined for the object
        if not secret:
            secret = msg.get('secret', ZATO_NONE)
            if secret == ZATO_NONE:
                conn_dict, _ = self._find_conn_info(msg['id'])
                secret = conn_dict['secret']

        # Delete the connection
        self._delete_generic_connection(msg)

        # Recreate it now but make sure to include the secret too
        msg['secret'] = secret
        self._create_generic_connection(msg, True, skip)

# ################################################################################################################################

    def ping_generic_connection(self, conn_id:'int') -> 'None':
        """ Pings the generic connection identified by its ID, logging the attempt and its success. """
        conn_dict, _ = self._find_conn_info(conn_id)

        self.logger.info('About to ping generic connection `%s` (%s)', conn_dict.name, conn_dict.type_)
        conn = conn_dict['conn']
        conn.ping()
        self.logger.info('Generic connection `%s` pinged successfully (%s)', conn_dict.name, conn_dict.type_)

# ################################################################################################################################

    def _change_password_generic_connection(self, msg:'stranydict') -> 'None':
        """ Changes a connection's secret by re-creating the connection with the new password. """
        conn_dict, _ = self._find_conn_info(msg['id'])

        # Create a new message without live Python objects
        edit_msg = Bunch()
        for key, value in conn_dict.items():
            if key in ('conn', 'parent'):
                continue
            edit_msg[key] = value

        # Now, edit the connection which will actually delete it and create again
        self._edit_generic_connection(edit_msg, secret=msg['password'])

# ################################################################################################################################

    def reconnect_generic(self, conn_id:'int') -> 'None':
        """ Reconnects a generic connection by replaying an edit message built from its current configuration. """
        found_conn_dict, _ = self._find_conn_info(conn_id)

        # The connection may have been deleted in the meantime - nothing to do in such a case
        if not found_conn_dict:
            return

        edit_msg = Bunch()
        edit_msg['action'] = GENERIC_BROKER_MSG.CONNECTION_EDIT.value

        # Copy everything except live Python objects
        for k, v in found_conn_dict.items():
            if k in ('conn', 'parent'):
                continue
            else:
                edit_msg[k] = v

        self.on_broker_msg_GENERIC_CONNECTION_EDIT(edit_msg, ['conn', 'parent'])

# ################################################################################################################################

    def _on_broker_msg_GENERIC_CONNECTION_COMMON_ACTION(
        self,
        msg:'stranydict',
        *args: 'any_',
        **kwargs: 'any_'
    ) -> 'None':
        """ Dispatches the message to the handler specific to its action and connection type.
        Note that extra args and kwargs are accepted but not forwarded to the handler.
        """
        func = self._get_generic_impl_func(msg)
        if func:
            func(msg)

# ################################################################################################################################

    def on_broker_msg_GENERIC_CONNECTION_CREATE(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Creates a generic connection in response to a broker message. """
        return self._on_broker_msg_GENERIC_CONNECTION_COMMON_ACTION(*args, **kwargs)

# ################################################################################################################################

    def _get_edit_generic_lock(self, is_outconn_wsx:'bool', msg:'stranydict') -> 'callable_':
        """ Returns the lock class/factory to be held while a generic connection is being edited. """

        # Outgoing WSX connections that connect to Zato use a specific lock type ..
        if is_outconn_wsx:
            lock = self.server.wsx_connection_pool_wrapper.get_update_lock(is_zato=msg['is_zato'])
            return lock

        # .. if we are here, we use a pass-through lock.
        return PassThroughLock

# ################################################################################################################################

    def on_broker_msg_GENERIC_CONNECTION_EDIT(
        self,
        msg:'stranydict',
        *args: 'any_',
        **kwargs: 'any_'
    ) -> 'None':
        """ Edits a generic connection in response to a broker message, under a type-appropriate lock. """

        # Local variables
        _is_outconn_wsx = msg['type_'] == _type_outconn_wsx

        # Find out what kind of a lock to use ..
        _lock = self._get_edit_generic_lock(_is_outconn_wsx, msg)

        # .. do use it ..
        with _lock(msg['id']):

            # .. and update the connection now.
            return self._on_broker_msg_GENERIC_CONNECTION_COMMON_ACTION(msg, *args, **kwargs)

# ################################################################################################################################

    def on_broker_msg_GENERIC_CONNECTION_DELETE(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Deletes a generic connection in response to a broker message. """
        return self._on_broker_msg_GENERIC_CONNECTION_COMMON_ACTION(*args, **kwargs)

# ################################################################################################################################

    # Password changes are handled by the generic change-password implementation above
    on_broker_msg_GENERIC_CONNECTION_CHANGE_PASSWORD = _change_password_generic_connection

# ################################################################################################################################

    def _generic_normalize_config_outconn_ldap(self, config:'stranydict') -> 'None':
        """ Converts the string values of an outgoing LDAP connection's configuration to their native types. """

        config['pool_max_cycles'] = int(config['pool_max_cycles'])
        config['pool_keep_alive'] = int(config['pool_keep_alive'])
        config['use_auto_range'] = as_bool(config['use_auto_range'])
        config['use_tls'] = as_bool(config['use_tls'])

        # If GSS-API SASL method is used, the username may be a set of credentials actually
        if config['sasl_mechanism'] == LDAP.SASL_MECHANISM.GSSAPI.id:
            sasl_credentials = []
            if config['username']:
                for elem in config['username'].split():
                    elem = elem.strip()
                    elem = parse_simple_type(elem)
                    sasl_credentials.append(elem)
            config['sasl_credentials'] = sasl_credentials
        else:
            config['sasl_credentials'] = None

        # Initially, this will be a string but during ChangePassword we are reusing
        # the same configuration object in which case it will be already a list.
        if not isinstance(config['server_list'], list):
            config['server_list'] = [server.strip() for server in config['server_list'].splitlines()]

# ################################################################################################################################

    def generic_normalize_config(self, config:'stranydict') -> 'None':
        """ Runs the type-specific normalization method over the input configuration, if one exists for its type. """

        # Normalize type name to one that can potentially point to a method of ours
        type_ = config['type_'] # type: str
        preprocess_type = type_.replace('-', '_')

        # Check if there is such a method and if so, invoke it to preprocess the message
        func = getattr(self, '_generic_normalize_config_{}'.format(preprocess_type), None)
        if func:
            try:
                func(config)
            except Exception:
                self.logger.warning('Could not invoke `%s` with `%r`', func, config)
                raise
# ################################################################################################################################
| 15,075
|
Python
|
.py
| 264
| 48.189394
| 130
| 0.484369
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,235
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=too-many-public-methods
# stdlib
import logging
import inspect
import os
import sys
from copy import deepcopy
from datetime import datetime
from errno import ENOENT
from inspect import isclass
from shutil import rmtree
from tempfile import gettempdir
from threading import RLock
from traceback import format_exc
from urllib.parse import urlparse
from uuid import uuid4
# Bunch
from bunch import bunchify
# gevent
import gevent
# orjson
from orjson import dumps
# Zato
from zato.bunch import Bunch
from zato.common import broker_message
from zato.common.api import API_Key, CHANNEL, CONNECTION, DATA_FORMAT, FILE_TRANSFER, GENERIC as COMMON_GENERIC, \
HotDeploy, HTTP_SOAP_SERIALIZATION_TYPE, IPC, NOTIF, PUBSUB, RATE_LIMIT, SEC_DEF_TYPE, simple_types, \
URL_TYPE, WEB_SOCKET, Wrapper_Name_Prefix_List, ZATO_DEFAULT, ZATO_NONE, ZATO_ODB_POOL_NAME, ZMQ
from zato.common.broker_message import code_to_name, GENERIC as BROKER_MSG_GENERIC, SERVICE
from zato.common.const import SECRETS
from zato.common.dispatch import dispatcher
from zato.common.json_internal import loads
from zato.common.match import Matcher
from zato.common.model.amqp_ import AMQPConnectorConfig
from zato.common.model.wsx import WSXConnectorConfig
from zato.common.odb.api import PoolStore, SessionWrapper
from zato.common.typing_ import cast_
from zato.common.util.api import get_tls_ca_cert_full_path, get_tls_key_cert_full_path, get_tls_from_payload, \
fs_safe_name, import_module_from_path, new_cid, parse_extra_into_dict, parse_tls_channel_security_definition, \
start_connectors, store_tls, update_apikey_username_to_channel, update_bind_port, visit_py_source, wait_for_dict_key, \
wait_for_dict_key_by_get_func
from zato.common.util.file_system import resolve_path
from zato.common.util.pubsub import is_service_subscription
from zato.cy.reqresp.payload import SimpleIOPayload
from zato.server.base.parallel.subprocess_.api import StartConfig as SubprocessStartConfig
from zato.server.base.worker.common import WorkerImpl
from zato.server.connection.amqp_ import ConnectorAMQP
from zato.server.connection.cache import CacheAPI
from zato.server.connection.connector import ConnectorStore, connector_type
from zato.server.connection.cloud.aws.s3 import S3Wrapper
from zato.server.connection.email import IMAPAPI, IMAPConnStore, SMTPAPI, SMTPConnStore
from zato.server.connection.ftp import FTPStore
from zato.server.connection.http_soap.channel import RequestDispatcher, RequestHandler
from zato.server.connection.http_soap.outgoing import HTTPSOAPWrapper, SudsSOAPWrapper
from zato.server.connection.http_soap.url_data import URLData
from zato.server.connection.odoo import OdooWrapper
from zato.server.connection.sap import SAPWrapper
from zato.server.connection.search.es import ElasticSearchAPI, ElasticSearchConnStore
from zato.server.connection.search.solr import SolrAPI, SolrConnStore
from zato.server.connection.sftp import SFTPIPCFacade
from zato.server.connection.sms.twilio import TwilioAPI, TwilioConnStore
from zato.server.connection.web_socket import ChannelWebSocket
from zato.server.connection.vault import VaultConnAPI
from zato.server.ext.zunicorn.workers.ggevent import GeventWorker as GunicornGeventWorker
from zato.server.file_transfer.api import FileTransferAPI
from zato.server.generic.api.channel_file_transfer import ChannelFileTransferWrapper
from zato.server.generic.api.channel_hl7_mllp import ChannelHL7MLLPWrapper
from zato.server.generic.api.cloud_confluence import CloudConfluenceWrapper
from zato.server.generic.api.cloud_dropbox import CloudDropbox
from zato.server.generic.api.cloud_jira import CloudJiraWrapper
from zato.server.generic.api.cloud_microsoft_365 import CloudMicrosoft365Wrapper
from zato.server.generic.api.cloud_salesforce import CloudSalesforceWrapper
from zato.server.generic.api.def_kafka import DefKafkaWrapper
from zato.server.generic.api.outconn_hl7_fhir import OutconnHL7FHIRWrapper
from zato.server.generic.api.outconn_hl7_mllp import OutconnHL7MLLPWrapper
from zato.server.generic.api.outconn_im_slack import OutconnIMSlackWrapper
from zato.server.generic.api.outconn_im_telegram import OutconnIMTelegramWrapper
from zato.server.generic.api.outconn_ldap import OutconnLDAPWrapper
from zato.server.generic.api.outconn_mongodb import OutconnMongoDBWrapper
from zato.server.generic.api.outconn.wsx.base import OutconnWSXWrapper
from zato.server.pubsub import PubSub
from zato.server.pubsub.delivery.tool import PubSubTool
from zato.server.rbac_ import RBAC
from zato.zmq_.channel import MDPv01 as ChannelZMQMDPv01, Simple as ChannelZMQSimple
from zato.zmq_.outgoing import Simple as OutZMQSimple
# ################################################################################################################################
# ################################################################################################################################
# Module-level logger shared by everything in this module
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch as bunch_
from zato.broker.client import BrokerClient
from zato.common.typing_ import any_, anydict, anylist, anytuple, callable_, callnone, dictnone, stranydict, tupnone
from zato.server.base.parallel import ParallelServer
from zato.server.config import ConfigDict
from zato.server.config import ConfigStore
from zato.server.connection.http_soap.outgoing import BaseHTTPSOAPWrapper
from zato.server.service import Service
from zato.server.store import BaseAPI
ConfigStore = ConfigStore
ParallelServer = ParallelServer
Service = Service
# ################################################################################################################################
# ################################################################################################################################
# Cached locally to expedite look-ups
_data_format_dict = DATA_FORMAT.DICT

# ################################################################################################################################
# ################################################################################################################################

# Prefix that marks pickup.conf entries converted to file-transfer channels
pickup_conf_item_prefix = 'zato.pickup'
# ################################################################################################################################
# ################################################################################################################################
class _generic_msg:
    # Broker-message action codes for generic connections, aliased for brevity
    create = BROKER_MSG_GENERIC.CONNECTION_CREATE.value
    edit = BROKER_MSG_GENERIC.CONNECTION_EDIT.value
    delete = BROKER_MSG_GENERIC.CONNECTION_DELETE.value
    change_password = BROKER_MSG_GENERIC.CONNECTION_CHANGE_PASSWORD.value
# ################################################################################################################################
# ################################################################################################################################
class GeventWorker(GunicornGeventWorker):

    def __init__(self, *args:'any_', **kwargs:'any_') -> 'None':
        # A unique per-worker deployment key combining the current UTC timestamp and a random UUID
        self.deployment_key = '{}.{}'.format(datetime.utcnow().isoformat(), uuid4().hex)
        # NOTE(review): super() is called with GunicornGeventWorker - the parent class itself -
        # which skips GunicornGeventWorker.__init__ and calls its base class's __init__ instead.
        # This looks deliberate but confirm before changing it to super(GeventWorker, self).
        super(GunicornGeventWorker, self).__init__(*args, **kwargs)
# ################################################################################################################################
def _get_base_classes() -> 'anytuple':
    """ Returns a tuple of all WorkerImpl subclasses found in the Python modules located
    in the same directory as this file, skipping this module itself and common.py.
    """
    # Modules that must not contribute any base classes
    ignore = ('__init__.py', 'common.py')
    out = []

    for py_path in visit_py_source(os.path.dirname(os.path.abspath(__file__))):

        # str.endswith accepts a tuple of suffixes, which replaces the original
        # flag-and-continue loop (whose `continue` was really meant to be `break`).
        if py_path.endswith(ignore):
            continue

        mod_info = import_module_from_path(py_path)

        for name in dir(mod_info.module):
            attr = getattr(mod_info.module, name)

            # Collect only concrete WorkerImpl subclasses, never WorkerImpl itself
            if isclass(attr) and issubclass(attr, WorkerImpl) and attr is not WorkerImpl:
                out.append(attr)

    return tuple(out) # type: ignore
# ################################################################################################################################
# ################################################################################################################################
# Name of the dynamically created base type for WorkerStore
_base_type = '_WorkerStoreBase'

# Dynamically adds as base classes everything found in current directory that subclasses WorkerImpl
_WorkerStoreBase = type(_base_type, _get_base_classes(), {})
class WorkerStore(_WorkerStoreBase):
    """ Dispatches work between different pieces of configuration of an individual gunicorn worker.
    """
    # Set via set_broker_client once the worker starts; None until then
    broker_client: 'BrokerClient | None' = None
    def __init__(self, worker_config:'ConfigStore', server:'ParallelServer') -> 'None':
        """ Stores the input configuration and creates empty containers for all generic connections;
        the actual connections and APIs are built later, in self.init.
        """
        self.logger = logging.getLogger(self.__class__.__name__)

        # Flipped to True at the very end of self.init - anyone waiting for this worker checks the flag
        self.is_ready = False

        self.worker_config = worker_config
        self.server = server
        self.update_lock = RLock()
        self.kvdb = server.kvdb
        self.pubsub = PubSub(self.server.cluster_id, self.server)
        self.rbac = RBAC()

        # Index of this worker within the server process, taken from the environment
        self.worker_idx = int(os.environ['ZATO_SERVER_WORKER_IDX'])

        # Which services can be invoked
        self.invoke_matcher = Matcher()

        # Which targets this server supports
        self.target_matcher = Matcher()

        # To expedite look-ups
        self._simple_types = simple_types

        # Generic connections - File transfer channels
        self.channel_file_transfer = {}

        # Generic connections - HL7 MLLP channels
        self.channel_hl7_mllp = {}

        # Generic connections - Cloud - Confluence
        self.cloud_confluence = {}

        # Generic connections - Cloud - Dropbox
        self.cloud_dropbox = {}

        # Generic connections - Cloud - Jira
        self.cloud_jira = {}

        # Generic connections - Cloud - Microsoft 365
        self.cloud_microsoft_365 = {}

        # Generic connections - Cloud - Salesforce
        self.cloud_salesforce = {}

        # Generic connections - Kafka definitions
        self.def_kafka = {}

        # Generic connections - HL7 FHIR outconns
        self.outconn_hl7_fhir = {}

        # Generic connections - HL7 MLLP outconns
        self.outconn_hl7_mllp = {}

        # Generic connections - IM Slack
        self.outconn_im_slack = {}

        # Generic connections - IM Telegram
        self.outconn_im_telegram = {}

        # Generic connections - LDAP outconns
        self.outconn_ldap = {}

        # Generic connections - MongoDB outconns
        self.outconn_mongodb = {}

        # Generic connections - WSX outconns
        self.outconn_wsx = {}
# ################################################################################################################################
    def init(self) -> 'None':
        """ Builds all the connection containers, APIs and the request dispatcher this worker needs.
        The steps below are order-sensitive - later ones depend on earlier ones being complete.
        """

        # Search
        self.search_es_api = ElasticSearchAPI(ElasticSearchConnStore())
        self.search_solr_api = SolrAPI(SolrConnStore())

        # SMS
        self.sms_twilio_api = TwilioAPI(TwilioConnStore())

        # E-mail
        self.email_smtp_api = SMTPAPI(SMTPConnStore())
        self.email_imap_api = IMAPAPI(IMAPConnStore())

        # ZeroMQ
        self.zmq_mdp_v01_api = ConnectorStore(connector_type.duplex.zmq_v01, ChannelZMQMDPv01)
        self.zmq_channel_api = ConnectorStore(connector_type.channel.zmq, ChannelZMQSimple)
        self.zmq_out_api = ConnectorStore(connector_type.out.zmq, OutZMQSimple)

        # WebSocket
        self.web_socket_api = ConnectorStore(connector_type.duplex.web_socket, ChannelWebSocket, self.server)

        # AMQP
        self.amqp_api = ConnectorStore(connector_type.duplex.amqp, ConnectorAMQP)
        self.amqp_out_name_to_def = {} # Maps outgoing connection names to definition names, i.e. to connector names

        # Vault connections
        self.vault_conn_api = VaultConnAPI()

        # Caches
        self.cache_api = CacheAPI(self.server)

        # File transfer
        self.file_transfer_api = FileTransferAPI(self.server, self)

        # Maps generic connection types to their API handler objects
        self.generic_conn_api = {
            COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER: self.channel_file_transfer,
            COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_HL7_MLLP: self.channel_hl7_mllp,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_CONFLUENCE: self.cloud_confluence,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_DROPBOX: self.cloud_dropbox,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_JIRA: self.cloud_jira,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_MICROSOFT_365: self.cloud_microsoft_365,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_SALESFORCE: self.cloud_salesforce,
            COMMON_GENERIC.CONNECTION.TYPE.DEF_KAFKA: self.def_kafka,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_HL7_FHIR: self.outconn_hl7_fhir,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_HL7_MLLP: self.outconn_hl7_mllp,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_IM_SLACK: self.outconn_im_slack,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_IM_TELEGRAM: self.outconn_im_telegram,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_LDAP: self.outconn_ldap,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_MONGODB: self.outconn_mongodb,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_WSX: self.outconn_wsx,
        }

        # Maps generic connection types to the wrapper classes that implement them
        self._generic_conn_handler = {
            COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER: ChannelFileTransferWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_HL7_MLLP: ChannelHL7MLLPWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_CONFLUENCE: CloudConfluenceWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_DROPBOX: CloudDropbox,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_JIRA: CloudJiraWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_MICROSOFT_365: CloudMicrosoft365Wrapper,
            COMMON_GENERIC.CONNECTION.TYPE.CLOUD_SALESFORCE: CloudSalesforceWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.DEF_KAFKA: DefKafkaWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_HL7_FHIR: OutconnHL7FHIRWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_HL7_MLLP: OutconnHL7MLLPWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_IM_SLACK: OutconnIMSlackWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_IM_TELEGRAM: OutconnIMTelegramWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_LDAP: OutconnLDAPWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_MONGODB: OutconnMongoDBWrapper,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_WSX: OutconnWSXWrapper
        }

        # Maps message actions against generic connection types and their message handlers
        self.generic_impl_func_map = {}

        # After a connection is established, a flag is stored here to let queries consult it
        # before they attempt to prepare statements. In other words, queries wait for connections.
        # They do it in separate greenlets.
        self._cassandra_connections_ready = {}

        # Cassandra
        self.init_cassandra()
        self.init_cassandra_queries()

        # Search
        self.init_search_es()
        self.init_search_solr()

        # SMS
        self.init_sms_twilio()

        # E-mail
        self.init_email_smtp()
        self.init_email_imap()

        # ZeroMQ
        self.init_zmq()

        # Odoo
        self.init_odoo()

        # SAP RFC
        self.init_sap()

        # RBAC
        self.init_rbac()

        # Vault connections
        self.init_vault_conn()

        # Caches
        self.init_caches()

        # API keys
        self.update_apikeys()

        # SFTP - attach handles to connections to each ConfigDict now that all their configuration is ready
        self.init_sftp()

        request_handler = RequestHandler(self.server)
        url_data = URLData(
            self,
            self.worker_config.http_soap,
            self._get_channel_url_sec(),
            self.worker_config.basic_auth,
            self.worker_config.jwt,
            self.worker_config.ntlm,
            self.worker_config.oauth,
            self.worker_config.apikey,
            self.worker_config.aws,
            self.worker_config.tls_channel_sec,
            self.worker_config.tls_key_cert,
            self.worker_config.vault_conn_sec,
            self.kvdb,
            self.broker_client,
            self.server.odb,
            self.server.jwt_secret,
            self.vault_conn_api
        )

        # Request dispatcher - matches URLs, checks security and dispatches HTTP requests to services.
        self.request_dispatcher = RequestDispatcher(
            server = self.server,
            url_data = url_data,
            request_handler = request_handler,
            simple_io_config = self.worker_config.simple_io,
            return_tracebacks = self.server.return_tracebacks,
            default_error_message = self.server.default_error_message,
            http_methods_allowed = self.server.http_methods_allowed
        )

        # Security groups - add details of each one to REST channels
        self._populate_channel_security_groups_info(self.worker_config.http_soap)

        # Create all the expected connections and objects
        self.init_sql()
        self.init_http_soap()
        self.init_cloud()

        # AMQP
        self.init_amqp()

        # Initialise file transfer-based pickup here because it is required when generic connections are being created
        self.convert_pickup_to_file_transfer()

        # Generic connections
        self.init_generic_connections_config()
        self.init_generic_connections()

        # All set, whoever is waiting for us, if anyone at all, can now proceed
        self.is_ready = True
# ################################################################################################################################
def _populate_channel_security_groups_info(self, channel_data:'anylist') -> 'None':
# First, make sure the server has all the groups ..
self.server.security_groups_ctx_builder.populate_members()
# .. now, we can attach a groups context object to each channel that has any groups.
for channel_item in channel_data:
if security_groups := channel_item.get('security_groups'):
security_groups_ctx = self.server.security_groups_ctx_builder.build_ctx(channel_item['id'], security_groups)
channel_item['security_groups_ctx'] = security_groups_ctx
# ################################################################################################################################
def _get_channel_url_sec(self) -> 'any_':
out:'any_' = self.server.odb.get_url_security(self.server.cluster_id, 'channel')[0]
return out
# ################################################################################################################################
    def early_init(self) -> 'None':
        """ Initialises these parts of our configuration that are needed earlier than others.
        """
        # FTP connections must exist before the rest of the configuration is built
        self.init_ftp()
# ################################################################################################################################
def _config_to_dict(self, config_list:'anylist', key:'str'='name') -> 'stranydict':
""" Converts a list of dictionaries produced by ConfigDict instances to a dictionary keyed with 'key' elements.
"""
out = {}
for elem in config_list:
out[elem[key]] = elem
return out
# ################################################################################################################################
    def after_broker_client_set(self) -> 'None':
        """ Runs the initialisation steps that require self.broker_client to be set; the order below matters. """
        self.pubsub.broker_client = self.broker_client

        # Pub/sub requires broker client
        self.init_pubsub()

        # WebSocket connections may depend on pub/sub so we create them only after pub/sub is initialized
        self.init_wsx()
# ################################################################################################################################
def set_broker_client(self, broker_client:'BrokerClient') -> 'None':
    """ Assigns the broker client and runs the post-assignment initialisation steps.
    """
    self.broker_client = broker_client
    self.after_broker_client_set()
# ################################################################################################################################
def filter(self, msg:'bunch_') -> 'bool':
    """ Accepts every broker message - subclasses may override this method to be selective.
    """
    return True
# ################################################################################################################################
def _update_queue_build_cap(self, item:'any_') -> 'None':
item.queue_build_cap = float(self.server.fs_server_config.misc.queue_build_cap)
# ################################################################################################################################
def _update_aws_config(self, msg:'bunch_') -> 'None':
    """ Parses the address to AWS we store into discrete components S3Connection objects expect.
    Also turns metadata string into a dictionary
    """
    parsed = urlparse(msg.address)

    # A connection is secure only if it uses the HTTPS scheme
    msg.is_secure = parsed.scheme == 'https'

    # Fall back to the scheme's default port if none was given explicitly
    msg.port = parsed.port or (443 if msg.is_secure else 80)
    msg.host = parsed.netloc

    # The extra metadata arrives as a string and is turned into a dict here
    msg.metadata = parse_extra_into_dict(msg.metadata_)
# ################################################################################################################################
def _get_tls_verify_from_config(self, config:'any_') -> 'bool':
    # Resolves the CA-certificate payload configured for this connection
    # into a full filesystem path under the server's TLS directory.
    # NOTE(review): despite the 'bool' annotation, the value returned appears
    # to be a path string (get_tls_ca_cert_full_path) - callers presumably rely
    # on its truthiness; confirm before changing the annotation.
    tls_config = self.worker_config.tls_ca_cert[config.sec_tls_ca_cert_name]
    tls_config = tls_config.config
    tls_config = tls_config.value
    tls_from_payload = get_tls_from_payload(tls_config)
    tls_verify = get_tls_ca_cert_full_path(self.server.tls_dir, tls_from_payload)
    return tls_verify
# ################################################################################################################################
def _http_soap_wrapper_from_config(self, config:'bunch_', *, has_sec_config:'bool'=True) -> 'BaseHTTPSOAPWrapper':
    """ Creates a new HTTP/SOAP connection wrapper out of a configuration dictionary.

    Depending on the configuration, the result is either a SudsSOAPWrapper
    (SOAP + suds serialization) or a plain HTTPSOAPWrapper.
    """
    # Populate it upfront
    conn_name = config['name']

    # This can also be populated upfront but we need to ensure
    # we do not include any potential name prefix in the FS-safe name.
    for prefix in Wrapper_Name_Prefix_List:
        if conn_name.startswith(prefix):
            name_without_prefix = conn_name.replace(prefix, '', 1)
            is_wrapper = True
            break
    else:
        # No prefix matched - this is not a wrapper-style connection
        is_wrapper = False
        prefix = ''
        name_without_prefix = conn_name

    config['name_fs_safe'] = prefix + fs_safe_name(name_without_prefix)

    security_name = config.get('security_name')

    # Default, empty security configuration - filled in below if one is attached
    sec_config = {
        'security_name': security_name,
        'security_id': None,
        'sec_type': None,
        'username': None,
        'password': None,
        'password_type': None,
        'orig_username': None
    }
    _sec_config = None

    # This will be set to True only if the method's invoked on a server's starting up
    if has_sec_config:
        # It's possible that there is no security config attached at all
        if security_name:
            _sec_config = config
    else:
        # Outside of startup, look the security definition up by its type and name
        if security_name:
            sec_type = config.sec_type
            func = getattr(self.request_dispatcher.url_data, sec_type + '_get')
            _sec_config = func(security_name).config

    # Update the security configuration if it is a separate one ..
    if _sec_config:
        _sec_config_id = _sec_config.get('security_id') or _sec_config.get('id')
        sec_config['security_id'] = _sec_config_id
        sec_config['sec_type'] = _sec_config['sec_type']
        sec_config['username'] = _sec_config.get('username')
        sec_config['orig_username'] = _sec_config.get('orig_username')
        sec_config['password'] = _sec_config.get('password')
        sec_config['password_type'] = _sec_config.get('password_type')
        sec_config['salt'] = _sec_config.get('salt')

        # TLS key/cert definitions additionally carry an encrypted payload
        # that is materialized as a file under the server's TLS directory.
        if sec_config['sec_type'] == SEC_DEF_TYPE.TLS_KEY_CERT:
            tls = cast_('bunch_', self.request_dispatcher.url_data.tls_key_cert_get(security_name))
            auth_data = self.server.decrypt(tls.config.auth_data)
            sec_config['tls_key_cert_full_path'] = get_tls_key_cert_full_path(
                self.server.tls_dir, get_tls_from_payload(auth_data, True))

    # .. otherwise, try to find it elsewhere ..
    else:

        # .. if it is a REST wrapper, it will have its own security configuration that we can use ..
        if is_wrapper:
            sec_config['sec_type'] = SEC_DEF_TYPE.BASIC_AUTH
            sec_config['username'] = config['username']
            sec_config['password'] = self.server.decrypt(config['password'])

    wrapper_config = {
        'id':config.id,
        'is_active':config.is_active,
        'method':config.method,
        'data_format':config.get('data_format'),
        'name':config.name,
        'transport':config.transport,
        'address_host':config.host,
        'address_url_path':config.url_path,
        'soap_action':config.soap_action,
        'soap_version':config.soap_version,
        'ping_method':config.ping_method,
        'pool_size':config.pool_size,
        'serialization_type':config.serialization_type,
        'timeout':config.timeout,
        'content_type':config.content_type,
    }
    wrapper_config.update(sec_config)

    # Key 'sec_tls_ca_cert_verify_strategy' was added in 3.2
    # so we need to handle cases when it exists or it does not.
    sec_tls_ca_cert_verify_strategy = config.get('sec_tls_ca_cert_verify_strategy')

    # 3.2+
    if sec_tls_ca_cert_verify_strategy:
        if sec_tls_ca_cert_verify_strategy is True:
            tls_verify = True

        # NOTE(review): this branch is unreachable - a literal False would not
        # enter the enclosing 'if'. Kept for documentation of intent.
        elif sec_tls_ca_cert_verify_strategy is False:
            tls_verify = False

        else:
            # Any other truthy value means a concrete CA-cert is to be used
            tls_verify = self._get_tls_verify_from_config(config)

    # < 3.2
    else:
        # NOTE(review): this condition looks inverted - 'not ...get(...)' combined
        # with a membership test on the same key can rarely (if ever) both hold,
        # so tls_verify is effectively always False here. Confirm the intent was
        # 'config.get(...) and ... not in {ZATO_DEFAULT, ZATO_NONE}'.
        if not config.get('sec_tls_ca_cert_id') and config.sec_tls_ca_cert_id in {ZATO_DEFAULT, ZATO_NONE}:
            tls_verify = self._get_tls_verify_from_config(config)
        else:
            tls_verify = False

    wrapper_config['tls_verify'] = tls_verify

    conn_soap = wrapper_config['transport'] == URL_TYPE.SOAP
    conn_suds = wrapper_config['serialization_type'] == HTTP_SOAP_SERIALIZATION_TYPE.SUDS.id

    # SOAP + suds serialization requires a dedicated, queue-based wrapper
    if conn_soap and conn_suds:
        wrapper_config['queue_build_cap'] = float(self.server.fs_server_config.misc.queue_build_cap)
        wrapper = SudsSOAPWrapper(wrapper_config)
        if wrapper_config['is_active']:
            wrapper.build_client_queue()
        return wrapper

    return HTTPSOAPWrapper(self.server, wrapper_config)
# ################################################################################################################################
def get_outconn_http_config_dicts(self) -> 'any_':
    """ Returns pairs of [config_dict, config_data] for all outgoing SOAP and plain HTTP connections.
    """
    out:'any_' = []

    for transport in ('soap', 'plain_http'):
        config_dict = getattr(self.worker_config, 'out_' + transport)

        # Take a snapshot of the keys so config_dict may be modified while we iterate
        for name in list(config_dict):
            config_data = config_dict[name]

            # String entries are skipped - only actual configuration objects are collected
            if not isinstance(config_data, str):
                out.append([config_dict, config_data])

    return out
# ################################################################################################################################
def init_sql(self) -> 'None':
    """ Initializes SQL connections, first to ODB and then any user-defined ones.
    """
    # We need a store first
    self.sql_pool_store = PoolStore()

    # Connect to ODB
    self.sql_pool_store[ZATO_ODB_POOL_NAME] = self.worker_config.odb_data
    self.odb = SessionWrapper()
    self.odb.init_session(ZATO_ODB_POOL_NAME, self.worker_config.odb_data, self.sql_pool_store[ZATO_ODB_POOL_NAME].pool)

    # Any user-defined SQL connections left?
    for pool_name in self.worker_config.out_sql:
        config = self.worker_config.out_sql[pool_name]['config']
        # Each pool shares the server-wide, filesystem-based SQL configuration
        config['fs_sql_config'] = self.server.fs_sql_config
        self.sql_pool_store[pool_name] = config
def init_ftp(self) -> 'None':
    """ Initializes FTP connections. The method replaces whatever value self.out_ftp
    previously had (initially this would be a ConfigDict of connection definitions).
    """
    # Capture the connection definitions before the attribute is replaced
    params = self.worker_config.out_ftp.get_config_list()

    store = FTPStore()
    store.add_params(params)
    self.worker_config.out_ftp = store # type: ignore
def init_sftp(self) -> 'None':
    """ Each outgoing SFTP connection requires a connection handle to be attached here,
    later, in run-time, this is the 'conn' parameter available via self.out[name].conn.
    """
    for item in self.worker_config.out_sftp.values():
        facade = SFTPIPCFacade(self.server, item['config'])
        item['conn'] = facade
def init_http_soap(self, *, has_sec_config:'bool'=True) -> 'None':
    """ Initializes plain HTTP/SOAP connections.
    """
    config_dicts = self.get_outconn_http_config_dicts()

    for config_dict, config_data in config_dicts:

        # Build a wrapper object around each outgoing connection's configuration
        wrapper = self._http_soap_wrapper_from_config(config_data.config, has_sec_config=has_sec_config)
        config_data.conn = wrapper

        # To make the API consistent with that of SQL connection pools
        config_data.ping = wrapper.ping

        # Store ID -> name mapping
        config_dict.set_key_id_data(config_data.config)
def init_cloud(self) -> 'None':
    """ Initializes all the cloud connections.
    """
    # Pairs of (worker_config attribute, wrapper class) for each cloud type
    data = (
        ('cloud_aws_s3', S3Wrapper),
    )

    for config_key, wrapper in data:
        config_attr = getattr(self.worker_config, config_key)
        for name in config_attr:
            config = config_attr[name]['config']

            # Fix: 'wrapper' is a class, not an instance, so the original
            # isinstance(wrapper, S3Wrapper) check was always False and the
            # AWS address was never parsed into its discrete components.
            if issubclass(wrapper, S3Wrapper):
                self._update_aws_config(config)

            config.queue_build_cap = float(self.server.fs_server_config.misc.queue_build_cap)
            config_attr[name].conn = wrapper(config, self.server)
            config_attr[name].conn.build_queue()
def get_notif_config(self, notif_type:'str', name:'str') -> 'ConfigDict':
    """ Returns the configuration of a notifier of the given type and name.
    """
    # Map each supported notification type to its configuration container
    type_to_config = {
        NOTIF.TYPE.SQL: self.worker_config.notif_sql,
    }
    return type_to_config[notif_type].get(name)
def create_edit_notifier(self, msg:'bunch_', action:'str', config_dict:'ConfigDict', update_func:'callnone'=None) -> 'None':
    """ Creates a new notifier or updates an existing one, optionally restarting its background task.
    """
    # It might be a rename
    old_name = msg.get('old_name')
    del_name = old_name if old_name else msg.name
    config_dict.pop(del_name, None) # Delete and ignore if it doesn't exit (it's CREATE then)

    config_dict[msg.name] = Bunch()
    config_dict[msg.name].config = msg

    # An optional hook that may post-process the incoming configuration
    if update_func:
        update_func(msg)

    # Start a new background notifier either if it's a create action or on rename.
    if msg.source_service_type == 'create' or (old_name and old_name != msg.name):
        self.on_message_invoke_service({
            'service': 'zato.notif.invoke-run-notifier',
            'payload': {'config': msg},
            'cid': new_cid(),
        }, CHANNEL.NOTIFIER_RUN, action)
# ################################################################################################################################
def _on_cassandra_connection_established(self, config:'bunch_') -> 'None':
self._cassandra_connections_ready[config.id] = True
def init_cassandra(self) -> 'None':
    """ Creates Cassandra connection definitions, marking each as not ready
    until its connection-established callback fires.
    """
    for k, v in self.worker_config.cassandra_conn.items():
        try:
            # Queries will wait until this flag is flipped by the callback
            self._cassandra_connections_ready[v.config.id] = False
            self.update_cassandra_conn(v.config)
            self.cassandra_api.create_def(k, v.config, self._on_cassandra_connection_established)
        except Exception:
            logger.warning('Could not create a Cassandra connection `%s`, e:`%s`', k, format_exc())
# ################################################################################################################################
def _init_cassandra_query(self, create_func:'callable_', k:'str', config:'bunch_') -> 'None':
    """ Waits until the underlying Cassandra connection is ready and then creates the query.
    Runs in its own greenlet so it does not block server startup.
    """
    idx = 0
    while not self._cassandra_connections_ready.get(config.def_id):
        gevent.sleep(1)

        idx += 1
        # Log a reminder after every 20 sleep cycles of waiting
        if not idx % 20:
            logger.warning('Still waiting for `%s` Cassandra connection', config.def_name)

    create_func(k, config, def_=self.cassandra_api[config.def_name])
def init_cassandra_queries(self) -> 'None':
    """ Spawns greenlets that will create Cassandra queries once their underlying connections are ready.
    """
    for name, item in self.worker_config.cassandra_query.items():
        try:
            _ = gevent.spawn(self._init_cassandra_query, self.cassandra_query_api.create, name, item.config)
        except Exception:
            logger.warning('Could not create a Cassandra query `%s`, e:`%s`', name, format_exc())
# ################################################################################################################################
def init_simple(self, config:'bunch_', api:'BaseAPI', name:'str') -> 'None':
    """ A common method to initialize simple, queue-based connection types through their respective APIs.
    'name' is an article-prefixed, human-readable label used only in error messages.
    """
    for k, v in config.items():
        self._update_queue_build_cap(v.config)
        try:
            api.create(k, v.config)
        except Exception:
            # Use lazy %-style arguments throughout instead of mixing str.format
            # with %-interpolation, consistent with the other init_* methods.
            logger.warning('Could not create %s connection `%s`, e:`%s`', name, k, format_exc())
# ################################################################################################################################
def init_sms_twilio(self) -> 'None':
    # Twilio SMS connections are simple queue-based ones, hence init_simple.
    self.init_simple(self.worker_config.sms_twilio, self.sms_twilio_api, 'a Twilio')
# ################################################################################################################################
def init_search_es(self) -> 'None':
    # ElasticSearch connections are simple queue-based ones, hence init_simple.
    self.init_simple(self.worker_config.search_es, self.search_es_api, 'an ElasticSearch')
# ################################################################################################################################
def init_search_solr(self) -> 'None':
    # Solr connections are simple queue-based ones, hence init_simple.
    self.init_simple(self.worker_config.search_solr, self.search_solr_api, 'a Solr')
# ################################################################################################################################
def init_email_smtp(self) -> 'None':
    # SMTP connections are simple queue-based ones, hence init_simple.
    self.init_simple(self.worker_config.email_smtp, self.email_smtp_api, 'an SMTP')
# ################################################################################################################################
def init_email_imap(self) -> 'None':
    # IMAP connections are simple queue-based ones, hence init_simple.
    self.init_simple(self.worker_config.email_imap, self.email_imap_api, 'an IMAP')
# ################################################################################################################################
def _set_up_zmq_channel(self, name:'str', config:'bunch_', action:'str', start:'bool'=False) -> 'None':
    """ Actually initializes a ZeroMQ channel, taking into account dissimilarities between MDP ones and PULL/SUB.
    """
    # We need to consult old_socket_type because it may very well be the case that someone
    # not only (say) renamed a channel but also changed its socket type as well.
    if config.get('old_socket_type') and config.socket_type != config.old_socket_type:
        raise ValueError('Cannot change a ZeroMQ channel\'s socket type')

    if config.socket_type.startswith(ZMQ.MDP):
        api = self.zmq_mdp_v01_api

        # Ensure all MDP configuration values are integers before merging them in
        zeromq_mdp_config = self.server.fs_server_config.zeromq_mdp
        zeromq_mdp_config = {k:int(v) for k, v in zeromq_mdp_config.items()}
        config.update(zeromq_mdp_config)
    else:
        api = self.zmq_channel_api

    # 'action' names a method of the chosen API, e.g. 'create'
    getattr(api, action)(name, config, self.on_message_invoke_service)

    if start:
        api.start(name)
# ################################################################################################################################
def init_zmq_channels(self) -> 'None':
    """ Initializes ZeroMQ channels and MDP connections.
    """
    # Channels
    for name, data in self.worker_config.channel_zmq.items():

        # Each worker uses a unique bind port
        data = bunchify(data)
        update_bind_port(data.config, self.worker_idx)

        self._set_up_zmq_channel(name, bunchify(data.config), 'create')

    # Start both APIs only once all the channels have been set up
    self.zmq_mdp_v01_api.start()
    self.zmq_channel_api.start()
def init_zmq_outconns(self) -> 'None':
    """ Initializes ZeroMQ outgoing connections (but not MDP that are initialized along with channels).
    """
    for name, data in self.worker_config.out_zmq.items():

        # MDP ones were already handled in channels above
        if data.config['socket_type'].startswith(ZMQ.MDP):
            continue

        self.zmq_out_api.create(name, data.config)

    self.zmq_out_api.start()
# ################################################################################################################################
def init_zmq(self) -> 'None':
    """ Initializes all ZeroMQ connections.
    """
    # MDP connections are duplex - they are populated while channels are being
    # created, which is why the outgoing-connection step skips them.
    self.init_zmq_channels()
    self.init_zmq_outconns()
# ################################################################################################################################
def init_wsx(self) -> 'None':
    """ Initializes all WebSocket connections.
    """
    # Channels
    for name, data in self.worker_config.channel_web_socket.items():

        # Convert configuration to expected datatypes
        data.config['max_len_messages_sent'] = int(data.config.get('max_len_messages_sent') or 0)
        data.config['max_len_messages_received'] = int(data.config.get('max_len_messages_received') or 0)

        # Fall back to server-wide defaults when the channel does not set its own values
        data.config['pings_missed_threshold'] = int(
            data.config.get('pings_missed_threshold') or WEB_SOCKET.DEFAULT.PINGS_MISSED_THRESHOLD)

        data.config['ping_interval'] = int(
            data.config.get('ping_interval') or WEB_SOCKET.DEFAULT.PING_INTERVAL)

        # Create a new WebSocket connector definition ..
        config = WSXConnectorConfig.from_dict(data.config)

        # .. append common hook service to the configuration.
        config.hook_service = self.server.fs_server_config.get('wsx', {}).get('hook_service', '')

        self.web_socket_api.create(name, config, self.on_message_invoke_service,
            self.request_dispatcher.url_data.authenticate_web_socket)

    self.web_socket_api.start()
# ################################################################################################################################
def init_amqp(self) -> 'None':
    """ Initializes all AMQP connections.
    """
    def _name_matches(def_name:'str') -> 'callable_':
        # Returns a predicate selecting only items attached to the given definition
        def _inner(config:'stranydict') -> 'bool':
            return config['def_name']==def_name
        return _inner

    for def_name, data in self.worker_config.definition_amqp.items():

        # Collect channels and outgoing connections attached to this definition
        channels = self.worker_config.channel_amqp.get_config_list(_name_matches(def_name))
        outconns = self.worker_config.out_amqp.get_config_list(_name_matches(def_name))

        # Remember which definition each outgoing connection belongs to
        for outconn in outconns:
            self.amqp_out_name_to_def[outconn['name']] = def_name

        # Create a new AMQP connector definition ..
        config = AMQPConnectorConfig.from_dict(data.config)

        # .. AMQP definitions as such are always active. It is channels or outconns that can be inactive.
        config.is_active = True

        self.amqp_api.create(def_name, config, self.invoke,
            channels=self._config_to_dict(channels), outconns=self._config_to_dict(outconns))

    self.amqp_api.start()
# ################################################################################################################################
def init_odoo(self) -> 'None':
    """ Initializes all outgoing Odoo connections, attaching a queue-based wrapper to each.
    """
    for name in self.worker_config.out_odoo.keys():
        item = self.worker_config.out_odoo[name]
        config = item['config']
        config.queue_build_cap = float(self.server.fs_server_config.misc.queue_build_cap)
        item.conn = OdooWrapper(config, self.server)
        item.conn.build_queue()
# ################################################################################################################################
def init_sap(self) -> 'None':
    """ Initializes all outgoing SAP RFC connections, attaching a queue-based wrapper to each.
    """
    for name in self.worker_config.out_sap.keys():
        item = self.worker_config.out_sap[name]
        config = item['config']
        config.queue_build_cap = float(self.server.fs_server_config.misc.queue_build_cap)
        item.conn = SAPWrapper(config, self.server)
        item.conn.build_queue()
# ################################################################################################################################
def init_rbac(self) -> 'None':
    """ Initializes RBAC resources, permissions, roles, client roles and role-permission mappings.
    """
    # Each service becomes an RBAC resource
    for value in self.worker_config.service.values():
        self.rbac.create_resource(value.config.id)

    for value in self.worker_config.rbac_permission.values():
        self.rbac.create_permission(value.config.id, value.config.name)

    for value in self.worker_config.rbac_role.values():
        self.rbac.create_role(value.config.id, value.config.name, value.config.parent_id)

    for value in self.worker_config.rbac_client_role.values():
        self.rbac.create_client_role(value.config.client_def, value.config.role_id)

    # TODO - handle 'deny' as well
    for value in self.worker_config.rbac_role_permission.values():
        self.rbac.create_role_permission_allow(value.config.role_id, value.config.perm_id, value.config.service_id)

    self.rbac.set_http_permissions()
# ################################################################################################################################
def init_vault_conn(self) -> 'None':
    """ Initializes all Vault connections.
    """
    for item in self.worker_config.vault_conn_sec.values():
        self.vault_conn_api.create(bunchify(item['config']))
# ################################################################################################################################
def init_caches(self) -> 'None':
    """ Initializes both built-in and Memcached-based caches.
    """
    for cache_type in ('builtin', 'memcached'):
        container = getattr(self.worker_config, 'cache_{}'.format(cache_type))
        for item in container.values():
            self.cache_api.create(bunchify(item['config']))
# ################################################################################################################################
def sync_security(self) -> 'None':
    """ Rebuilds all the in-RAM security structures and objects.
    """
    # First, load up all the definitions from the database ..
    self.server.set_up_security(self.server.cluster_id)

    # .. update in-RAM config values ..
    url_sec = self._get_channel_url_sec()
    self.request_dispatcher.url_data.set_security_objects(
        url_sec=url_sec,
        basic_auth_config=self.worker_config.basic_auth,
        jwt_config=self.worker_config.jwt,
        ntlm_config=self.worker_config.ntlm,
        oauth_config=self.worker_config.oauth,
        apikey_config=self.worker_config.apikey,
        aws_config=self.worker_config.aws,
        tls_channel_sec_config=self.worker_config.tls_channel_sec,
        tls_key_cert_config=self.worker_config.tls_key_cert,
        vault_conn_sec_config=self.worker_config.vault_conn_sec,
    )

    # .. now, initialize connections that may depend on what we have just loaded ..
    self.init_http_soap(has_sec_config=False)
# ################################################################################################################################
def sync_pubsub(self) -> 'None':
    """ Rebuilds all the in-RAM pub/sub structures and tasks.
    """
    # First, stop everything ..
    self.pubsub.stop()

    # .. now, load it into RAM from the database ..
    self.server.set_up_pubsub(self.server.cluster_id)

    # .. finally, initialize everything once more.
    self.init_pubsub()
# ################################################################################################################################
def init_pubsub(self) -> 'None':
    """ Sets up all pub/sub endpoints, subscriptions and topics. Also, configures pubsub with getters for each endpoint type.
    """
    # This is a pub/sub tool for delivery of Zato services within this server
    service_pubsub_tool = PubSubTool(self.pubsub, self.server, PUBSUB.ENDPOINT_TYPE.SERVICE.id, True)
    self.pubsub.service_pubsub_tool = service_pubsub_tool

    # Topics first, then endpoints, then the subscriptions that join the two
    for value in self.worker_config.pubsub_topic.values(): # type: ignore
        self.pubsub.create_topic_object(bunchify(value['config']))

    for value in self.worker_config.pubsub_endpoint.values(): # type: ignore
        self.pubsub.create_endpoint(bunchify(value['config']))

    for value in self.worker_config.pubsub_subscription.values(): # type: ignore
        config:'bunch_' = bunchify(value['config'])
        config.add_subscription = True # We don't create WSX subscriptions here so it is always True
        self.pubsub.create_subscription_object(config)

        # Special-case delivery of messages to services
        if is_service_subscription(config):
            self.pubsub.set_config_for_service_subscription(config['sub_key'])

    # REST endpoints are resolved through outgoing plain HTTP connections
    self.pubsub.set_endpoint_impl_getter(PUBSUB.ENDPOINT_TYPE.REST.id, self.worker_config.out_plain_http.get_by_id)

    # Not used but needed for API completeness
    self.pubsub.set_endpoint_impl_getter(
        PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id,
        cast_('callable_', None),
    )
# ################################################################################################################################
def update_apikeys(self) -> 'None':
    """ API keys need to be upper-cased and in the format that WSGI environment will have them in.
    """
    for config_dict in self.worker_config.apikey.values():
        # Remember the header's original form before it is transformed below
        config_dict.config.orig_header = config_dict.config.get('header') or API_Key.Default_Header
        update_apikey_username_to_channel(config_dict.config)
# ################################################################################################################################
def _update_auth(
    self,
    msg,          # type: Bunch
    action_name,  # type: str
    sec_type,     # type: str
    visit_wrapper, # type: callable_
    keys=None      # type: tupnone
) -> 'None':
    """ A common method for updating auth-related configuration.

    First delegates the message to the URL-data handler for the given action,
    then visits every outgoing HTTP/SOAP wrapper of the matching security type.
    """
    with self.update_lock:

        # Channels and in-RAM security definitions are handled first
        handler = getattr(self.request_dispatcher.url_data, 'on_broker_msg_' + action_name)
        handler(msg)

        for transport in ['plain_http', 'soap']:
            config_dict = getattr(self.worker_config, 'out_' + transport)

            # Iterate over a copy of the keys as visit_wrapper may delete entries
            for conn_name in config_dict.copy_keys():

                config = config_dict[conn_name]['config']
                wrapper = config_dict[conn_name]['conn']

                if config['sec_type'] == sec_type:
                    if keys:
                        visit_wrapper(wrapper, msg, keys)
                    else:
                        visit_wrapper(wrapper, msg)
def _visit_wrapper_edit(self, wrapper:'HTTPSOAPWrapper', msg:'bunch_', keys:'anytuple') -> 'None':
""" Updates a given wrapper's security configuration.
"""
if wrapper.config['security_name'] == msg['old_name']:
for key in keys:
# All's good except for 'name', the msg's 'name' is known
# as 'security_name' in wrapper's config.
if key == 'name':
key1 = 'security_name'
key2 = key
else:
key1, key2 = key, key
wrapper.config[key1] = msg[key2]
wrapper.set_auth()
def _visit_wrapper_delete(self, wrapper:'HTTPSOAPWrapper', msg:'bunch_') -> 'None':
""" Deletes a wrapper.
"""
config_dict = getattr(self.worker_config, 'out_' + wrapper.config['transport'])
if wrapper.config['security_name'] == msg['name']:
del config_dict[wrapper.config['name']]
def _visit_wrapper_change_password(self, wrapper:'HTTPSOAPWrapper', msg:'bunch_', *, check_name:'bool'=True) -> 'None':
""" Changes a wrapper's password.
"""
# This check is performed by non-wrapper connection types
if check_name:
if not (wrapper.config['security_name'] == msg['name']):
return
# If we are here, it means that either the name matches or that the connection is a wrapper object
wrapper.config['password'] = msg['password']
wrapper.set_auth()
# ################################################################################################################################
def _convert_pickup_config_to_file_transfer(self, name:'str', config:'anydict | bunch_') -> 'bunch_ | None':
    """ Converts a single pickup.conf-style entry into the configuration dictionary
    of a local file transfer channel. Returns None if the entry has no pickup directories.
    """
    # Convert paths to full ones
    pickup_from_list = config.get('pickup_from') or []
    if not pickup_from_list:
        return

    # Scalars are promoted to one-element lists before paths are resolved
    pickup_from_list = pickup_from_list if isinstance(pickup_from_list, list) else [pickup_from_list]
    pickup_from_list = [resolve_path(elem, self.server.base_dir) for elem in pickup_from_list]

    move_processed_to = config.get('move_processed_to')
    if move_processed_to:
        move_processed_to = resolve_path(move_processed_to, self.server.base_dir)

    # Make sure we have lists on input
    service_list = config.get('services') or []
    service_list = service_list if isinstance(service_list, list) else [service_list]

    topic_list = config.get('topic_list') or []
    topic_list = topic_list if isinstance(topic_list, list) else [topic_list]

    data = {

        # Tell the manager to start this channel only
        # if we are very first process among potentially many ones for this server.
        '_start_channel': True if self.server.is_starting_first else False,

        'type_': COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER,
        'name': name,
        'is_active': True,
        'is_internal': True,
        'data_encoding': config.get('data_encoding') or 'utf-8',
        'source_type': FILE_TRANSFER.SOURCE_TYPE.LOCAL.id,
        'pickup_from_list': pickup_from_list,
        'is_hot_deploy': config.get('is_hot_deploy'),
        'should_deploy_in_place': config.get('deploy_in_place', False),
        'service_list': service_list,
        'topic_list': topic_list,
        'move_processed_to': move_processed_to,
        'file_patterns': config.get('patterns') or '*',
        'parse_with': config.get('parse_with'),
        'should_read_on_pickup': config.get('read_on_pickup', True),
        'should_parse_on_pickup': config.get('parse_on_pickup', False),
        'should_delete_after_pickup': config.get('delete_after_pickup', True),
        'is_case_sensitive': config.get('is_case_sensitive', True),
        'is_line_by_line': config.get('is_line_by_line', False),
        'is_recursive': config.get('is_recursive', False),
        'binary_file_patterns': config.get('binary_file_patterns') or [],
        'outconn_rest_list': [],
    }

    return bunchify(data)
# ################################################################################################################################
def convert_pickup_to_file_transfer(self) -> 'None':
    """ Converts all pickup.conf entries, including hot-deployment ones, into file transfer channels.
    """
    # Default pickup directory
    self._add_service_pickup_to_file_transfer(
        'hot-deploy', self.server.hot_deploy_config.pickup_dir,
        self.server.hot_deploy_config.delete_after_pickup, False)

    # User-defined pickup directories
    for name, config in self.server.pickup_config.items():
        if name.startswith(HotDeploy.UserPrefix):
            self._add_service_pickup_to_file_transfer(
                name, config.pickup_from, False,
                config.get('deploy_in_place', True))

    # Convert all the other pickup entries
    self._convert_pickup_to_file_transfer()
# ################################################################################################################################
def _add_service_pickup_to_file_transfer(
    self,
    name,                # type: str
    pickup_dir,          # type: str
    delete_after_pickup, # type: bool
    deploy_in_place      # type: bool
) -> 'None':
    """ Registers a hot-deployment pickup directory as a local file transfer channel.
    """
    # Explicitly create configuration for hot-deployment
    hot_deploy_name = '{}.{}'.format(pickup_conf_item_prefix, name)
    hot_deploy_config = self._convert_pickup_config_to_file_transfer(hot_deploy_name, {
        'is_hot_deploy': True,
        'patterns': '*.py',
        'services': 'zato.hot-deploy.create',
        'pickup_from': pickup_dir,
        'delete_after_pickup': delete_after_pickup,
        'deploy_in_place': deploy_in_place,
    })

    # Add hot-deployment to local file transfer
    self.worker_config.generic_connection[hot_deploy_name] = {'config': hot_deploy_config}
# ################################################################################################################################
def _convert_pickup_to_file_transfer(self) -> 'None':
    """ Converts non-service pickup.conf entries into local file transfer channels.
    """
    # Create transfer channels based on pickup.conf
    for key, value in self.server.pickup_config.items(): # type: (str, dict)

        # Skip user-defined service pickup because it was already added in self.convert_pickup_to_file_transfer
        if key.startswith(HotDeploy.UserPrefix):
            continue

        # This is an internal name
        name = '{}.{}'.format(pickup_conf_item_prefix, key)

        # We need to convert between config formats
        config = self._convert_pickup_config_to_file_transfer(name, value)
        # Entries without any pickup directories are skipped
        if not config:
            continue

        # Add pickup configuration to local file transfer
        self.worker_config.generic_connection[name] = {'config': config}
# ################################################################################################################################
def init_generic_connections(self) -> 'None':
    """ Creates all the generic connections, except for types that are built elsewhere.
    """
    # Some connection types are built elsewhere
    to_skip = {
        COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_SFTP,
    }

    for config_dict in self.worker_config.generic_connection.values():

        # Guard clauses instead of nested conditionals - skip empty entries ..
        if not config_dict:
            continue

        config = config_dict.get('config')
        if not config:
            continue

        # .. and the connection types that are not created here.
        if config['type_'] in to_skip:
            continue

        self._create_generic_connection(bunchify(config), raise_exc=False, is_starting=True)
# ################################################################################################################################
def init_generic_connections_config(self) -> 'None':
    """ Populates self.generic_impl_func_map - a map of per-connection-type implementation
    functions for generic connections, keyed first by connection type and then by broker-message action.
    """

    # Local aliases
    channel_file_transfer_map = self.generic_impl_func_map.setdefault(
        COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER, {})
    channel_hl7_mllp_map = self.generic_impl_func_map.setdefault(
        COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_HL7_MLLP, {})
    cloud_confluence_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.CLOUD_CONFLUENCE, {})
    cloud_dropbox_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.CLOUD_DROPBOX, {})
    cloud_jira_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.CLOUD_JIRA, {})
    cloud_microsoft_365_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.CLOUD_MICROSOFT_365, {})
    cloud_salesforce_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.CLOUD_SALESFORCE, {})
    def_kafka_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.DEF_KAFKA, {})
    outconn_hl7_fhir_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_HL7_FHIR, {})
    outconn_hl7_mllp_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_HL7_MLLP, {})
    outconn_im_slack_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_IM_SLACK, {})
    outconn_im_telegram_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_IM_TELEGRAM, {})
    outconn_ldap_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_LDAP, {})
    outconn_mongodb_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_MONGODB, {})
    outconn_sftp_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_SFTP, {})
    outconn_wsx_map = self.generic_impl_func_map.setdefault(COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_WSX, {})

    # These generic connections are regular - they use common API methods for such connections.
    # Note: outconn_im_telegram_map used to appear twice in this list - the duplicate was
    # a harmless no-op (handlers were just assigned twice) and has been removed.
    regular_maps = [
        channel_file_transfer_map,
        channel_hl7_mllp_map,
        cloud_confluence_map,
        cloud_dropbox_map,
        cloud_jira_map,
        cloud_microsoft_365_map,
        cloud_salesforce_map,
        def_kafka_map,
        outconn_hl7_fhir_map,
        outconn_hl7_mllp_map,
        outconn_im_slack_map,
        outconn_im_telegram_map,
        outconn_ldap_map,
        outconn_mongodb_map,
        outconn_wsx_map,
    ]

    # Only these types support password changes
    password_maps = [
        cloud_dropbox_map,
        outconn_im_slack_map,
        outconn_im_telegram_map,
        outconn_ldap_map,
        outconn_mongodb_map,
    ]

    for regular_item in regular_maps:
        regular_item[_generic_msg.create] = self._create_generic_connection
        regular_item[_generic_msg.edit] = self._edit_generic_connection
        regular_item[_generic_msg.delete] = self._delete_generic_connection

    for password_item in password_maps:
        password_item[_generic_msg.change_password] = self._change_password_generic_connection

    # Some generic connections require different admin APIs - SFTP is managed via a connector subprocess
    outconn_sftp_map[_generic_msg.create] = self._on_outconn_sftp_create
    outconn_sftp_map[_generic_msg.edit] = self._on_outconn_sftp_edit
    outconn_sftp_map[_generic_msg.delete] = self._on_outconn_sftp_delete
# ################################################################################################################################
def _on_outconn_sftp_create(self, msg:'bunch_') -> 'any_':
    """ Creates a new outgoing SFTP connection. Only the first worker talks to the SFTP
    connector subprocess - other workers merely record the connector's configuration.
    """
    # Non-first workers only register the connector's configuration and are done
    if not self.server.is_first_worker:
        self.server._populate_connector_config(SubprocessStartConfig(has_sftp=True))
        return

    # Start the SFTP connector subprocess if it is not already running
    if not self.server.subproc_current_state.is_sftp_running:
        config = SubprocessStartConfig()
        config.has_sftp = True
        self.server.init_subprocess_connectors(config)

    # Use a copy for the connector so the original message can be stored locally below
    connector_msg = deepcopy(msg)

    # Store the connection's configuration and its IPC facade locally ..
    self.worker_config.out_sftp[msg.name] = msg
    self.worker_config.out_sftp[msg.name].conn = SFTPIPCFacade(self.server, msg)

    # .. and let the connector subprocess know about the new connection.
    return self.server.connector_sftp.invoke_connector(connector_msg)
# ################################################################################################################################
def _on_outconn_sftp_edit(self, msg:'bunch_') -> 'any_':
    """ Updates an existing outgoing SFTP connection by deleting its previous local entry
    and re-creating it under the (possibly new) name.
    """
    # Non-first workers only refresh the connector's configuration
    if not self.server.is_first_worker:
        self.server._populate_connector_config(SubprocessStartConfig(has_sftp=True))
        return

    connector_msg = deepcopy(msg)
    # Remove the entry under its previous name - the create call below re-adds it
    del self.worker_config.out_sftp[msg.old_name]
    return self._on_outconn_sftp_create(connector_msg)
# ################################################################################################################################
def _on_outconn_sftp_delete(self, msg:'bunch_') -> 'any_':
    """ Deletes an outgoing SFTP connection, both locally and in the connector subprocess.
    """
    # Non-first workers only refresh the connector's configuration
    if not self.server.is_first_worker:
        self.server._populate_connector_config(SubprocessStartConfig(has_sftp=True))
        return

    connector_msg = deepcopy(msg)
    del self.worker_config.out_sftp[msg.name]
    # Let the connector subprocess delete the connection on its end too
    return self.server.connector_sftp.invoke_connector(connector_msg)
# ################################################################################################################################
def _on_outconn_sftp_change_password(self, msg:'bunch_') -> 'None':
    """ SFTP connections do not support password changes - this always raises NotImplementedError. """
    raise NotImplementedError('No password for SFTP connections can be set')
# ################################################################################################################################
def _get_generic_impl_func(self, msg:'bunch_', *args:'any_', **kwargs:'any_') -> 'any_':
""" Returns a function/method to invoke depending on which generic connection type is given on input.
Required because some connection types (e.g. SFTP) are not managed via GenericConnection objects,
for instance, in the case of SFTP, it uses subprocesses and a different management API.
"""
conn_type = msg['type_']
msg_action = msg['action']
func_map = self.generic_impl_func_map[conn_type]
impl_func = func_map.get(msg_action)
if impl_func:
return impl_func
else:
# Ignore missing CHANGE_PASSWORD handlers because they will rarely exist across generic connection types.
if msg_action == BROKER_MSG_GENERIC.CONNECTION_CHANGE_PASSWORD.value:
pass
else:
raise Exception('No impl_func found for action `%s` -> %s', msg_action, conn_type)
# ################################################################################################################################
# ################################################################################################################################
def wait_for_basic_auth(self, name:'str', timeout:'int'=999999) -> 'bool':
    """ Blocks until the HTTP Basic Auth definition of the given name becomes available. """
    get_func = self._basic_auth_get
    return wait_for_dict_key_by_get_func(get_func, name, timeout, interval=0.5)
# ################################################################################################################################
def _basic_auth_get(self, name:'str') -> 'bunch_':
""" Implements self.basic_auth_get.
"""
return self.request_dispatcher.url_data.basic_auth_get(name)
# ################################################################################################################################
def basic_auth_get(self, name:'str') -> 'bunch_':
    """ Returns the configuration of the HTTP Basic Auth security definition of the given name. """
    out = self._basic_auth_get(name)
    return out
# ################################################################################################################################
def basic_auth_get_by_id(self, def_id:'int') -> 'bunch_':
    """ Same as basic_auth_get but looks the definition up by its ID. """
    url_data = self.request_dispatcher.url_data
    return url_data.basic_auth_get_by_id(def_id)
# ################################################################################################################################
def on_broker_msg_SECURITY_BASIC_AUTH_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates a new HTTP Basic Auth security definition. """
    event = broker_message.SECURITY.BASIC_AUTH_CREATE.value
    dispatcher.notify(event, msg)
# ################################################################################################################################
def on_broker_msg_SECURITY_BASIC_AUTH_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates an existing HTTP Basic Auth security definition, propagating the change
    to channels, outgoing connections, security groups and rate limiters.
    """
    # Update channels and outgoing connections ..
    self._update_auth(msg, code_to_name[msg.action], SEC_DEF_TYPE.BASIC_AUTH,
        self._visit_wrapper_edit, keys=('username', 'name'))

    # .. extract the newest information ..
    sec_def = self.basic_auth_get_by_id(msg.id)

    # .. update security groups ..
    for security_groups_ctx in self._yield_security_groups_ctx_items(): # type: ignore
        security_groups_ctx.set_current_basic_auth(msg.id, sec_def['username'], sec_def['password'])

    # .. and update rate limiters.
    self.server.set_up_object_rate_limiting(RATE_LIMIT.OBJECT_TYPE.SEC_DEF, msg.name, 'basic_auth')
# ################################################################################################################################
def on_broker_msg_SECURITY_BASIC_AUTH_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an HTTP Basic Auth security definition. """

    # Update channels and outgoing connections ..
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.BASIC_AUTH, self._visit_wrapper_delete)

    # .. let each security group know that the definition is gone ..
    for groups_ctx in self._yield_security_groups_ctx_items(): # type: ignore
        groups_ctx.on_basic_auth_deleted(msg.id)

    # .. and remove the definition's rate limiter.
    self.server.delete_object_rate_limiting(RATE_LIMIT.OBJECT_TYPE.SEC_DEF, msg.name)
# ################################################################################################################################
def on_broker_msg_SECURITY_BASIC_AUTH_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Changes password of an HTTP Basic Auth security definition.
    """
    # Update channels and outgoing connections ..
    self._update_auth(msg, code_to_name[msg.action], SEC_DEF_TYPE.BASIC_AUTH, self._visit_wrapper_change_password)

    # .. extract the newest information and propagate it to security groups.
    # The group update runs only if msg.id is given - previously, sec_def could be
    # referenced inside the loop without ever having been assigned when msg.id was falsy.
    if msg.id:
        sec_def = self.basic_auth_get_by_id(msg.id)
        for security_groups_ctx in self._yield_security_groups_ctx_items(): # type: ignore
            security_groups_ctx.set_current_basic_auth(msg.id, sec_def['username'], sec_def['password'])
# ################################################################################################################################
# ################################################################################################################################
def wait_for_apikey(self, name:'str', timeout:'int'=999999) -> 'bool':
    """ Blocks until the API key definition of the given name becomes available. """
    get_func = self.apikey_get
    return wait_for_dict_key_by_get_func(get_func, name, timeout, interval=0.5)
# ################################################################################################################################
def apikey_get(self, name:'str') -> 'bunch_':
    """ Returns the configuration of the API key of the given name. """
    url_data = self.request_dispatcher.url_data
    return url_data.apikey_get(name)
# ################################################################################################################################
def apikey_get_by_id(self, def_id:'int') -> 'bunch_':
    """ Same as apikey_get but looks the definition up by its ID. """
    url_data = self.request_dispatcher.url_data
    return url_data.apikey_get_by_id(def_id)
# ################################################################################################################################
def on_broker_msg_SECURITY_APIKEY_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates a new API key security definition. """
    event = broker_message.SECURITY.APIKEY_CREATE.value
    dispatcher.notify(event, msg)
# ################################################################################################################################
def on_broker_msg_SECURITY_APIKEY_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates an existing API key security definition. """

    # Update channels and outgoing connections ..
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.APIKEY, self._visit_wrapper_edit, keys=('username', 'name'))

    # .. and update rate limiters.
    self.server.set_up_object_rate_limiting(RATE_LIMIT.OBJECT_TYPE.SEC_DEF, msg.name, 'apikey')
# ################################################################################################################################
def on_broker_msg_SECURITY_APIKEY_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an API key security definition. """

    # Update channels and outgoing connections ..
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.APIKEY, self._visit_wrapper_delete)

    # .. let each security group know that the definition is gone ..
    for groups_ctx in self._yield_security_groups_ctx_items(): # type: ignore
        groups_ctx.on_apikey_deleted(msg.id)

    # .. and remove the definition's rate limiter.
    self.server.delete_object_rate_limiting(RATE_LIMIT.OBJECT_TYPE.SEC_DEF, msg.name)
# ################################################################################################################################
def on_broker_msg_SECURITY_APIKEY_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Changes password of an API key security definition. """

    # Update channels and outgoing connections ..
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.APIKEY, self._visit_wrapper_change_password)

    # .. and propagate the new password to security groups.
    for groups_ctx in self._yield_security_groups_ctx_items(): # type: ignore
        groups_ctx.set_current_apikey(msg.id, msg.password)
# ################################################################################################################################
# ################################################################################################################################
def wait_for_aws(self, name:'str', timeout:'int'=999999) -> 'bool':
    """ Blocks until the AWS security definition of the given name becomes available. """
    get_func = self.aws_get
    return wait_for_dict_key_by_get_func(get_func, name, timeout, interval=0.5)
def aws_get(self, name:'str') -> 'bunch_':
    """ Returns the configuration of the AWS security definition of the given name. """
    url_data = self.request_dispatcher.url_data
    return url_data.aws_get(name)
def on_broker_msg_SECURITY_AWS_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates a new AWS security definition. """
    event = broker_message.SECURITY.AWS_CREATE.value
    dispatcher.notify(event, msg)
def on_broker_msg_SECURITY_AWS_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates an existing AWS security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.AWS, self._visit_wrapper_edit, keys=('username', 'name'))
def on_broker_msg_SECURITY_AWS_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an AWS security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.AWS, self._visit_wrapper_delete)
def on_broker_msg_SECURITY_AWS_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Changes password of an AWS security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.AWS, self._visit_wrapper_change_password)
# ################################################################################################################################
def wait_for_ntlm(self, name:'str', timeout:'int'=999999) -> 'bool':
    """ Blocks until the NTLM security definition of the given name becomes available. """
    get_func = self.ntlm_get
    return wait_for_dict_key_by_get_func(get_func, name, timeout, interval=0.5)
def ntlm_get(self, name:'str') -> 'bunch_':
    """ Returns the configuration of the NTLM security definition of the given name. """
    url_data = self.request_dispatcher.url_data
    return url_data.ntlm_get(name)
def on_broker_msg_SECURITY_NTLM_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates a new NTLM security definition. """
    event = broker_message.SECURITY.NTLM_CREATE.value
    dispatcher.notify(event, msg)
def on_broker_msg_SECURITY_NTLM_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates an existing NTLM security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.NTLM, self._visit_wrapper_edit, keys=('username', 'name'))
def on_broker_msg_SECURITY_NTLM_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an NTLM security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.NTLM, self._visit_wrapper_delete)
def on_broker_msg_SECURITY_NTLM_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Changes password of an NTLM security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.NTLM, self._visit_wrapper_change_password)
# ################################################################################################################################
def on_broker_msg_VAULT_CONNECTION_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates a new Vault connection. """
    # Tokens travel encrypted - decrypt before passing the message on
    msg.token = self.server.decrypt(msg.token)
    self.vault_conn_api.create(msg)
    dispatcher.notify(broker_message.VAULT.CONNECTION_CREATE.value, msg)
def on_broker_msg_VAULT_CONNECTION_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an existing Vault connection and the security state derived from it. """
    # Tokens travel encrypted - decrypt before passing the message on
    msg.token = self.server.decrypt(msg.token)
    self.vault_conn_api.edit(msg)
    # Refresh channels and outgoing connections that use this definition
    self._update_auth(msg, code_to_name[msg.action], SEC_DEF_TYPE.VAULT,
        self._visit_wrapper_edit, keys=('username', 'name'))
def on_broker_msg_VAULT_CONNECTION_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes a Vault connection along with the security state derived from it. """
    self.vault_conn_api.delete(msg.name)
    self._update_auth(msg, code_to_name[msg.action], SEC_DEF_TYPE.VAULT,
        self._visit_wrapper_delete)
# ################################################################################################################################
def wait_for_jwt(self, name:'str', timeout:'int'=999999) -> 'bool':
    """ Blocks until the JWT security definition of the given name becomes available. """
    get_func = self.jwt_get
    return wait_for_dict_key_by_get_func(get_func, name, timeout, interval=0.5)
def jwt_get(self, name:'str') -> 'bunch_':
    """ Returns the configuration of the JWT security definition of the given name. """
    url_data = self.request_dispatcher.url_data
    return url_data.jwt_get(name)
def jwt_get_by_id(self, def_id:'int') -> 'bunch_':
    """ Same as jwt_get but looks the definition up by its ID. """
    url_data = self.request_dispatcher.url_data
    return url_data.jwt_get_by_id(def_id)
def on_broker_msg_SECURITY_JWT_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates a new JWT security definition. """
    event = broker_message.SECURITY.JWT_CREATE.value
    dispatcher.notify(event, msg)
def on_broker_msg_SECURITY_JWT_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates an existing JWT security definition. """
    # Update channels and outgoing connections ..
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.JWT, self._visit_wrapper_edit, keys=('username', 'name'))
    # .. and update rate limiters.
    self.server.set_up_object_rate_limiting(RATE_LIMIT.OBJECT_TYPE.SEC_DEF, msg.name, 'jwt')
def on_broker_msg_SECURITY_JWT_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes a JWT security definition. """
    # Update channels and outgoing connections ..
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.JWT, self._visit_wrapper_delete)
    # .. and remove the definition's rate limiter.
    self.server.delete_object_rate_limiting(RATE_LIMIT.OBJECT_TYPE.SEC_DEF, msg.name)
def on_broker_msg_SECURITY_JWT_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Changes password of a JWT security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.JWT, self._visit_wrapper_change_password)
# ################################################################################################################################
def get_channel_file_transfer_config(self, name:'str') -> 'stranydict':
    """ Returns the configuration of the file-transfer channel of the given name. """
    transfer_api = self.generic_conn_api[COMMON_GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER]
    return transfer_api[name]
# ################################################################################################################################
def wait_for_oauth(self, name:'str', timeout:'int'=999999) -> 'bool':
    """ Blocks until the OAuth security definition of the given name becomes available. """
    get_func = self.oauth_get
    return wait_for_dict_key_by_get_func(get_func, name, timeout, interval=0.5)
def oauth_get(self, name:'str') -> 'bunch_':
    """ Returns the configuration of the OAuth security definition of the given name. """
    url_data = self.request_dispatcher.url_data
    return url_data.oauth_get(name)
def oauth_get_by_id(self, def_id:'int') -> 'bunch_':
    """ Same as oauth_get but looks the definition up by its ID. """
    url_data = self.request_dispatcher.url_data
    return url_data.oauth_get_by_id(def_id)
def oauth_get_all_id_list(self) -> 'any_':
    """ Yields the ID of each OAuth definition configured. """
    oauth_config = self.request_dispatcher.url_data.oauth_config
    for item in oauth_config.values():
        yield item.config['id']
def on_broker_msg_SECURITY_OAUTH_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates a new OAuth security definition. """
    event = broker_message.SECURITY.OAUTH_CREATE.value
    dispatcher.notify(event, msg)
def on_broker_msg_SECURITY_OAUTH_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates an existing OAuth security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.OAUTH, self._visit_wrapper_edit, keys=('username', 'name'))
def on_broker_msg_SECURITY_OAUTH_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an OAuth security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.OAUTH, self._visit_wrapper_delete)
def on_broker_msg_SECURITY_OAUTH_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Changes password of an OAuth security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.OAUTH, self._visit_wrapper_change_password)
# ################################################################################################################################
def _update_tls_outconns(self, material_type_id:'str', update_key:'str', msg:'bunch_') -> 'None':
    """ Updates all outgoing HTTP connections that reference the TLS material from msg,
    pointing them at the material's new on-disk path and discarding pooled connections.
    """
    for config_dict, config_data in self.get_outconn_http_config_dicts():

        # Here, config_data is a string such as _zato_id_633 that points to an actual outconn name
        if isinstance(config_data, str):
            config_data = config_dict[config_data]

        # Only touch outconns that actually use this TLS material
        if config_data.config[material_type_id] == msg.id:
            config_data.conn.config[update_key] = msg.full_path
            # Connections pooled so far may still use the old material - drop them
            config_data.conn.https_adapter.clear_pool()
# ################################################################################################################################
def _add_tls_from_msg(self, config_attr:'str', msg:'bunch_', msg_key:'str') -> 'None':
    """ Stores the TLS material carried in msg[msg_key] under the given worker-config
    attribute, e.g. config_attr='tls_ca_cert' with msg_key='value'.
    """
    config = getattr(self.worker_config, config_attr)
    config[msg.name] = Bunch(config=Bunch(value=msg[msg_key]))
def update_tls_ca_cert(self, msg:'bunch_') -> 'None':
    """ Computes and attaches to msg the full on-disk path of the CA certificate it carries. """
    msg.full_path = get_tls_ca_cert_full_path(self.server.tls_dir, get_tls_from_payload(msg.value))
def update_tls_key_cert(self, msg:'bunch_') -> 'None':
    """ Computes and attaches to msg the full on-disk path of the key/cert pair it carries. """
    # The key/cert payload is transmitted encrypted - decrypt before examining it
    decrypted = self.server.decrypt(msg.auth_data)
    msg.full_path = get_tls_key_cert_full_path(self.server.tls_dir, get_tls_from_payload(decrypted, True))
# ################################################################################################################################
def on_broker_msg_SECURITY_TLS_CHANNEL_SEC_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates a new security definition basing on TLS client certificates.
    """
    # Parse it to be on the safe side - raises if the definition is malformed
    list(parse_tls_channel_security_definition(msg.value))

    dispatcher.notify(broker_message.SECURITY.TLS_CHANNEL_SEC_CREATE.value, msg)
def on_broker_msg_SECURITY_TLS_CHANNEL_SEC_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates an existing security definition basing on TLS client certificates.
    """
    # Parse it to be on the safe side - raises if the definition is malformed
    list(parse_tls_channel_security_definition(msg.value))

    self._update_auth(msg, code_to_name[msg.action], SEC_DEF_TYPE.TLS_CHANNEL_SEC,
        self._visit_wrapper_edit, keys=('name', 'value'))
def on_broker_msg_SECURITY_TLS_CHANNEL_SEC_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes a security definition basing on TLS client certificates. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.TLS_CHANNEL_SEC, self._visit_wrapper_delete)
# ################################################################################################################################
def on_broker_msg_SECURITY_TLS_KEY_CERT_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates a new TLS key/cert pair - stores it in worker config and on disk. """
    # Attach the material's future on-disk path to the message
    self.update_tls_key_cert(msg)
    self._add_tls_from_msg('tls_key_cert', msg, 'auth_data')
    # The payload travels encrypted - decrypt before writing it out
    decrypted = self.server.decrypt(msg.auth_data)
    store_tls(self.server.tls_dir, decrypted, True)
    dispatcher.notify(broker_message.SECURITY.TLS_KEY_CERT_CREATE.value, msg)
def on_broker_msg_SECURITY_TLS_KEY_CERT_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an existing TLS key/cert pair - re-stores the material and refreshes
    outgoing HTTP connections that use it.
    """
    self.update_tls_key_cert(msg)
    # Re-register under the new name (the pair may have been renamed)
    del self.worker_config.tls_key_cert[msg.old_name]
    self._add_tls_from_msg('tls_key_cert', msg, 'auth_data')
    decrypted = self.server.decrypt(msg.auth_data)
    store_tls(self.server.tls_dir, decrypted, True)
    # Point dependent outconns at the new material
    self._update_tls_outconns('security_id', 'tls_key_cert_full_path', msg)
    dispatcher.notify(broker_message.SECURITY.TLS_KEY_CERT_EDIT.value, msg)
def on_broker_msg_SECURITY_TLS_KEY_CERT_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes a TLS key/cert pair - the path is resolved first so subscribers know which file it was. """
    self.update_tls_key_cert(msg)
    dispatcher.notify(broker_message.SECURITY.TLS_KEY_CERT_DELETE.value, msg)
# ################################################################################################################################
def on_broker_msg_SECURITY_TLS_CA_CERT_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates a new TLS CA certificate - stores it in worker config and on disk. """
    # Attach the certificate's future on-disk path to the message
    self.update_tls_ca_cert(msg)
    self._add_tls_from_msg('tls_ca_cert', msg, 'value')
    store_tls(self.server.tls_dir, msg.value)
    dispatcher.notify(broker_message.SECURITY.TLS_CA_CERT_CREATE.value, msg)
def on_broker_msg_SECURITY_TLS_CA_CERT_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an existing TLS CA certificate - re-stores the material and refreshes
    outgoing HTTP connections that verify against it.
    """
    self.update_tls_ca_cert(msg)
    # Re-register under the new name (the certificate may have been renamed)
    del self.worker_config.tls_ca_cert[msg.old_name]
    self._add_tls_from_msg('tls_ca_cert', msg, 'value')
    store_tls(self.server.tls_dir, msg.value)
    # Point dependent outconns at the new material
    self._update_tls_outconns('sec_tls_ca_cert_id', 'tls_verify', msg)
    dispatcher.notify(broker_message.SECURITY.TLS_CA_CERT_EDIT.value, msg)
def on_broker_msg_SECURITY_TLS_CA_CERT_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes a TLS CA certificate - the path is resolved first so subscribers know which file it was. """
    self.update_tls_ca_cert(msg)
    dispatcher.notify(broker_message.SECURITY.TLS_CA_CERT_DELETE.value, msg)
# ################################################################################################################################
def wait_for_wss(self, name:'str', timeout:'int'=999999) -> 'bool':
    """ Blocks until the WS-Security definition of the given name becomes available. """
    get_func = self.wss_get
    return wait_for_dict_key_by_get_func(get_func, name, timeout, interval=0.5)
def wss_get(self, name:'str') -> 'bunch_':
    """ Returns the configuration of the WSS definition of the given name.
    """
    # The 'return' keyword was missing here previously, which made this method
    # always return None - unlike every sibling *_get method in this class.
    return self.request_dispatcher.url_data.wss_get(name)
def on_broker_msg_SECURITY_WSS_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates a new WS-Security definition. """
    event = broker_message.SECURITY.WSS_CREATE.value
    dispatcher.notify(event, msg)
def on_broker_msg_SECURITY_WSS_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates an existing WS-Security definition. """
    action_name = code_to_name[msg.action]
    update_keys = ('is_active', 'username', 'name',
        'nonce_freshness_time', 'reject_expiry_limit', 'password_type',
        'reject_empty_nonce_creat', 'reject_stale_tokens')
    self._update_auth(msg, action_name, SEC_DEF_TYPE.WSS, self._visit_wrapper_edit, keys=update_keys)
def on_broker_msg_SECURITY_WSS_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes a WS-Security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.WSS, self._visit_wrapper_delete)
def on_broker_msg_SECURITY_WSS_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Changes the password of a WS-Security definition. """
    action_name = code_to_name[msg.action]
    self._update_auth(msg, action_name, SEC_DEF_TYPE.WSS, self._visit_wrapper_change_password)
# ################################################################################################################################
def invoke(self, service:'str', payload:'any_', **kwargs:'any_') -> 'any_':
    """ Invokes a service by its name with request on input.
    """
    channel = kwargs.get('channel', CHANNEL.WORKER)

    # Equivalent to checking 'serialize' in kwargs first - the default applies
    # only when the caller did not provide the key at all.
    serialize = kwargs.get('serialize', True)

    request = {
        'channel': channel,
        'payload': payload,
        'data_format': kwargs.get('data_format'),
        'service': service,
        'cid': new_cid(),
        'is_async': kwargs.get('is_async'),
        'callback': kwargs.get('callback'),
        'zato_ctx': kwargs.get('zato_ctx'),
        'wsgi_environ': kwargs.get('wsgi_environ'),
        'channel_item': kwargs.get('channel_item'),
    }

    return self.on_message_invoke_service(request, channel, '', needs_response=True, serialize=serialize,
        skip_response_elem=kwargs.get('skip_response_elem'))
# ################################################################################################################################
def on_message_invoke_service(self, msg:'any_', channel:'str', action:'str', args:'any_'=None, **kwargs:'any_') -> 'any_':
    """ Triggered by external events, such as messages sent through connectors. Creates a new service instance and invokes it.

    Raises Exception if the requested service is inactive. Returns the service's response
    only if kwargs['needs_response'] is true, otherwise returns None.
    """
    zato_ctx = msg.get('zato_ctx') or {}
    cid = msg['cid']

    # The default WSGI environment that always exists ..
    wsgi_environ = {
        'zato.request_ctx.async_msg':msg,
        'zato.request_ctx.in_reply_to':msg.get('in_reply_to'),
        'zato.request_ctx.fanout_cid':zato_ctx.get('fanout_cid'),
        'zato.request_ctx.parallel_exec_cid':zato_ctx.get('parallel_exec_cid'),
    }

    # .. enriched with a WebSocket object if the message carries one ..
    if 'wsx' in msg:
        wsgi_environ['zato.wsx'] = msg['wsx']

    # .. and with the channel item if the context provides one.
    if zato_ctx:
        wsgi_environ['zato.channel_item'] = zato_ctx.get('zato.channel_item')

    data_format = msg.get('data_format') or _data_format_dict
    transport = msg.get('transport')

    # Fan-out and parallel-exec payloads arrive serialized - deserialize them first
    if msg.get('channel') in (CHANNEL.FANOUT_ON_TARGET, CHANNEL.FANOUT_ON_FINAL, CHANNEL.PARALLEL_EXEC_ON_TARGET):
        payload = loads(msg['payload'])
    else:
        payload = msg['payload']

    service, is_active = self.server.service_store.new_instance_by_name(msg['service'])
    if not is_active:
        # NOTE: msg is rebound here from the input dict to the error string
        msg = 'Could not invoke an inactive service:`{}`, cid:`{}`'.format(service.get_name(), cid)
        logger.warning(msg)
        raise Exception(msg)

    skip_response_elem=kwargs.get('skip_response_elem')

    response = service.update_handle(service.set_response_data, service, payload,
        channel, data_format, transport, self.server, self.broker_client, self, cid,
        self.worker_config.simple_io, job_type=msg.get('job_type'), wsgi_environ=wsgi_environ,
        environ=msg.get('environ'))

    # When the response element is skipped, the response is serialized to a JSON string
    if skip_response_elem:
        response = dumps(response)
        response = response.decode('utf8')

    # Invoke the callback, if any.
    if msg.get('is_async') and msg.get('callback'):

        cb_msg = {}
        cb_msg['action'] = SERVICE.PUBLISH.value
        cb_msg['service'] = msg['callback']
        cb_msg['payload'] = response if skip_response_elem else service.response.payload
        cb_msg['cid'] = new_cid()
        cb_msg['channel'] = CHANNEL.INVOKE_ASYNC_CALLBACK
        cb_msg['data_format'] = data_format
        cb_msg['transport'] = transport
        cb_msg['is_async'] = True
        # Correlate the callback with the original request
        cb_msg['in_reply_to'] = cid

        self.broker_client.invoke_async(cb_msg)

    if kwargs.get('needs_response'):

        if skip_response_elem:
            return response
        else:
            response = service.response.payload
            # SimpleIO payloads need to be serialized explicitly
            if hasattr(response, 'getvalue'):
                response = response.getvalue(serialize=kwargs.get('serialize'))
            return response
# ################################################################################################################################
def on_broker_msg_SCHEDULER_JOB_EXECUTED(self, msg:'bunch_', args:'any_'=None) -> 'any_':
    """ Invokes the service that the just-executed scheduler job points to. """

    # If statistics are disabled, all their related services will not be available
    # so if they are invoked via scheduler, they should be ignored. Ultimately,
    # the scheduler should not invoke them at all.
    if msg.name.startswith('zato.stats') and not self.server.component_enabled.stats:
        return

    return self.on_message_invoke_service(msg, CHANNEL.SCHEDULER, 'SCHEDULER_JOB_EXECUTED', args)
# ################################################################################################################################
def on_broker_msg_SCHEDULER_SET_SCHEDULER_ADDRESS(self, msg:'bunch_', args:'any_'=None) -> 'any_':
    """ Delegates the new scheduler address to its implementation service. """
    service_name = 'zato.scheduler.set-scheduler-address-impl'
    self.invoke(service_name, msg)
# ################################################################################################################################
def on_broker_msg_CHANNEL_ZMQ_MESSAGE_RECEIVED(self, msg:'bunch_', args:'any_'=None) -> 'any_':
    """ Invokes the service assigned to the ZeroMQ channel a message was received on. """
    out = self.on_message_invoke_service(msg, CHANNEL.ZMQ, 'CHANNEL_ZMQ_MESSAGE_RECEIVED', args)
    return out
# ################################################################################################################################
def on_broker_msg_OUTGOING_SQL_CREATE_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates or updates an SQL connection, including changing its
    password.
    """
    # Decrypt the password if it arrives in its encrypted form
    if msg.password.startswith(SECRETS.PREFIX):
        msg.password = self.server.decrypt(msg.password)

    # Is it a rename? If so, delete the connection first
    if msg.get('old_name') and msg.get('old_name') != msg['name']:
        del self.sql_pool_store[msg['old_name']]

    # The SQL config from the filesystem is needed to build the pool ..
    msg['fs_sql_config'] = self.server.fs_sql_config
    # .. and assigning to the store creates (or recreates) the pool itself.
    self.sql_pool_store[msg['name']] = msg
def on_broker_msg_OUTGOING_SQL_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an outgoing SQL connection pool and recreates it using the
    new password.
    """
    # First, make sure that we already have such an SQL connection
    if wait_for_dict_key(self.sql_pool_store.wrappers, msg['name']):

        # Ensure we use a clear-text form of the password
        password = self.server.decrypt(msg['password'])

        logger.info('Setting SQL password for `%s`', msg['name'])

        # If we are here, it means that the connection must be available,
        self.sql_pool_store.change_password(msg['name'], password)
    else:
        # Logger.warn is deprecated - .warning is its non-deprecated equivalent.
        self.logger.warning('SQL connection not found -> `%s` (change-password)', msg['name'])
def on_broker_msg_OUTGOING_SQL_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Removes an outgoing SQL connection pool from the store. """
    pool_name = msg['name']
    del self.sql_pool_store[pool_name]
# ################################################################################################################################
def _get_channel_rest(self, connection_type:'str', value:'str', by_name:'bool'=True) -> 'dictnone':
item_key = 'name' if by_name else 'id'
with self.update_lock:
for item in self.request_dispatcher.url_data.channel_data:
if item['connection'] == connection_type:
if item[item_key] == value:
return item
# ################################################################################################################################
def _get_outconn_rest(self, value:'str', by_name:'bool'=True) -> 'dictnone':
item_key = 'name' if by_name else 'id'
with self.update_lock:
for outconn_value in self.worker_config.out_plain_http.values():
if isinstance(outconn_value, dict):
config = outconn_value['config'] # type: dict
if config[item_key] == value:
return outconn_value
# ################################################################################################################################
def wait_for_outconn_rest(self, name:'str', timeout:'int'=999999) -> 'bool':
    """ Blocks until an outgoing REST connection of the given name becomes available. """
    return wait_for_dict_key(self.worker_config.out_plain_http, name, timeout, interval=0.5)

# ################################################################################################################################

def get_channel_rest(self, name:'str') -> 'bunch_':
    """ Returns a REST channel looked up by its name. """
    return self._get_channel_rest(CONNECTION.CHANNEL, name)

# ################################################################################################################################

def get_outconn_rest(self, name:'str') -> 'dictnone':
    """ Returns an outgoing REST connection by name, first waiting for it to appear. """
    _ = self.wait_for_outconn_rest(name)
    return self._get_outconn_rest(name)

# ################################################################################################################################

def get_outconn_rest_by_id(self, id:'str') -> 'dictnone':
    """ Returns an outgoing REST connection looked up by its ID. """
    return self._get_outconn_rest(id, False)
# ################################################################################################################################
def on_broker_msg_CHANNEL_HTTP_SOAP_CREATE_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates or updates an HTTP/SOAP channel - delegated to the URL data store. """
    url_data = self.request_dispatcher.url_data
    url_data.on_broker_msg_CHANNEL_HTTP_SOAP_CREATE_EDIT(msg, *args)
def on_broker_msg_CHANNEL_HTTP_SOAP_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an HTTP/SOAP channel along with any cache entries pointing to it. """
    # First, check if there was a cache for this channel. If so, make sure all entries
    # pointing to the channel are deleted too. Note that .get is used - item may be
    # an empty dict if the channel was not found, in which case item['cache_type']
    # would raise a KeyError.
    item = self.get_channel_rest(msg.name) or {}
    if item.get('cache_type'):
        cache = self.server.get_cache(item['cache_type'], item['cache_name'])
        cache.delete_by_prefix('http-channel-{}'.format(item['id']))

    # Delete the channel object now
    self.request_dispatcher.url_data.on_broker_msg_CHANNEL_HTTP_SOAP_DELETE(msg, *args)
# ################################################################################################################################
def _delete_config_close_wrapper(
self,
name, # type: str
config_dict, # type: ConfigDict
conn_type, # type: str
log_func # type: callable_
) -> 'None':
""" Deletes a wrapper-based connection's config and closes its underlying wrapper.
"""
# Delete the connection first, if it exists at all ..
try:
try:
wrapper = config_dict[name].conn
except(KeyError, AttributeError):
log_func('Could not access wrapper, e:`{}`'.format(format_exc()))
else:
try:
wrapper.session.close()
finally:
del config_dict[name]
except Exception:
log_func('Could not delete `{}`, e:`{}`'.format(conn_type, format_exc()))
# ################################################################################################################################
def _delete_config_close_wrapper_http_soap(self, name:'str', transport:'str', log_func:'callable_') -> 'None':
""" Deletes/closes an HTTP/SOAP outconn.
"""
# Are we dealing with plain HTTP or SOAP?
config_dict = getattr(self.worker_config, 'out_' + transport)
self._delete_config_close_wrapper(name, config_dict, 'an outgoing HTTP/SOAP connection', log_func)
def on_broker_msg_OUTGOING_HTTP_SOAP_CREATE_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates or updates an outgoing HTTP/SOAP connection, rebuilding its wrapper. """
    # With outgoing SOAP messages using suds, /tmp/suds needs to be deleted before
    # the connection can be created. This is because this method can also be invoked
    # by the ReloadWSDL action and suds will not always reload the WSDL
    # if /tmp/suds is around.
    is_suds_soap = msg.transport == URL_TYPE.SOAP and msg.serialization_type == HTTP_SOAP_SERIALIZATION_TYPE.SUDS.id
    if is_suds_soap:

        # This is how suds obtains the location of its tmp directory in suds/cache.py
        suds_tmp_dir = os.path.join(gettempdir(), 'suds')

        if os.path.exists(suds_tmp_dir):
            try:
                rmtree(suds_tmp_dir, True)
            except Exception:
                logger.warning('Could not remove suds directory `%s`, e:`%s`', suds_tmp_dir, format_exc())

    # It might be a rename - in such a case delete the connection under its previous name ..
    del_name = msg.get('old_name') or msg['name']
    self._delete_config_close_wrapper_http_soap(del_name, msg['transport'], logger.debug)

    # .. and create a new one
    wrapper = self._http_soap_wrapper_from_config(msg, has_sec_config=False)
    config_dict = getattr(self.worker_config, 'out_' + msg['transport'])
    entry = Bunch()
    entry.config = msg
    entry.conn = wrapper
    entry.ping = wrapper.ping # (just like in self.init_http)
    config_dict[msg['name']] = entry

    # Store mapping of ID -> name
    config_dict.set_key_id_data(msg)

# ################################################################################################################################

def on_broker_msg_OUTGOING_HTTP_SOAP_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an outgoing HTTP/SOAP connection (actually delegates the
    task to self._delete_config_close_wrapper_http_soap).
    """
    self._delete_config_close_wrapper_http_soap(msg['name'], msg['transport'], logger.error)
# ################################################################################################################################
def on_broker_msg_OUTGOING_REST_WRAPPER_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates the password of an outgoing REST connection, both in its dict
    configuration (encrypted) and in the underlying wrapper (clear-text).
    """
    encrypted = msg.password
    decrypted = self.server.decrypt(encrypted)

    # Find the connection by its ID among all outgoing REST connections ..
    item = self.worker_config.out_plain_http.get_by_id(msg.id)

    # .. update its dict configuration ..
    item['config']['password'] = encrypted

    # .. and its wrapper's configuration too.
    self._visit_wrapper_change_password(item['conn'], {'password': decrypted}, check_name=False)
# ################################################################################################################################
def on_broker_msg_SERVICE_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes a service from the service store, from RBAC resources and,
    unless it is an internal service, from the filesystem as well.
    """
    # Delete the service from RBAC resources
    self.rbac.delete_resource(msg.id)

    # Where to delete it from in the second step
    deployment_info = self.server.service_store.get_deployment_info(msg.impl_name)

    # If the service is not deployed, there is nothing for us to do here
    if not deployment_info:
        return

    fs_location = deployment_info['fs_location']

    # Delete it from the service store
    self.server.service_store.delete_service_data(msg.name)

    # Remove rate limiting configuration
    self.server.delete_object_rate_limiting(RATE_LIMIT.OBJECT_TYPE.SERVICE, msg.name)

    # Delete it from the filesystem, including any bytecode left over. Note that
    # other parallel servers may wish to do exactly the same so missing files are
    # ignored. Also note that internal services are never deleted from the FS.
    if not msg.is_internal:
        base_path = '.'.join(fs_location.split('.')[:-1])
        for extension in ('py', 'pyc', 'pyo'):
            try:
                os.remove('{}.{}'.format(base_path, extension))
            except OSError as e:
                if e.errno != ENOENT:
                    raise

    # It is possible that this module was already deleted from sys.modules
    # in case there was more than one service in it and we first deleted
    # one and then the other.
    try:
        service_info = self.server.service_store.services[msg.impl_name]
    except KeyError:
        return
    else:
        mod = inspect.getmodule(service_info['service_class'])
        if mod:
            del sys.modules[mod.__name__]
def on_broker_msg_SERVICE_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates a service's data in the service store. The broker 'action' key
    is dropped first - it is not part of the service's configuration.
    """
    del msg['action']
    self.server.service_store.edit_service_data(msg)
# ################################################################################################################################
def on_broker_msg_OUTGOING_FTP_CREATE_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates or updates an outgoing FTP connection, handling renames. """
    out_ftp = cast_('FTPStore', self.worker_config.out_ftp)
    out_ftp.create_edit(msg, msg.get('old_name'))

# ################################################################################################################################

def on_broker_msg_OUTGOING_FTP_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Deletes an outgoing FTP connection. """
    out_ftp = cast_('FTPStore', self.worker_config.out_ftp)
    out_ftp.delete(msg.name)

# ################################################################################################################################

def on_broker_msg_OUTGOING_FTP_CHANGE_PASSWORD(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Updates the password of an outgoing FTP connection. """
    out_ftp = cast_('FTPStore', self.worker_config.out_ftp)
    out_ftp.change_password(msg.name, msg.password)
# ################################################################################################################################
def on_broker_msg_hot_deploy(
    self,
    msg,      # type: Bunch
    service,  # type: str
    payload,  # type: any_
    action,   # type: str
    *args,    # type: any_
    **kwargs  # type: any_
) -> 'any_':
    """ Invokes a hot-deployment service under the 'hot-deploy' channel,
    assigning the message a new CID first.
    """
    msg.cid = new_cid()
    msg.service = service
    msg.payload = payload
    action_name = 'HOT_DEPLOY_{}'.format(action)
    return self.on_message_invoke_service(msg, 'hot-deploy', action_name, args, **kwargs)
# ################################################################################################################################
def on_broker_msg_HOT_DEPLOY_CREATE_SERVICE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Hot-deploys a package of services and notifies pub/sub about any
    services that were actually deployed.
    """
    # Uploads the service
    response = self.on_broker_msg_hot_deploy(
        msg, 'zato.hot-deploy.create', {'package_id': msg.package_id}, 'CREATE_SERVICE', *args,
        serialize=False, needs_response=True)

    # If there were any services deployed, let pub/sub know that this service has been just deployed -
    # pub/sub will go through all of its topics and reconfigure any of its hooks that this service implements.
    # Note that the default must be an empty dict, not the string '{}' -
    # calling .get on a string would raise AttributeError.
    services_deployed = response.get('zato_hot_deploy_create_response', {}).get('services_deployed')
    if services_deployed:
        self.pubsub.on_broker_msg_HOT_DEPLOY_CREATE_SERVICE(services_deployed)
# ################################################################################################################################
def on_broker_msg_HOT_DEPLOY_CREATE_STATIC(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Handles hot-deployment of a static file. """
    payload = {
        'data': msg.data,
        'file_name': msg.file_name,
        'full_path': msg.full_path,
        'relative_dir': msg.relative_dir
    }
    return self.on_broker_msg_hot_deploy(msg, 'zato.pickup.on-update-static', payload, 'CREATE_STATIC', *args)

# ################################################################################################################################

def on_broker_msg_HOT_DEPLOY_CREATE_USER_CONF(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Handles hot-deployment of a user config file. """
    payload = {
        'data': msg.data,
        'file_name': msg.file_name,
        'full_path': msg.full_path,
        'relative_dir': msg.relative_dir
    }
    return self.on_broker_msg_hot_deploy(msg, 'zato.pickup.on-update-user-conf', payload, 'CREATE_USER_CONF', *args)
# ################################################################################################################################
def on_broker_msg_HOT_DEPLOY_AFTER_DEPLOY(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Post-deployment steps - registers the service with RBAC and optionally
    redeploys services that depend on the one just deployed.
    """
    # Update RBAC configuration
    self.rbac.create_resource(msg.service_id)

    # Redeploy services that depended on the service just deployed.
    # Uses .get below because the feature is new in 3.1 which is why it is optional.
    redeploy = self.server.fs_server_config.hot_deploy.get('redeploy_on_parent_change', True)
    if redeploy:
        self.server.service_store.redeploy_on_parent_changed(msg.service_name, msg.service_impl_name)
# ################################################################################################################################
def on_broker_msg_SERVICE_PUBLISH(self, msg:'bunch_', args:'any_'=None) -> 'None':
    """ Invokes a service asynchronously, under the channel given in the message
    or CHANNEL.INVOKE_ASYNC if none was given.
    """
    channel = msg.get('channel') or CHANNEL.INVOKE_ASYNC
    return self.on_message_invoke_service(msg, channel, 'SERVICE_PUBLISH', args)
# ################################################################################################################################
def _on_broker_msg_cloud_create_edit(
    self,
    msg,           # type: Bunch
    conn_type,     # type: str
    config_dict,   # type: ConfigDict
    wrapper_class  # type: any_
) -> 'bunch_':
    """ Shared create/edit implementation for wrapper-based cloud connections -
    deletes any previous wrapper (also under an old name, if it was a rename)
    and builds a new one from the incoming configuration.
    """
    # It might be a rename
    del_name = msg.get('old_name') or msg['name']

    # .. delete the connection if it exists ..
    self._delete_config_close_wrapper(del_name, config_dict, conn_type, logger.debug)

    # .. and create a new one
    msg['queue_build_cap'] = float(self.server.fs_server_config.misc.queue_build_cap)
    wrapper = wrapper_class(msg, self.server)
    wrapper.build_queue()

    item = Bunch()
    item.config = msg
    item.conn = wrapper
    config_dict[msg['name']] = item

    return item
# ################################################################################################################################
def on_broker_msg_CLOUD_AWS_S3_CREATE_EDIT(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates or updates an AWS S3 connection.
    """
    msg.password = self.server.decrypt(msg.password)
    self._update_aws_config(msg)
    self._on_broker_msg_cloud_create_edit(msg, 'AWS S3', self.worker_config.cloud_aws_s3, S3Wrapper)

# ################################################################################################################################

def on_broker_msg_CLOUD_AWS_S3_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Closes and deletes an AWS S3 connection.
    """
    target = self.worker_config.cloud_aws_s3
    self._delete_config_close_wrapper(msg['name'], target, 'AWS S3', logger.debug)
# ################################################################################################################################
def on_broker_msg_OUTGOING_ODOO_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates or updates an Odoo connection.
    """
    self._on_broker_msg_cloud_create_edit(msg, 'Odoo', self.worker_config.out_odoo, OdooWrapper)

# Edit and change-password are handled exactly like create
on_broker_msg_OUTGOING_ODOO_CHANGE_PASSWORD = on_broker_msg_OUTGOING_ODOO_EDIT = on_broker_msg_OUTGOING_ODOO_CREATE

def on_broker_msg_OUTGOING_ODOO_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Closes and deletes an Odoo connection.
    """
    target = self.worker_config.out_odoo
    self._delete_config_close_wrapper(msg['name'], target, 'Odoo', logger.debug)
# ################################################################################################################################
def on_broker_msg_OUTGOING_SAP_CREATE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Creates or updates an SAP RFC connection.
    """
    self._on_broker_msg_cloud_create_edit(msg, 'SAP', self.worker_config.out_sap, SAPWrapper)

# Edit and change-password are handled exactly like create
on_broker_msg_OUTGOING_SAP_CHANGE_PASSWORD = on_broker_msg_OUTGOING_SAP_EDIT = on_broker_msg_OUTGOING_SAP_CREATE

def on_broker_msg_OUTGOING_SAP_DELETE(self, msg:'bunch_', *args:'any_') -> 'None':
    """ Closes and deletes an SAP RFC connection.
    """
    target = self.worker_config.out_sap
    self._delete_config_close_wrapper(msg['name'], target, 'SAP', logger.debug)
# ################################################################################################################################
def on_broker_msg_NOTIF_RUN_NOTIFIER(self, msg:'bunch_') -> 'None':
    """ Runs a background notifier whose serialized request is in msg.request. """
    request = loads(msg.request)
    self.on_message_invoke_service(request, CHANNEL.NOTIFIER_RUN, 'NOTIF_RUN_NOTIFIER')
# ################################################################################################################################
def _on_broker_msg_NOTIF_SQL_CREATE_EDIT(self, msg:'bunch_', source_service_type:'str') -> 'None':
msg.source_service_type = source_service_type
self.create_edit_notifier(msg, 'NOTIF_SQL', self.server.worker_store.worker_config.notif_sql)
def on_broker_msg_NOTIF_SQL_CREATE(self, msg:'bunch_') -> 'None':
self._on_broker_msg_NOTIF_SQL_CREATE_EDIT(msg, 'create')
def on_broker_msg_NOTIF_SQL_EDIT(self, msg:'bunch_') -> 'None':
self._on_broker_msg_NOTIF_SQL_CREATE_EDIT(msg, 'edit')
def on_broker_msg_NOTIF_SQL_DELETE(self, msg:'bunch_') -> 'None':
del self.server.worker_store.worker_config.notif_sql[msg.name]
# ################################################################################################################################
def update_cassandra_conn(self, msg:'bunch_') -> 'None':
    """ Rewrites TLS material paths in a Cassandra connection message into
    absolute paths under the server's repo location; empty values are left as-is.
    """
    tls_keys = ('tls_ca_certs', 'tls_client_cert', 'tls_client_priv_key')
    for key in tls_keys:
        relative_path = msg.get(key)
        if relative_path:
            msg[key] = os.path.join(self.server.repo_location, 'tls', relative_path)
def on_broker_msg_DEFINITION_CASSANDRA_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates a Cassandra connection definition. TLS paths are resolved into
    absolute ones first, so the definition is created with usable paths -
    this mirrors the order used by the edit handler.
    """
    self.update_cassandra_conn(msg)
    self.cassandra_api.create_def(msg.name, msg)
def on_broker_msg_DEFINITION_CASSANDRA_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates a Cassandra connection definition, handling renames, and points
    all dependent queries at the updated definition.
    """
    dispatcher.notify(broker_message.DEFINITION.CASSANDRA_EDIT.value, msg)

    # It might be a rename
    del_name = msg.get('old_name') or msg['name']

    self.update_cassandra_conn(msg)
    new_def = self.cassandra_api.edit_def(del_name, msg)
    self.cassandra_query_store.update_by_def(del_name, new_def)

def on_broker_msg_DEFINITION_CASSANDRA_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes a Cassandra connection definition. """
    dispatcher.notify(broker_message.DEFINITION.CASSANDRA_DELETE.value, msg)
    self.cassandra_api.delete_def(msg.name)

def on_broker_msg_DEFINITION_CASSANDRA_CHANGE_PASSWORD(self, msg:'bunch_') -> 'None':
    """ Updates the password of a Cassandra connection definition. """
    dispatcher.notify(broker_message.DEFINITION.CASSANDRA_CHANGE_PASSWORD.value, msg)
    self.cassandra_api.change_password_def(msg)
# ################################################################################################################################
def on_broker_msg_QUERY_CASSANDRA_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates a Cassandra query attached to its connection definition. """
    definition = self.cassandra_api[msg.def_name]
    self.cassandra_query_api.create(msg.name, msg, def_=definition)

def on_broker_msg_QUERY_CASSANDRA_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates a Cassandra query, handling renames. """
    # It might be a rename
    del_name = msg.get('old_name') or msg['name']
    definition = self.cassandra_api[msg.def_name]
    self.cassandra_query_api.edit(del_name, msg, def_=definition)

def on_broker_msg_QUERY_CASSANDRA_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes a Cassandra query. """
    self.cassandra_query_api.delete(msg.name)
# ################################################################################################################################
def on_broker_msg_SEARCH_ES_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates an ElasticSearch connection. """
    self.search_es_api.create(msg.name, msg)

def on_broker_msg_SEARCH_ES_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an ElasticSearch connection, handling renames. """
    # It might be a rename
    del_name = msg.get('old_name') or msg['name']
    self.search_es_api.edit(del_name, msg)

def on_broker_msg_SEARCH_ES_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes an ElasticSearch connection. """
    self.search_es_api.delete(msg.name)
# ################################################################################################################################
def on_broker_msg_SEARCH_SOLR_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates a Solr connection. """
    self._update_queue_build_cap(msg)
    self.search_solr_api.create(msg.name, msg)

def on_broker_msg_SEARCH_SOLR_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates a Solr connection, handling renames. """
    # It might be a rename
    del_name = msg.get('old_name') or msg['name']
    self._update_queue_build_cap(msg)
    self.search_solr_api.edit(del_name, msg)

def on_broker_msg_SEARCH_SOLR_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes a Solr connection. """
    self.search_solr_api.delete(msg.name)
# ################################################################################################################################
def on_broker_msg_EMAIL_SMTP_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates an SMTP connection. """
    self.email_smtp_api.create(msg.name, msg)

def on_broker_msg_EMAIL_SMTP_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an SMTP connection, handling renames. The currently stored
    password is copied onto the message before the edit.
    """
    # It might be a rename
    del_name = msg.get('old_name') or msg['name']
    msg.password = self.email_smtp_api.get(del_name, True).config.password
    self.email_smtp_api.edit(del_name, msg)

def on_broker_msg_EMAIL_SMTP_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes an SMTP connection. """
    self.email_smtp_api.delete(msg.name)

def on_broker_msg_EMAIL_SMTP_CHANGE_PASSWORD(self, msg:'bunch_') -> 'None':
    """ Updates an SMTP connection's password. """
    self.email_smtp_api.change_password(msg)
# ################################################################################################################################
def on_broker_msg_EMAIL_IMAP_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates an IMAP connection. """
    self.email_imap_api.create(msg.name, msg)

def on_broker_msg_EMAIL_IMAP_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an IMAP connection, handling renames. The currently stored
    password is copied onto the message before the edit.
    """
    # It might be a rename
    del_name = msg.get('old_name') or msg['name']
    msg.password = self.email_imap_api.get(del_name, True).config.password
    self.email_imap_api.edit(del_name, msg)

def on_broker_msg_EMAIL_IMAP_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes an IMAP connection. """
    self.email_imap_api.delete(msg.name)

def on_broker_msg_EMAIL_IMAP_CHANGE_PASSWORD(self, msg:'bunch_') -> 'None':
    """ Updates an IMAP connection's password. """
    self.email_imap_api.change_password(msg)
# ################################################################################################################################
def on_broker_msg_RBAC_PERMISSION_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates an RBAC permission. """
    self.rbac.create_permission(msg.id, msg.name)

def on_broker_msg_RBAC_PERMISSION_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an RBAC permission. """
    self.rbac.edit_permission(msg.id, msg.name)

def on_broker_msg_RBAC_PERMISSION_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes an RBAC permission. """
    self.rbac.delete_permission(msg.id)

# ################################################################################################################################

def on_broker_msg_RBAC_ROLE_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates an RBAC role. """
    self.rbac.create_role(msg.id, msg.name, msg.parent_id)

def on_broker_msg_RBAC_ROLE_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an RBAC role, possibly under a new name. """
    self.rbac.edit_role(msg.id, msg.old_name, msg.name, msg.parent_id)

def on_broker_msg_RBAC_ROLE_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes an RBAC role. """
    self.rbac.delete_role(msg.id, msg.name)

# ################################################################################################################################

def on_broker_msg_RBAC_CLIENT_ROLE_CREATE(self, msg:'bunch_') -> 'None':
    """ Associates a client definition with an RBAC role. """
    self.rbac.create_client_role(msg.client_def, msg.role_id)

def on_broker_msg_RBAC_CLIENT_ROLE_DELETE(self, msg:'bunch_') -> 'None':
    """ Removes a client definition's association with an RBAC role. """
    self.rbac.delete_client_role(msg.client_def, msg.role_id)

# ################################################################################################################################

def on_broker_msg_RBAC_ROLE_PERMISSION_CREATE(self, msg:'bunch_') -> 'None':
    """ Grants a permission to a role for a given service. """
    self.rbac.create_role_permission_allow(msg.role_id, msg.perm_id, msg.service_id)

def on_broker_msg_RBAC_ROLE_PERMISSION_DELETE(self, msg:'bunch_') -> 'None':
    """ Revokes a role's permission for a given service. """
    self.rbac.delete_role_permission_allow(msg.role_id, msg.perm_id, msg.service_id)
# ################################################################################################################################
def zmq_channel_create_edit(
    self,
    name,         # type: str
    msg,          # type: Bunch
    action,       # type: str
    lock_timeout, # type: int
    start         # type: bool
) -> 'None':
    """ Creates or edits a ZeroMQ channel while holding a distributed lock. """
    with self.server.zato_lock_manager(msg.config_cid, ttl=10, block=lock_timeout):
        self._set_up_zmq_channel(name, msg, action, start)

# ################################################################################################################################

def zmq_channel_create(self, msg:'bunch_') -> 'None':
    """ Creates a ZeroMQ channel, starting it immediately. """
    self.zmq_channel_create_edit(msg.name, msg, 'create', 0, True)
# ################################################################################################################################
def on_broker_msg_CHANNEL_ZMQ_CREATE(self, msg:'bunch_') -> 'None':
    """ Starts ZeroMQ channel connectors, but only on the server that obtained the lock. """
    if self.server.zato_lock_manager.acquire(msg.config_cid, ttl=10, block=False):
        start_connectors(self, 'zato.channel.zmq.start', msg)

# ################################################################################################################################

def on_broker_msg_CHANNEL_ZMQ_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates a ZeroMQ channel. Each worker binds to its own unique port. """
    msg = bunchify(msg)
    update_bind_port(msg, self.worker_idx)
    self.zmq_channel_create_edit(msg.old_name, msg, 'edit', 5, False)

# ################################################################################################################################

def on_broker_msg_CHANNEL_ZMQ_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes a ZeroMQ channel from whichever API (MDP or plain) owns it. """
    with self.server.zato_lock_manager(msg.config_cid, ttl=10, block=5):
        is_mdp = msg.socket_type.startswith(ZMQ.MDP)
        api = self.zmq_mdp_v01_api if is_mdp else self.zmq_channel_api
        if msg.name in api.connectors:
            api.delete(msg.name)
# ################################################################################################################################
def on_broker_msg_OUTGOING_ZMQ_CREATE(self, msg:'bunch_') -> 'None':
    """ Creates an outgoing ZeroMQ connection. """
    self.zmq_out_api.create(msg.name, msg)

def on_broker_msg_OUTGOING_ZMQ_EDIT(self, msg:'bunch_') -> 'None':
    """ Updates an outgoing ZeroMQ connection, handling renames. """
    # It might be a rename
    del_name = msg.get('old_name') or msg['name']
    self.zmq_out_api.edit(del_name, msg)

def on_broker_msg_OUTGOING_ZMQ_DELETE(self, msg:'bunch_') -> 'None':
    """ Deletes an outgoing ZeroMQ connection. """
    self.zmq_out_api.delete(msg.name)
# ################################################################################################################################
def on_ipc_message(
    self,
    msg,                        # type: any_
    success=IPC.STATUS.SUCCESS, # type: str
    failure=IPC.STATUS.FAILURE  # type: str
) -> 'None':
    """ Handles a message received over IPC - invokes the requested service and
    writes a '<status>;<response>' reply to the caller's FIFO.
    """
    # If there is target_pid we cannot continue if we are not the recipient.
    if msg.target_pid and msg.target_pid != self.server.pid:
        return

    # By default, assume we will not succeed.
    status = failure
    response = 'default-ipc-response'

    # We get here if there is no target_pid or if there is one and it matched that of ours.
    try:
        response = self.invoke(
            msg.service,
            msg.payload,
            skip_response_elem=True,
            channel=CHANNEL.IPC,
            data_format=DATA_FORMAT.JSON,
            serialize=True,
        )
        if isinstance(response, SimpleIOPayload):
            response = response.getvalue()
        status = success
    except Exception:
        response = format_exc()
        status = failure
    finally:
        # The reply is written to the FIFO no matter the outcome above.
        data = '{};{}'.format(status, response)
        try:
            with open(msg.reply_to_fifo, 'wb') as fifo:
                _ = fifo.write(data if isinstance(data, bytes) else data.encode('utf'))
        except Exception:
            logger.warning('Could not write to FIFO, m:`%s`, r:`%s`, s:`%s`, e:`%s`', msg, response, status, format_exc())
# ################################################################################################################################
| 124,795
|
Python
|
.py
| 2,047
| 51.590132
| 130
| 0.543206
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,236
|
groups.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/groups.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.api import Groups
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.typing_ import any_
from zato.server.base.worker import WorkerStore
# ################################################################################################################################
# ################################################################################################################################
class SecurityGroups(WorkerImpl):
    """ Security groups-related functionality for worker objects.
    """

# ################################################################################################################################

    def _yield_security_groups_ctx_items(
        self:'WorkerStore' # type: ignore
    ) -> 'any_':
        """ Yields the security-groups context of each HTTP channel that has one. """
        for channel_item in self.worker_config.http_soap:
            groups_ctx = channel_item.get('security_groups_ctx')
            if groups_ctx:
                yield groups_ctx

# ################################################################################################################################

    def on_broker_msg_Groups_Edit_Member_List(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Adds members to or removes them from a group, updating each
        channel's security-groups context accordingly.
        """
        # Member IDs arrive as dash-separated strings - the numeric part
        # is the second dash-delimited element.
        member_id_list = [int(elem.split('-')[1]) for elem in msg.member_id_list]

        for groups_ctx in self._yield_security_groups_ctx_items():
            if msg.group_action == Groups.Membership_Action.Add:
                update_func = groups_ctx.on_member_added_to_group
            else:
                update_func = groups_ctx.on_member_removed_from_group
            for member_id in member_id_list:
                update_func(msg.group_id, member_id)

# ################################################################################################################################

    def on_broker_msg_Groups_Delete(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Removes a deleted group from every channel's security-groups context. """
        for groups_ctx in self._yield_security_groups_ctx_items():
            groups_ctx.on_group_deleted(msg.id)
# ################################################################################################################################
| 2,983
|
Python
|
.py
| 51
| 51.921569
| 130
| 0.388393
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,237
|
amqp_.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/amqp_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.model.amqp_ import AMQPConnectorConfig
from zato.common.util.api import spawn_greenlet, start_connectors
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.typing_ import any_, dictnone, strnone
from zato.server.base.worker import WorkerStore
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class AMQP(WorkerImpl):
""" AMQP-related functionality for worker objects.
"""
def amqp_connection_create(
    self:'WorkerStore', # type: ignore
    msg, # type: Bunch
) -> 'None':
    """ Creates and starts an AMQP connection; the connection is always
    marked as active first.
    """
    msg.is_active = True
    with self.update_lock:
        self.amqp_api.create(msg.name, msg, self.invoke, needs_start=True)
# ################################################################################################################################
def on_broker_msg_DEFINITION_AMQP_CREATE(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
start_connectors(self, 'zato.connector.amqp_.start', msg)
# ################################################################################################################################
def on_broker_msg_DEFINITION_AMQP_EDIT(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
# Convert to a dataclass first
msg = AMQPConnectorConfig.from_dict(msg) # type: ignore
# Definitions are always active
msg.is_active = True
# Make sure connection passwords are always in clear text
msg.password = self.server.decrypt(msg.password)
with self.update_lock:
# Update outconn -> definition mappings
for out_name, def_name in self.amqp_out_name_to_def.items():
if def_name == msg.old_name:
self.amqp_out_name_to_def[out_name] = msg.name
# Update definition itself
self.amqp_api.edit(msg.old_name, msg)
# ################################################################################################################################
def on_broker_msg_DEFINITION_AMQP_DELETE(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
with self.update_lock:
to_del = []
for out_name, def_name in self.amqp_out_name_to_def.items():
if def_name == msg.name:
to_del.append(out_name)
for out_name in to_del:
del self.amqp_out_name_to_def[out_name]
self.amqp_api.delete(msg.name)
# ################################################################################################################################
def on_broker_msg_DEFINITION_AMQP_CHANGE_PASSWORD(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
with self.update_lock:
self.amqp_api.change_password(msg.name, msg)
# ################################################################################################################################
def on_broker_msg_OUTGOING_AMQP_CREATE(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
with self.update_lock:
self.amqp_out_name_to_def[msg.name] = msg.def_name
self.amqp_api.create_outconn(msg.def_name, msg)
# ################################################################################################################################
def on_broker_msg_OUTGOING_AMQP_EDIT(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
with self.update_lock:
del self.amqp_out_name_to_def[msg.old_name]
self.amqp_out_name_to_def[msg.name] = msg.def_name
self.amqp_api.edit_outconn(msg.def_name, msg)
# ################################################################################################################################
def on_broker_msg_OUTGOING_AMQP_DELETE(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
with self.update_lock:
del self.amqp_out_name_to_def[msg.name]
self.amqp_api.delete_outconn(msg.def_name, msg)
# ################################################################################################################################
def on_broker_msg_CHANNEL_AMQP_CREATE(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
with self.update_lock:
self.amqp_api.create_channel(msg.def_name, msg)
# ################################################################################################################################
def on_broker_msg_CHANNEL_AMQP_EDIT(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
with self.update_lock:
self.amqp_api.edit_channel(msg.def_name, msg)
# ################################################################################################################################
def on_broker_msg_CHANNEL_AMQP_DELETE(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
) -> 'None':
with self.update_lock:
self.amqp_api.delete_channel(msg.def_name, msg)
# ################################################################################################################################
def amqp_invoke(
self:'WorkerStore', # type: ignore
msg, # type: Bunch
out_name, # type: str
exchange='/', # type: str
routing_key=None, # type: strnone
properties=None, # type: dictnone
headers=None, # type: dictnone
**kwargs # type: any_
) -> 'any_':
""" Invokes a remote AMQP broker sending it a message with the specified routing key to an exchange through
a named outgoing connection. Optionally, lower-level details can be provided in properties and they will be
provided directly to the underlying AMQP library (kombu). Headers are AMQP headers attached to each message.
"""
with self.update_lock:
def_name = self.amqp_out_name_to_def[out_name]
return self.amqp_api.invoke(def_name, out_name, msg, exchange, routing_key, properties, headers, **kwargs)
def _amqp_invoke_async(
self:'WorkerStore', # type: ignore
*args, # type: any_
**kwargs # type: any_
) -> 'None':
try:
self.amqp_invoke(*args, **kwargs)
except Exception:
logger.warning(format_exc())
def amqp_invoke_async(
self:'WorkerStore', # type: ignore
*args, # type: any_
**kwargs # type: any_
) -> 'None':
spawn_greenlet(self._amqp_invoke_async, *args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
| 8,112
|
Python
|
.py
| 158
| 43.734177
| 130
| 0.419848
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,238
|
common.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/common.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.base.parallel import ParallelServer
from zato.server.base.worker import WorkerStore
from zato.server.connection.http_soap.url_data import URLData
ParallelServer = ParallelServer
URLData = URLData
# ################################################################################################################################
# ################################################################################################################################
class WorkerImpl:
    """ Base class for objects that implement worker functionality. Does not implement anything itself,
    instead serving as a common marker for all derived subclasses.
    """
    # Populated at runtime by the server / derived classes
    server: 'ParallelServer'
    url_data: 'URLData'
    worker_idx: 'int'

# ################################################################################################################################

    def on_broker_msg_Common_Sync_Objects(self:'WorkerStore', msg:'Bunch') -> 'None':
        """ Hands off object synchronisation to the implementing service, discarding its response. """
        response = self.server.invoke('pub.zato.common.sync-objects-impl', msg)
        _ = response
| 1,776
|
Python
|
.py
| 28
| 59.964286
| 130
| 0.347326
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,239
|
web_socket.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/web_socket.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Bunch
from bunch import bunchify
# Zato
from zato.common.util.api import start_connectors, wait_for_dict_key
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.typing_ import any_, strdict
from zato.server.base.worker import WorkerStore
from zato.server.connection.connector import ConnectorStore
# ################################################################################################################################
# ################################################################################################################################
class WebSocket(WorkerImpl):
    """ WebSocket-related functionality for worker objects - routes broker messages to the
    WebSocket connector API and keeps per-channel configuration up to date.
    """
    web_socket_api: 'ConnectorStore'

# ################################################################################################################################

    def web_socket_channel_create_edit(
        self:'WorkerStore', # type: ignore
        name, # type: str
        msg, # type: Bunch
        action, # type: str
        lock_timeout, # type: int
        start # type: bool
    ) -> 'None':
        """ Creates or updates a WebSocket channel definition under a distributed lock. """
        with self.server.zato_lock_manager(msg.config_cid, ttl=10, block=lock_timeout):

            # The name to delete may actually be an old name in case this is a rename ..
            config_name:'str' = msg.get('name', '') or msg.get('old_name', '')

            # .. drop any previous configuration stored under that name ..
            _:'any_' = self.worker_config.channel_web_socket.pop(config_name, None)

            # .. build the new configuration ..
            channel_config = {
                'config': msg
            }

            # .. assign it for later use ..
            self.worker_config.channel_web_socket[config_name] = channel_config

            # .. and hand the work over to the low-level connector functionality.
            connector_func = getattr(self.web_socket_api, action)
            connector_func(name, msg, self.on_message_invoke_service, self.request_dispatcher.url_data.authenticate_web_socket)

# ################################################################################################################################

    def get_web_socket_channel_id_by_name(
        self: 'WorkerStore', # type: ignore
        channel_name: 'str'
    ) -> 'int':
        """ Returns the numeric ID of a WebSocket channel, waiting briefly for its configuration to appear. """
        wait_for_dict_key(self.worker_config.channel_web_socket, channel_name, timeout=5) # type: ignore
        channel_item:'strdict' = self.worker_config.channel_web_socket.get(channel_name)
        channel_config = channel_item['config']
        channel_id:'int' = channel_config['id']
        return channel_id

# ################################################################################################################################

    def web_socket_channel_create(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Creates and then starts a WebSocket channel. """
        self.web_socket_channel_create_edit(msg.name, msg, 'create', 0, True)
        self.web_socket_api.start(msg.name)

# ################################################################################################################################

    def on_broker_msg_CHANNEL_WEB_SOCKET_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Starts the connector for a new channel - only if this worker wins the distributed lock. """
        if self.server.zato_lock_manager.acquire(msg.config_cid, ttl=10, block=False):
            start_connectors(self, 'zato.channel.web-socket.start', msg)

# ################################################################################################################################

    def on_broker_msg_CHANNEL_WEB_SOCKET_EDIT(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates an existing WebSocket channel in place. """
        msg = bunchify(msg)
        self.web_socket_channel_create_edit(msg.old_name, msg, 'edit', 5, False)

# ################################################################################################################################

    def on_broker_msg_CHANNEL_WEB_SOCKET_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Deletes a WebSocket channel under a distributed lock. """
        with self.server.zato_lock_manager(msg.config_cid, ttl=10, block=5):
            self.web_socket_api.delete(msg.name)

# ################################################################################################################################

    def on_broker_msg_CHANNEL_WEB_SOCKET_BROADCAST(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Broadcasts a message to all clients connected to the given channel. """
        request = {
            'channel_name': msg.channel_name,
            'data': msg.data
        }
        self.invoke('zato.channel.web-socket.broadcast', request)
# ################################################################################################################################
| 5,141
|
Python
|
.py
| 94
| 47.553191
| 130
| 0.432842
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,240
|
pubsub.py
|
zatosource_zato/code/zato-server/src/zato/server/base/worker/pubsub.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.pubsub import MSG_PREFIX as PUBSUB_MSG_PREFIX
from zato.server.base.worker.common import WorkerImpl
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.base.worker import WorkerStore
from zato.server.pubsub import PubSub as ServerPubSub
ServerPubSub = ServerPubSub
# ################################################################################################################################
# ################################################################################################################################
class PubSub(WorkerImpl):
    """ Publish/subscribe-related functionality for worker objects - delegates broker
    messages to the server-wide pub/sub implementation.
    """
    pubsub: 'ServerPubSub'

# ################################################################################################################################

    def on_broker_msg_PUBSUB_TOPIC_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Creates a new pub/sub topic. """
        self.pubsub.create_topic_object(msg)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_TOPIC_EDIT(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates a topic - the one to replace is looked up by its old name in case of a rename. """
        old_name = msg.get('old_name')
        del_name = old_name or msg['name']
        self.pubsub.edit_topic(del_name, msg)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_TOPIC_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Deletes a topic by its ID. """
        self.pubsub.delete_topic(msg.id)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_ENDPOINT_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Creates a new pub/sub endpoint. """
        self.pubsub.create_endpoint(msg)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_ENDPOINT_EDIT(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates a pub/sub endpoint. """
        self.pubsub.edit_endpoint(msg)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_ENDPOINT_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Deletes a pub/sub endpoint by its ID. """
        self.pubsub.delete_endpoint(msg.id)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_SUBSCRIPTION_CREATE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Creates a subscription; service-based subscriptions get extra configuration applied. """
        self.pubsub.create_subscription_object(msg)
        if msg.sub_key.startswith(PUBSUB_MSG_PREFIX.SERVICE_SK):
            self.pubsub.set_config_for_service_subscription(msg.sub_key)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_SUBSCRIPTION_EDIT(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Updates a subscription. """
        _ = msg.pop('action') # Not needed by pub/sub
        self.pubsub.edit_subscription(msg)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_SUBSCRIPTION_DELETE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Removes subscriptions for all the given topic sub keys. """
        self.pubsub.unsubscribe(msg.topic_sub_keys)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_SUB_KEY_SERVER_SET(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Records which server is the delivery server for a sub key. """
        self.pubsub.set_sub_key_server(msg)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_QUEUE_CLEAR(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Clears the delivery task queue for a sub key. """
        self.pubsub.clear_task(msg.sub_key)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_WSX_CLIENT_SUB_KEY_SERVER_REMOVE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Removes the WebSocket sub key server mapping. """
        self.pubsub.remove_ws_sub_key_server(msg)

# ################################################################################################################################

    def on_broker_msg_PUBSUB_DELIVERY_SERVER_CHANGE(
        self:'WorkerStore', # type: ignore
        msg, # type: Bunch
    ) -> 'None':
        """ Migrates a delivery task away from this process if it is the current delivery server. """
        if msg.old_delivery_server_id == self.server.id:
            old_server = self.pubsub.get_delivery_server_by_sub_key(msg.sub_key)
            # Only migrate when the task really belongs to this very process
            if old_server and old_server.server_pid == self.server.pid:
                self.pubsub.migrate_delivery_server(msg)
# ################################################################################################################################
| 5,834
|
Python
|
.py
| 109
| 47.183486
| 130
| 0.372167
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,241
|
meta.py
|
zatosource_zato/code/zato-server/src/zato/server/service/meta.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from inspect import getmodule, isclass
from itertools import chain
from json import dumps
from logging import getLogger
from time import time
from traceback import format_exc
# Bunch
from bunch import bunchify
# SQLAlchemy
from sqlalchemy import Boolean, Integer
from sqlalchemy.exc import IntegrityError
# Zato
from zato.common.api import ZATO_NOT_GIVEN
from zato.common.odb.model import Base, Cluster
from zato.common.util.api import parse_literal_dict
from zato.common.util.sql import elems_with_opaque, set_instance_opaque_attrs
from zato.server.connection.http_soap import BadRequest
from zato.server.service import AsIs, Bool as BoolSIO, Int as IntSIO
from zato.server.service.internal import AdminSIO, GetListAdminSIO
# ################################################################################################################################
# Type checking
if 0:
from zato.server.service import Service
# For pyflakes
Service = Service
# ################################################################################################################################
logger = getLogger(__name__)

# ################################################################################################################################

# Sentinel - used by CreateEditMeta.handle to detect attributes that are missing from an SQL instance
singleton = object()

# ################################################################################################################################

# Maps SQLAlchemy column types to their SimpleIO element counterparts
sa_to_sio = {
    Boolean: BoolSIO,
    Integer: IntSIO
}

# ################################################################################################################################

# Maps service class names to the infix used in SimpleIO request/response element names
req_resp = {
    'Create': 'create',
    'Edit': 'edit',
    'GetList': 'get_list',
    'Delete': 'delete',
    'Ping': 'ping',
}
# ################################################################################################################################
def _is_column_required(column):
return not (bool(column.nullable) is True)
# ################################################################################################################################
def get_columns_to_visit(columns, is_required):
out = []
# Models with inheritance may have multiple attributes of the same name,
# e.g. VaultConnection.id will have SecBase.id and we need to make sure only one of them is returned.
names_seen = set()
for elem in columns:
if is_required:
if not _is_column_required(elem):
continue
else:
if _is_column_required(elem):
continue
if elem.name not in names_seen:
names_seen.add(elem.name)
out.append(elem)
else:
continue
return out
# ################################################################################################################################
def get_io(attrs, elems_name, is_edit, is_required, is_output, is_get_list, has_cluster_id):
    """ Builds a list of SimpleIO elements out of attrs[elems_name].
    If that value is an SQLAlchemy model, its columns are converted to SIO elements
    (wrapping Boolean/Integer columns in Bool/Int per sa_to_sio); otherwise an empty
    list is returned. Various columns are filtered out depending on the flags given.
    """

    # This can be either a list or an SQLAlchemy object
    elems = attrs.get(elems_name) or []
    columns = []

    # Generate elems out of SQLAlchemy tables, including calls to SIOElem's subclasses, such as Bool or Int.

    if elems and isclass(elems) and issubclass(elems, Base):
        columns_to_visit = [elem for elem in elems._sa_class_manager.mapper.mapped_table.columns]
        columns_to_visit = get_columns_to_visit(columns_to_visit, is_required)

        for column in columns_to_visit:

            # Each model has a cluster_id column but it's not really needed for anything on output
            if column.name == 'cluster_id' and is_output:
                continue

            # We already have cluster_id and don't need a SIOElem'd one.
            if column.name == 'cluster_id' and has_cluster_id:
                continue

            if column.name in attrs.skip_input_params:
                continue

            # We never return passwords
            if column.name == 'password' and is_get_list:
                continue

            # 'id' is kept only for Edit services
            if column.name == 'id':
                if is_edit:
                    pass
                else:
                    continue # Create or GetList

            # Wrap the column in the SIO type matching its SQL type, honouring AsIs overrides ..
            for k, v in sa_to_sio.items():
                if isinstance(column.type, k):
                    if column.name in attrs.request_as_is:
                        wrapper = AsIs
                    else:
                        wrapper = v
                    columns.append(wrapper(column.name))
                    break
            else:
                # .. no SIO type matched - append the plain name (or AsIs-wrapped name).
                if column.name in attrs.request_as_is:
                    columns.append(AsIs(column.name))
                else:
                    columns.append(column.name)

    return columns
# ################################################################################################################################
def update_attrs(cls, name, attrs):
    """ Collects metadata for a service class being created by one of the metaclasses below.
    Reads module-level configuration (elem, label, model, hooks, extra SIO lists, flags)
    from the module that defines cls and merges it into a Bunch, then sets the is_* flags
    and input/output defaults according to the service name (GetList/Create/Edit/Delete).
    """
    attrs = bunchify(attrs)
    mod = getmodule(cls)

    # Required module-level attributes
    attrs.elem = mod.elem
    attrs.label = mod.label
    attrs.model = mod.model
    attrs.get_data_func = mod.list_func

    # Optional module-level attributes, each with a default
    attrs.output_required_extra = getattr(mod, 'output_required_extra', [])
    attrs.output_optional_extra = getattr(mod, 'output_optional_extra', [])
    attrs.def_needed = getattr(mod, 'def_needed', False)
    attrs.initial_input = getattr(mod, 'initial_input', {})
    attrs.skip_input_params = getattr(mod, 'skip_input_params', [])
    attrs.skip_output_params = getattr(mod, 'skip_output_params', [])
    attrs.pre_opaque_attrs_hook = getattr(mod, 'pre_opaque_attrs_hook', None)
    attrs.instance_hook = getattr(mod, 'instance_hook', None)
    attrs.response_hook = getattr(mod, 'response_hook', None)
    attrs.delete_hook = getattr(mod, 'delete_hook', None)
    attrs.broker_message_hook = getattr(mod, 'broker_message_hook', None)
    attrs.extra_delete_attrs = getattr(mod, 'extra_delete_attrs', [])
    attrs.input_required_extra = getattr(mod, 'input_required_extra', [])
    attrs.input_optional_extra = getattr(mod, 'input_optional_extra', [])
    attrs.create_edit_input_required_extra = getattr(mod, 'create_edit_input_required_extra', [])
    attrs.create_edit_input_optional_extra = getattr(mod, 'create_edit_input_optional_extra', [])
    attrs.create_edit_rewrite = getattr(mod, 'create_edit_rewrite', [])
    attrs.create_edit_force_rewrite = getattr(mod, 'create_edit_force_rewrite', set())
    attrs.check_existing_one = getattr(mod, 'check_existing_one', True)
    attrs.request_as_is = getattr(mod, 'request_as_is', [])
    attrs.sio_default_value = getattr(mod, 'sio_default_value', None)
    attrs.get_list_docs = getattr(mod, 'get_list_docs', None)
    attrs.delete_require_instance = getattr(mod, 'delete_require_instance', True)
    attrs.skip_create_integrity_error = getattr(mod, 'skip_create_integrity_error', False)
    attrs.skip_if_exists = getattr(mod, 'skip_if_exists', False)
    attrs.skip_if_missing = getattr(mod, 'skip_if_missing', False)

    # Filled in later by the metaclass handlers
    attrs._meta_session = None

    # Flags describing what kind of a service this is - exactly one will be set below
    attrs.is_get_list = False
    attrs.is_create = False
    attrs.is_edit = False
    attrs.is_create_edit = False
    attrs.is_delete = False

    if name == 'GetList':
        attrs.is_get_list = True
        # GetList derives both output lists from the model's columns
        attrs.output_required = attrs.model
        attrs.output_optional = attrs.model
    else:
        # All non-GetList services publish broker messages
        attrs.broker_message = mod.broker_message
        attrs.broker_message_prefix = mod.broker_message_prefix

        if name in('Create', 'Edit'):
            # Create/Edit derive both input lists from the model's columns
            attrs.input_required = attrs.model
            attrs.input_optional = attrs.model
            attrs.is_create = name == 'Create'
            attrs.is_edit = name == 'Edit'
            attrs.is_create_edit = True
        elif name == 'Delete':
            attrs.is_delete = True

    return attrs
# ################################################################################################################################
# ################################################################################################################################
class AdminServiceMeta(type):
    """ Base metaclass for admin services - builds the SimpleIO definition
    for the service class being created.
    """

    @staticmethod
    def get_sio(
        *,
        attrs,                    # Bunch produced by update_attrs
        name,                     # Service name: 'Create', 'Edit', 'GetList' or 'Delete'
        input_required=None,      # Explicit required input, defaults to ['cluster_id']
        input_optional=None,      # Explicit optional input, defaults to the base class's
        output_required=None,     # Explicit required output, defaults to ['id', 'name']
        is_list=True,             # Is this a list-returning (GetList) service?
        class_=None,              # The service class being created
        skip_input_required=False # If True, do not default input_required to ['cluster_id']
    ):
        """ Returns a SimpleIO class combining base defaults, model-derived elements
        and the module-level *_extra lists collected in attrs.
        """
        _BaseClass = GetListAdminSIO if is_list else AdminSIO

        if not input_optional:
            input_optional = list(_BaseClass.input_optional) if hasattr(_BaseClass, 'input_optional') else []

        if not input_required:
            if skip_input_required:
                input_required = []
            else:
                input_required = ['cluster_id']

        sio = {
            'input_required': input_required,
            'input_optional': input_optional,
            'output_required': output_required if output_required is not None else ['id', 'name'],
        }

        class SimpleIO(_BaseClass):
            request_elem = 'zato_{}_{}_request'.format(attrs.elem, req_resp[name])
            response_elem = 'zato_{}_{}_response'.format(attrs.elem, req_resp[name])
            default_value = attrs['sio_default_value']
            input_required = sio['input_required'] + attrs['input_required_extra']
            input_optional = sio['input_optional'] + attrs['input_optional_extra']

            # Drop any input parameters the module asked to skip
            # (this loop runs in the class body, at class-creation time)
            for param in attrs['skip_input_params']:
                if param in input_required:
                    input_required.remove(param)

            output_required = sio['output_required'] + attrs['output_required_extra']
            output_optional = attrs['output_optional_extra']

        # Extend each of the four input/output lists with elements derived from the SQL model
        for io in 'input', 'output':
            for req in 'required', 'optional':
                _name = '{}_{}'.format(io, req)

                is_required = 'required' in req
                is_output = 'output' in io
                is_get_list = name=='GetList'

                sio_elem = getattr(SimpleIO, _name)
                has_cluster_id = 'cluster_id' in sio_elem
                sio_to_add = get_io(
                    attrs, _name, attrs.get('is_edit'), is_required, is_output, is_get_list, has_cluster_id)
                sio_elem.extend(sio_to_add)

                # Create/Edit services get their extra input elements too
                if attrs.is_create_edit and is_required:
                    sio_elem.extend(attrs.create_edit_input_required_extra)

                if attrs.is_create_edit and (not is_required):
                    sio_elem.extend(attrs.create_edit_input_optional_extra)

                # Sorts and removes duplicates
                setattr(SimpleIO, _name, list(set(sio_elem)))

        # Remove any output parameters the module asked to skip
        for skip_name in attrs.skip_output_params:
            for attr_names in chain([SimpleIO.output_required, SimpleIO.output_optional]):
                if skip_name in attr_names:
                    attr_names.remove(skip_name)

        # Freeze the lists so they cannot be mutated later on
        SimpleIO.input_required = tuple(SimpleIO.input_required)
        SimpleIO.input_optional = tuple(SimpleIO.input_optional)
        SimpleIO.output_required = tuple(SimpleIO.output_required)
        SimpleIO.output_optional = tuple(SimpleIO.output_optional)

        return SimpleIO
# ################################################################################################################################
# ################################################################################################################################
class GetListMeta(AdminServiceMeta):
    """ A metaclass customizing the creation of services returning lists of objects.
    """
    def __init__(cls, name, bases, attrs):
        # Collect module-level metadata, then generate SimpleIO, handle and get_data for the class
        attrs = update_attrs(cls, name, attrs)
        cls.__doc__ = 'Returns a list of {}.'.format(attrs.get_list_docs)
        cls.SimpleIO = GetListMeta.get_sio(attrs=attrs, name=name, is_list=True)
        cls.handle = GetListMeta.handle(attrs)
        cls.get_data = GetListMeta.get_data(attrs.get_data_func)
        return super(GetListMeta, cls).__init__(cls)

    @staticmethod
    def get_data(get_data_func):
        """ Returns the get_data method to be attached to the service class. """
        def get_data_impl(self, session):
            # type: (Service, object)
            # Delegates to the service's paginated search over the module's list_func
            return self._search(get_data_func, session, self.request.input.cluster_id, False)
        return get_data_impl

    @staticmethod
    def handle(attrs):
        """ Returns the handle method to be attached to the service class. """
        def handle_impl(self:'Service') -> 'None':
            input = self.request.input
            # Fall back to this server's cluster if none was given on input
            input.cluster_id = input.get('cluster_id') or self.server.cluster_id

            with closing(self.odb.session()) as session:
                elems = elems_with_opaque(self.get_data(session))
                self.response.payload[:] = elems

            # Give the module a chance to post-process the response
            if attrs.response_hook:
                attrs.response_hook(self, self.request.input, None, attrs, 'get_list')
        return handle_impl
# ################################################################################################################################
# ################################################################################################################################
class CreateEditMeta(AdminServiceMeta):
    """ A metaclass customizing the creation of services that create or update objects.
    """
    is_create = False
    output_required = ('id', 'name')

    def __init__(cls, name, bases, attrs):
        # Collect module-level metadata, then generate SimpleIO and handle for the class
        attrs = update_attrs(cls, name, attrs)
        verb = 'Creates' if attrs.is_create else 'Updates'
        cls.__doc__ = '{} {}.'.format(verb, attrs.label)
        cls.SimpleIO = CreateEditMeta.get_sio(attrs=attrs, name=name, is_list=False, class_=cls)
        cls.handle = CreateEditMeta.handle(attrs)
        return super(CreateEditMeta, cls).__init__(cls)

    @staticmethod
    def handle(attrs):
        """ Returns the handle method to be attached to the service class.
        The generated handler creates or updates an SQL instance, publishes a broker
        message on success and copies selected attributes to the response payload.
        """
        def handle_impl(self):
            # type: (Service)
            input = self.request.input
            input.cluster_id = input.get('cluster_id') or self.server.cluster_id
            input.update(attrs.initial_input)
            verb = 'edit' if attrs.is_edit else 'create'
            old_name = None
            has_integrity_error = False

            # Try to parse the opaque elements into a dict ..
            input.opaque1 = parse_literal_dict(input.opaque1)

            # .. only to turn it into a JSON string suitable for SQL storage ..
            input.opaque1 = dumps(input.opaque1)

            with closing(self.odb.session()) as session:
                try:
                    # Expose the session to hooks via attrs
                    attrs._meta_session = session

                    if attrs.check_existing_one:

                        # Let's see if we already have an instance of that name before committing
                        # any stuff to the database. However, this is wrapped in an if condition
                        # because certain models don't have the .name attribute.
                        existing_one = session.query(attrs.model).\
                            filter(Cluster.id==input.cluster_id).\
                            filter(attrs.model.name==input.name)

                        # For edits, an instance of the same name is fine as long as it is a different row
                        if attrs.is_edit:
                            existing_one = existing_one.filter(attrs.model.id!=input.id)

                        existing_one = existing_one.first()

                        if existing_one:
                            if attrs.is_create:
                                if attrs.skip_if_exists:
                                    pass # Ignore it explicitly
                                else:
                                    raise BadRequest(self.cid, '{} `{}` already exists in this cluster'.format(
                                        attrs.label[0].upper() + attrs.label[1:], input.name))
                            else:
                                if attrs.skip_if_missing:
                                    pass # Ignore it explicitly
                                else:
                                    raise BadRequest(self.cid, 'No such {} `{}` in this cluster'.format(
                                        attrs.label[0].upper() + attrs.label[1:], input.name))

                    # Edit works on an existing row, create builds a fresh one tied to our cluster
                    if attrs.is_edit:
                        instance = session.query(attrs.model).filter_by(id=input.id).one()
                        old_name = instance.name
                    else:
                        instance = self._new_zato_instance_with_cluster(attrs.model)

                    # Update the instance with data received on input, however,
                    # note that this may overwrite some of existing attributes
                    # if they are empty on input. If it's not desired,
                    # set skip_input_params = ['...'] to ignore such input parameters.
                    instance.fromdict(input, exclude=['password'], allow_pk=True)

                    # Invoke a hook that will set any additional opaque attrs
                    # that are required but were possibly not given on input.
                    if attrs.pre_opaque_attrs_hook:
                        attrs.pre_opaque_attrs_hook(self, input, instance, attrs)

                    # Populate all the opaque attrs now
                    set_instance_opaque_attrs(instance, input)

                    # Now that we have an instance which is known not to be a duplicate
                    # we can possibly invoke a customization function before we commit
                    # anything to the database.
                    if attrs.instance_hook:
                        attrs.instance_hook(self, input, instance, attrs)

                    session.add(instance)
                    try:
                        session.commit()
                    except IntegrityError:
                        # Optionally tolerated, e.g. for idempotent imports
                        if not attrs.skip_create_integrity_error:
                            raise
                        else:
                            has_integrity_error = True

                except Exception:
                    session.rollback()
                    raise

                else:
                    # Resolve the definition's name if this model points to one
                    if attrs.def_needed:
                        def_ = session.query(attrs.def_needed).filter_by(id=input.def_id).one()
                        input.def_name = def_.name

                    # Notify all the servers about the change
                    action = getattr(attrs.broker_message, attrs.broker_message_prefix + verb.upper()).value
                    input.id = instance.id
                    input.action = action
                    input.old_name = old_name

                    if attrs.broker_message_hook:
                        attrs.broker_message_hook(self, input, instance, attrs, 'create_edit')

                    if not has_integrity_error:
                        self.broker_client.publish(input)

                # Copy the relevant attributes to the response payload - values missing
                # from the instance (or force-rewritten ones) are taken from input instead.
                to_rewrite = chain(
                    attrs.create_edit_rewrite,
                    attrs.create_edit_force_rewrite,
                    self.SimpleIO.output_required
                )
                for name in to_rewrite:
                    value = getattr(instance, name, singleton)
                    if value is singleton or name in attrs.create_edit_force_rewrite:
                        value = input[name]
                    setattr(self.response.payload, name, value)

                if attrs.response_hook:
                    attrs.response_hook(self, input, instance, attrs, 'create_edit')

        return handle_impl
# ################################################################################################################################
# ################################################################################################################################
class DeleteMeta(AdminServiceMeta):
    """ Metaclass that turns a model-specific class into a Delete admin service,
    generating both its SimpleIO definition and its handle method.
    """
    def __init__(cls, name, bases, attrs):
        # Normalize the incoming attributes into the shared meta-attributes bunch
        attrs = update_attrs(cls, name, attrs)
        cls.__doc__ = 'Deletes {}.'.format(attrs.label)
        cls.SimpleIO = DeleteMeta.get_sio(
            attrs=attrs,
            name=name,
            input_required=[],
            input_optional=['id', 'name', 'should_raise_if_missing'],
            output_required=[],
            skip_input_required=True,
        )
        cls.handle = DeleteMeta.handle(attrs)
        return super(DeleteMeta, cls).__init__(cls)

    @staticmethod
    def handle(attrs):
        """ Returns the generated handle method - a closure over this service type's meta-attributes.
        """
        def handle_impl(self):
            # type: (Service)
            input = self.request.input
            input_id = input.get('id')
            input_name = input.get('name')

            # At least one of the two lookup keys must be given on input
            if not (input_id or input_name):
                raise BadRequest(self.cid, 'Either id or name is required on input')

            with closing(self.odb.session()) as session:
                attrs._meta_session = session
                try:
                    # Look the instance up by ID if given, otherwise by name
                    query = session.query(attrs.model)
                    if input_id:
                        query = query.\
                            filter(attrs.model.id==input_id)
                    else:
                        query = query.\
                            filter(attrs.model.name==input_name)
                    instance = query.first()

                    # We do not always require for input ID to actually exist - this is useful
                    # with enmasse which may attempt to delete objects that no longer exist.
                    # This may happen if it deletes an object that was an FK to another one.
                    # That other one will be always deleted but enmasse will not know it
                    # so it will try to delete it too, which will fail. This happens, for instance,
                    # when a WebSocket channel is deleted - it may cascade to a pub/sub endpoint
                    # but enmasse does not know about, hence delete_require_instance is True in pubsub_endpoint's endpoint.py.
                    if not instance:
                        if attrs.delete_require_instance:
                            if input_id:
                                attr_name = 'id'
                                attr_value = input_id
                            else:
                                attr_name = 'name'
                                attr_value = input_name

                            # We may have a test case that deletes a Basic Auth definition before it tries
                            # to delete a WebSocket channel related to it. In such circumstances, this flag
                            # will be set to False to ensure that no unneeded exception will be raised.
                            if input.get('should_raise_if_missing', True):
                                raise BadRequest(self.cid, 'Could not find {} instance with {} `{}`'.format(
                                    attrs.label, attr_name, attr_value))
                            else:
                                return
                        else:
                            return

                    # Let the per-service hook inspect or adjust the instance before it is deleted
                    if attrs.instance_hook:
                        attrs.instance_hook(self, input, instance, attrs)

                    session.delete(instance)
                    session.commit()
                except Exception:
                    # Log with the full traceback, undo the session and re-raise for the caller
                    msg = 'Could not delete {}, e:`%s`'.format(attrs.label)
                    self.logger.error(msg, format_exc())
                    session.rollback()
                    raise
                else:
                    # Deletion succeeded - notify all servers via the broker
                    input.action = getattr(attrs.broker_message, attrs.broker_message_prefix + 'DELETE').value
                    input.name = getattr(instance, 'name', ZATO_NOT_GIVEN)

                    for name in attrs.extra_delete_attrs:
                        input[name] = getattr(instance, name)

                    if attrs.broker_message_hook:
                        attrs.broker_message_hook(self, input, instance, attrs, 'delete')

                    self.broker_client.publish(input)

                    # Post-delete hook runs only after the broker message was published
                    if attrs.delete_hook:
                        attrs.delete_hook(self, input, instance, attrs)

        return handle_impl
# ################################################################################################################################
# ################################################################################################################################
class PingMeta(AdminServiceMeta):
    """ Metaclass producing a Ping service for a given model - the generated handler
    looks the object up by its ID, pings it and reports how long the ping took.
    """
    def __init__(cls, name, bases, attrs):
        attrs = update_attrs(cls, name, attrs)
        cls.SimpleIO = PingMeta.get_sio(attrs=attrs, name=name, input_required=['id'], output_optional=['info', 'id'])
        cls.handle = PingMeta.handle(attrs)
        return super(PingMeta, cls).__init__(cls)

    @staticmethod
    def handle(attrs):
        """ Builds the generated handle method as a closure over this type's meta-attributes.
        """
        def handle_impl(self):
            # type: (Service)
            with closing(self.odb.session()) as session:

                # Find the object that is to be pinged - exactly one row must match
                model = attrs.model
                instance = session.query(model).\
                    filter(model.id==self.request.input.id).\
                    one()

                # Time the ping itself
                started = time()
                self.ping(instance)
                elapsed = time() - started

                # Always return ID of the object we pinged
                self.response.payload.id = self.request.input.id

                # Return ping details
                self.response.payload.info = 'Ping issued in {0:03.4f} s, check server logs for details, if any.'.format(
                    elapsed)

        return handle_impl
# ################################################################################################################################
# ################################################################################################################################
| 25,484
|
Python
|
.py
| 478
| 39.694561
| 130
| 0.503136
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,242
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from datetime import datetime, timedelta
from http.client import BAD_REQUEST, METHOD_NOT_ALLOWED
from inspect import isclass
from json import loads
from traceback import format_exc
# Bunch
from bunch import bunchify
# lxml
from lxml.etree import _Element as EtreeElement # type: ignore
from lxml.objectify import ObjectifiedElement
# gevent
from gevent import Timeout, sleep as _gevent_sleep, spawn as _gevent_spawn
from gevent.lock import RLock
# Python 2/3 compatibility
from zato.common.py23_ import maxint
# Zato
from zato.bunch import Bunch
from zato.common.api import BROKER, CHANNEL, DATA_FORMAT, HL7, KVDB, NO_DEFAULT_VALUE, NotGiven, PARAMS_PRIORITY, PUBSUB, \
WEB_SOCKET, zato_no_op_marker
from zato.common.broker_message import CHANNEL as BROKER_MSG_CHANNEL
from zato.common.exception import Inactive, Reportable, ZatoException
from zato.common.facade import SecurityFacade
from zato.common.json_internal import dumps
from zato.common.json_schema import ValidationException as JSONSchemaValidationException
from zato.common.typing_ import cast_, type_
from zato.common.util.api import make_repr, new_cid, payload_from_request, service_name_from_impl, spawn_greenlet, uncamelify
from zato.common.util.python_ import get_module_name_by_path
from zato.server.commands import CommandsFacade
from zato.server.connection.cache import CacheAPI
from zato.server.connection.email import EMailAPI
from zato.server.connection.facade import KeysightContainer, RESTFacade, SchedulerFacade
from zato.server.connection.jms_wmq.outgoing import WMQFacade
from zato.server.connection.search import SearchAPI
from zato.server.connection.sms import SMSAPI
from zato.server.connection.zmq_.outgoing import ZMQFacade
from zato.server.pattern.api import FanOut
from zato.server.pattern.api import InvokeRetry
from zato.server.pattern.api import ParallelExec
from zato.server.pubsub import PubSub
from zato.server.service.reqresp import AMQPRequestData, Cloud, Definition, HL7API, HL7RequestData, IBMMQRequestData, \
InstantMessaging, Outgoing, Request
# Zato - Cython
from zato.cy.reqresp.payload import SimpleIOPayload
from zato.cy.reqresp.response import Response
# Not used here in this module but it's convenient for callers to be able to import everything from a single namespace
from zato.common.ext.dataclasses import dataclass
from zato.common.marshal_.api import Model, ModelCtx
from zato.simpleio import AsIs, CSV, Bool, Date, DateTime, Dict, Decimal, DictList, Elem as SIOElem, Float, Int, List, \
Opaque, Text, UTC, UUID
# For pyflakes - re-binding the imported names marks them as used so they can be
# re-exported from this module's namespace without triggering unused-import warnings.
AsIs = AsIs
CSV = CSV # type: ignore
Bool = Bool
dataclass = dataclass
Date = Date
DateTime = DateTime
Decimal = Decimal
Dict = Dict
DictList = DictList
Float = Float
Int = Int
List = List
Model = Model
ModelCtx = ModelCtx
Opaque = Opaque
Text = Text
UTC = UTC # type: ignore
UUID = UUID # type: ignore
# ################################################################################################################################
if 0:
from logging import Logger
from zato.broker.client import BrokerClient
from zato.common.audit import AuditPII
from zato.common.crypto.api import ServerCryptoManager
from zato.common.json_schema import Validator as JSONSchemaValidator
from zato.common.kvdb.api import KVDB as KVDBAPI
from zato.common.odb.api import ODBManager
from zato.common.typing_ import any_, anydict, anydictnone, boolnone, callable_, callnone, dictnone, intnone, \
listnone, modelnone, strdict, strdictnone, strstrdict, strnone, strlist
from zato.common.util.time_ import TimeUtil
from zato.distlock import Lock
from zato.server.connection.connector import Connector
from zato.server.connection.ftp import FTPStore
from zato.server.connection.http_soap.outgoing import RESTWrapper
from zato.server.connection.web_socket import ChannelWebSocket, WebSocket
from zato.server.base.worker import WorkerStore
from zato.server.base.parallel import ParallelServer
from zato.server.config import ConfigDict, ConfigStore
from zato.server.connection.cassandra import CassandraAPI
from zato.server.query import CassandraQueryAPI
from zato.sso.api import SSOAPI
from zato.simpleio import CySimpleIO
AuditPII = AuditPII
BrokerClient = BrokerClient
callable_ = callable_
CassandraAPI = CassandraAPI
CassandraQueryAPI = CassandraQueryAPI
ConfigDict = ConfigDict
ConfigStore = ConfigStore
CySimpleIO = CySimpleIO # type: ignore
FTPStore = FTPStore
JSONSchemaValidator = JSONSchemaValidator
KVDBAPI = KVDBAPI # type: ignore
ODBManager = ODBManager
ParallelServer = ParallelServer
ServerCryptoManager = ServerCryptoManager
SSOAPI = SSOAPI # type: ignore
timedelta = timedelta
TimeUtil = TimeUtil
WebSocket = WebSocket
WorkerStore = WorkerStore
# ################################################################################################################################
# Module-level logger plus a shortcut used to create per-service loggers in Service.__init__
logger = logging.getLogger(__name__)
_get_logger=logging.getLogger

# ################################################################################################################################

# Marker string for values that were not provided on input
NOT_GIVEN = 'ZATO_NOT_GIVEN'

# ################################################################################################################################

# Backward compatibility
Boolean = Bool
Integer = Int
ForceType = SIOElem
ListOfDicts = DictList
Nested = Opaque
Unicode = Text

# ################################################################################################################################

# For code completion
PubSub = PubSub

# ################################################################################################################################

_async_callback = CHANNEL.INVOKE_ASYNC_CALLBACK

# ################################################################################################################################

# NOTE(review): presumably the channels whose requests carry a WSGI environment - confirm with callers
_wsgi_channels = {CHANNEL.HTTP_SOAP, CHANNEL.INVOKE, CHANNEL.INVOKE_ASYNC}

# ################################################################################################################################

# Response payloads of these types are used as-is by set_response_data, without conversion
_response_raw_types=(bytes, str, dict, list, tuple, EtreeElement, Model, ObjectifiedElement)
_utcnow = datetime.utcnow

# ################################################################################################################################

# Names of hook methods looked up around scheduler jobs and around handle
before_job_hooks = ('before_job', 'before_one_time_job', 'before_interval_based_job', 'before_cron_style_job')
after_job_hooks = ('after_job', 'after_one_time_job', 'after_interval_based_job', 'after_cron_style_job')
before_handle_hooks = ('before_handle',)
after_handle_hooks = ('after_handle', 'finalize_handle')
# The almost identical methods below are defined separately because they are used in critical paths
# where every if counts.

def call_hook_no_service(hook:'callable_') -> 'None':
    """ Invokes a hook that takes no arguments, logging any exception instead of propagating it.
    """
    try:
        hook()
    except Exception:
        logger.error('Can\'t run hook `%s`, e:`%s`', hook, format_exc())

def call_hook_with_service(hook:'callable_', service:'Service') -> 'None':
    """ Invokes a hook with a service on input, logging any exception instead of propagating it.
    """
    try:
        hook(service)
    except Exception:
        logger.error('Can\'t run hook `%s`, e:`%s`', hook, format_exc())

# NOTE(review): keys appear to be reserved, internal-only invocation parameters - confirm against callers
internal_invoke_keys = {'target', 'set_response_func', 'cid'}
# ################################################################################################################################
class ModuleCtx:
    """ Groups module-level channel constants in one place.
    """
    # Channels treated as HTTP ones, e.g. for per-verb handler dispatch in Service._invoke
    HTTP_Channels = {CHANNEL.HTTP_SOAP, CHANNEL.INVOKE}
    Channel_Scheduler = CHANNEL.SCHEDULER
    Channel_Service = CHANNEL.SERVICE
    # Channels used by the fan-out and parallel-execution patterns
    Pattern_Call_Channels = {CHANNEL.FANOUT_CALL, CHANNEL.PARALLEL_EXEC_CALL}
# ################################################################################################################################
@dataclass(init=False)
class AsyncCtx:
    """ Used by self.invoke_async to relay context of the invocation.
    """
    calling_service: str   # Name of the service that requested the async invocation
    service_name: str      # Name of the service to be invoked
    cid: str               # Correlation ID for the invocation
    data: str              # Request payload
    data_format: str       # Format of the payload, e.g. JSON
    zato_ctx: 'any_'       # Extra Zato-level context
    environ: 'anydict'     # Caller-provided environment dict
    callback: 'listnone' = None  # Optional list of callback services to invoke with the response
# ################################################################################################################################
class ChannelInfo:
    """ Conveys information abouts the channel that a service is invoked through.
    Available in services as self.channel or self.chan.
    """
    __slots__ = ('id', 'name', 'type', 'data_format', 'is_internal', 'match_target', 'impl', 'security', 'sec')

    def __init__(
        self,
        id: 'intnone',
        name: 'strnone',
        type: 'strnone',
        data_format: 'strnone',
        is_internal: 'boolnone',
        match_target: 'any_',
        security: 'ChannelSecurityInfo',
        impl: 'any_'
    ) -> 'None':
        # Basic channel metadata
        self.id = id
        self.name = name
        self.type = type
        self.is_internal = is_internal
        self.data_format = data_format
        self.match_target = match_target

        # Implementation-specific object backing this channel
        self.impl = impl

        # Two aliases pointing to the same security definition
        self.sec = security
        self.security = security

    def __repr__(self) -> 'str':
        return make_repr(self)
# ################################################################################################################################
class ChannelSecurityInfo:
    """ Contains information about a security definition assigned to a channel, if any.
    Available in services as:

    * self.channel.security
    * self.channel.sec

    * self.chan.security
    * self.chan.sec
    """
    __slots__ = ('id', 'name', 'type', 'username', 'impl')

    def __init__(self, id:'intnone', name:'strnone', type:'strnone', username:'strnone', impl:'any_') -> 'None':
        self.id = id
        self.name = name
        self.type = type
        self.username = username
        self.impl = impl

# ################################################################################################################################

    def to_dict(self, needs_impl:'bool'=False) -> 'strdict':
        """ Returns this definition as a dict, optionally including the implementation object.
        """
        # Basic details are always included ..
        out = dict(
            id = self.id,
            name = self.name,
            type = self.type,
            username = self.username,
        )

        # .. the implementation object goes in only on request.
        if needs_impl:
            out['impl'] = self.impl

        return out
# ################################################################################################################################
class _WSXChannel:
    """ Provides communication with WebSocket channels.
    """
    def __init__(self, server:'ParallelServer', channel_name:'str') -> 'None':
        self.server = server
        self.channel_name = channel_name

# ################################################################################################################################

    # Note that _action is evaluated once, when the class is created, as a default argument
    def broadcast(self, data:'str', _action:'str'=BROKER_MSG_CHANNEL.WEB_SOCKET_BROADCAST.value) -> 'None':
        """ Sends data to all WSX clients connected to this channel.
        """
        # type: (str, str)

        # If we are invoked, it means that self.channel_name points to an existing object
        # so we can just let all servers know that they are to invoke their connected clients.
        self.server.broker_client.publish({
            'action': _action,
            'channel_name': self.channel_name,
            'data': data
        })
# ################################################################################################################################
class _WSXChannelContainer:
    """ A thin wrapper to mediate access to WebSocket channels.
    """
    def __init__(self, server:'ParallelServer') -> 'None':
        self.server = server
        self._lock = RLock()
        self._channels = {}

    def invoke(self, cid:'str', conn_name:'str', **kwargs:'any_') -> 'any_':
        """ Invokes clients connected to the named WebSocket channel and returns their response.
        """
        wsx_channel:'Connector' = self.server.worker_store.web_socket_api.connectors[conn_name] # type: ignore
        wsx_channel:'ChannelWebSocket' = cast_('ChannelWebSocket', wsx_channel) # type: ignore
        return wsx_channel.invoke_client(cid, **kwargs)

# ################################################################################################################################

    def __getitem__(self, channel_name):
        # type: (str) -> _WSXChannel
        with self._lock:
            channel = self._channels.get(channel_name)

            # Not seen before - create a wrapper, provided the channel actually exists
            if channel is None:
                if not self.server.worker_store.web_socket_api.connectors.get(channel_name):
                    raise KeyError('No such WebSocket channel `{}`'.format(channel_name))
                channel = _WSXChannel(self.server, channel_name)
                self._channels[channel_name] = channel

            return channel

# ################################################################################################################################

    def get(self, channel_name:'str') -> '_WSXChannel | None':
        """ Like __getitem__ but returns None instead of raising KeyError for unknown channels.
        """
        try:
            channel = self[channel_name]
        except KeyError:
            channel = None # Be explicit in returning None
        return channel
# ################################################################################################################################
class WSXFacade:
    """ An object via which WebSocket channels and outgoing connections may be invoked or send broadcasts to.
    """
    __slots__ = 'server', 'channel', 'out'

    def __init__(self, server:'ParallelServer') -> 'None':
        # NOTE(review): 'out' is declared in __slots__ but is not assigned here - confirm it is set elsewhere
        self.server = server
        self.channel = _WSXChannelContainer(self.server)
# ################################################################################################################################
class AMQPFacade:
    """ Introduced solely to let service access outgoing connections through self.amqp.invoke/_async
    rather than self.out.amqp_invoke/_async. The .send method is kept for pre-3.0 backward-compatibility.
    """
    # Only slots are declared - the callables themselves are attached elsewhere at runtime
    __slots__ = ('send', 'invoke', 'invoke_async')
# ################################################################################################################################
class PatternsFacade:
    """ The API through which services make use of integration patterns.
    """
    __slots__ = ('invoke_retry', 'fanout', 'parallel')

    def __init__(self, invoking_service:'Service', cache:'anydict', lock:'RLock') -> 'None':
        # Each pattern object is bound to the invoking service; fan-out and parallel
        # execution additionally share the given cache and its lock.
        self.invoke_retry = InvokeRetry(invoking_service)
        self.fanout = FanOut(invoking_service, cache, lock)
        self.parallel = ParallelExec(invoking_service, cache, lock)
# ################################################################################################################################
class Service:
    """ A base class for all services deployed on Zato servers, no matter the transport and protocol, be it REST, IBM MQ
    or any other, regardless of whether they are built-in or user-defined ones.
    """
    # Facades for outgoing REST connections, the scheduler and security definitions
    rest: 'RESTFacade'
    schedule: 'SchedulerFacade'
    security: 'SecurityFacade'

    call_hooks:'bool' = True
    _filter_by = None
    enforce_service_invokes: 'bool'
    invokes = []

    # Maps HTTP verbs to handle_* methods - populated per class by add_http_method_handlers
    http_method_handlers = {}

    # Class-wide attributes shared by all services thus created here instead of assigning to self.
    cloud = Cloud()
    definition = Definition()
    im = InstantMessaging()
    odb:'ODBManager'
    kvdb:'KVDB'
    pubsub:'PubSub'
    static_config:'Bunch'
    email:'EMailAPI | None' = None
    search:'SearchAPI | None' = None
    patterns: 'PatternsFacade | None' = None
    cassandra_conn:'CassandraAPI | None' = None
    cassandra_query:'CassandraQueryAPI | None' = None
    amqp = AMQPFacade()
    commands = CommandsFacade()

    # For WebSockets
    wsx:'WSXFacade'

    _worker_store:'WorkerStore'
    _worker_config:'ConfigStore'
    _out_ftp:'FTPStore'
    _out_plain_http:'ConfigDict'

    _has_before_job_hooks:'bool' = False
    _has_after_job_hooks:'bool' = False
    _before_job_hooks = []
    _after_job_hooks = []

    has_sio:'bool'

    # Cython based SimpleIO definition created by service store when the class is deployed
    _sio:'CySimpleIO'

    # Rate limiting
    _has_rate_limiting:'bool' = False

    # User management and SSO
    sso:'SSOAPI'

    # Crypto operations
    crypto:'ServerCryptoManager'

    # Audit log
    audit_pii:'AuditPII'

    # Vendors - Keysight
    keysight: 'KeysightContainer'

    # By default, services do not use JSON Schema
    schema = '' # type: str

    # JSON Schema validator attached only if service declares a schema to use
    _json_schema_validator:'JSONSchemaValidator | None' = None

    server: 'ParallelServer'
    broker_client: 'BrokerClient'
    time: 'TimeUtil'

    # These two are the same
    chan: 'ChannelInfo'
    channel: 'ChannelInfo'

    # When was the service invoked
    invocation_time: 'datetime'

    # When did our 'handle' method finished processing the request
    handle_return_time: 'datetime'

    # A timedelta object with the processing time up to microseconds
    processing_time_raw: 'timedelta'

    # Processing time in milliseconds
    processing_time: 'float'

    # Per-component feature toggles set by the server at deployment time
    component_enabled_sms: 'bool'
    component_enabled_hl7: 'bool'
    component_enabled_odoo: 'bool'
    component_enabled_email: 'bool'
    component_enabled_search: 'bool'
    component_enabled_ibm_mq: 'bool'
    component_enabled_zeromq: 'bool'
    component_enabled_msg_path: 'bool'
    component_enabled_patterns: 'bool'
    component_enabled_target_matcher: 'bool'
    component_enabled_invoke_matcher: 'bool'

    cache: 'CacheAPI'
def __init__(
    self,
    *ignored_args:'any_',
    **ignored_kwargs:'any_'
) -> 'None':
    """ Sets up per-invocation state; any positional or keyword arguments are ignored.
    """
    # Both attributes below are name-mangled to _Service__service_* and are filled in
    # by get_name / get_impl_name during deployment.
    self.name = self.__class__.__service_name # Will be set through .get_name by Service Store
    self.impl_name = self.__class__.__service_impl_name # Ditto
    self.logger = _get_logger(self.name) # type: Logger
    self.cid = ''
    self.in_reply_to = ''
    self.data_format = ''
    self.transport = ''
    self.wsgi_environ = {} # type: anydict
    self.job_type = '' # type: str
    self.environ = Bunch()
    self.request = Request(self) # type: Request
    self.response = Response(self.logger) # type: ignore
    self.has_validate_input = False
    self.has_validate_output = False

    # This is where user configuration is kept
    self.config = Bunch()

    # This is kept for backward compatibility with code that uses self.user_config in services.
    # Only self.config should be used in new services.
    self.user_config = Bunch()

    self.usage = 0 # How many times the service has been invoked
    self.slow_threshold = maxint # After how many ms to consider the response came too late

    # Aggregate facade over all kinds of outgoing connections; components that are
    # disabled get a None (or a marker value) instead of their facade.
    self.out = self.outgoing = Outgoing(
        self.amqp,
        self._out_ftp,
        WMQFacade(self) if self.component_enabled_ibm_mq else None,
        self._worker_config.out_odoo,
        self._out_plain_http,
        self._worker_config.out_soap,
        self._worker_store.sql_pool_store,
        ZMQFacade(self._worker_store.zmq_out_api) if self.component_enabled_zeromq else NO_DEFAULT_VALUE,
        self._worker_store.outconn_wsx,
        self._worker_store.vault_conn_api,
        SMSAPI(self._worker_store.sms_twilio_api) if self.component_enabled_sms else None,
        self._worker_config.out_sap,
        self._worker_config.out_sftp,
        self._worker_store.outconn_ldap,
        self._worker_store.outconn_mongodb,
        self._worker_store.def_kafka,
        self.kvdb
    ) # type: Outgoing

    # REST facade for outgoing connections
    self.rest = RESTFacade()

    if self.component_enabled_hl7:
        hl7_api = HL7API(self._worker_store.outconn_hl7_fhir, self._worker_store.outconn_hl7_mllp)
        self.out.hl7 = hl7_api
# ################################################################################################################################
@staticmethod
def get_name_static(class_:'type[Service]') -> 'str':
    """ Static counterpart of get_name - delegates to it so the logic lives in one place only.
    """
    return Service.get_name(class_) # type: ignore
# ################################################################################################################################
@classmethod
def get_name(class_:'type_[Service]') -> 'str': # type: ignore
    """ Returns a service's name, settings its .name attribute along. This will
    be called once while the service is being deployed.
    """
    # NOTE(review): the hasattr check uses the literal string '__service_name' while
    # the assignment below is name-mangled by the compiler to '_Service__service_name',
    # so this guard appears never to find the cached value - confirm whether caching is intended.
    if not hasattr(class_, '__service_name'):
        name = getattr(class_, 'name', None)
        if not name:
            # Derive the public name from the implementation name, e.g. module + CamelCase class
            impl_name = class_.get_impl_name()
            name = service_name_from_impl(impl_name)
            name = class_.convert_impl_name(name)
        class_.__service_name = name # type: str

    return class_.__service_name
# ################################################################################################################################
@classmethod
def get_impl_name(class_:'type_[Service]') -> 'str': # type: ignore
    """ Returns the implementation name - the module name joined with the class name.
    """
    # NOTE(review): same name-mangling caveat as in get_name - hasattr checks the unmangled
    # string while the attribute written below is mangled to '_Service__service_impl_name'.
    if not hasattr(class_, '__service_impl_name'):
        class_.__service_impl_name = '{}.{}'.format(class_.__service_module_name, class_.__name__)
    return class_.__service_impl_name
# ################################################################################################################################
@staticmethod
def convert_impl_name(name:'str') -> 'str':
    """ Converts a CamelCase implementation name into the dotted, dash-separated
    form used as the service's public name.
    """
    # Un-CamelCase first, then separate the module path from the class part
    parts = uncamelify(name).split('.')
    *module_parts, class_part = parts

    # Module path elements use dashes instead of underscores
    module_parts = [part.replace('_', '-') for part in module_parts]

    # The class part drops its leading dash, if any, along with dash artifacts after separators
    if class_part.startswith('-'):
        class_part = class_part[1:]
    class_part = class_part.replace('.-', '.').replace('_-', '_')

    return '{}.{}'.format('.'.join(module_parts), class_part)
# ################################################################################################################################
@classmethod
def zato_set_module_name(class_:'type_[Service]', path:'str') -> 'str': # type: ignore
    """ Stores and returns the module name for this service class, derived either from
    the class itself (for internal Zato services) or from the file's path on disk.
    """
    # NOTE(review): same name-mangling caveat as in get_name - hasattr checks the unmangled
    # string while the attribute written below is mangled to '_Service__service_module_name'.
    if not hasattr(class_, '__service_module_name'):
        if 'zato' in path and 'internal' in path:
            mod_name = class_.__module__
        else:
            mod_name = get_module_name_by_path(path)
        class_.__service_module_name = mod_name
    return class_.__service_module_name
# ################################################################################################################################
@classmethod
def add_http_method_handlers(class_:'type_[Service]') -> 'None': # type: ignore
    """ Collects this class's handle_* methods into its http_method_handlers dict,
    keyed by HTTP verb, e.g. handle_GET is stored under 'GET'.
    """
    for name in dir(class_):
        if name.startswith('handle_'):
            # Give the class its own dict when none (or an empty one) is present -
            # presumably so the empty class-level default shared by all services
            # is not mutated; note the check only fires while the dict is falsy.
            if not getattr(class_, 'http_method_handlers', False):
                class_.http_method_handlers = {}
            method = name.replace('handle_', '')
            class_.http_method_handlers[method] = getattr(class_, name)
# ################################################################################################################################
def _init(self, may_have_wsgi_environ:'bool'=False) -> 'None':
    """ Actually initializes the service.

    may_have_wsgi_environ - when True, self.wsgi_environ is used to initialize
    the HTTP part of the request.
    """
    self.slow_threshold = self.server.service_store.services[self.impl_name]['slow_threshold']

    # The if's below are meant to be written in this way because we don't want any unnecessary attribute lookups
    # and method calls in this method - it's invoked each time a service is executed. The attributes are set
    # for the whole of the Service class each time it is discovered they are needed. It cannot be done in ServiceStore
    # because at the time that ServiceStore executes the worker config may still not be ready.

    if self.component_enabled_email:
        if not Service.email:
            Service.email = EMailAPI(self._worker_store.email_smtp_api, self._worker_store.email_imap_api)

    if self.component_enabled_search:
        if not Service.search:
            Service.search = SearchAPI(self._worker_store.search_es_api, self._worker_store.search_solr_api)

    if self.component_enabled_patterns:
        self.patterns = PatternsFacade(self, self.server.internal_cache_patterns, self.server.internal_cache_lock_patterns)

    if may_have_wsgi_environ:
        self.request.http.init(self.wsgi_environ)

    # self.has_sio attribute is set by ServiceStore during deployment
    if self.has_sio:
        self.request.init(True, self.cid, self._sio, self.data_format, self.transport, self.wsgi_environ, self.server.encrypt)
        self.response.init(self.cid, self._sio, self.data_format)

    # Cache is always enabled
    self.cache = self._worker_store.cache_api

    # REST facade
    self.rest.init(self.cid, self._out_plain_http)

    # Vendors - Keysight
    self.keysight = KeysightContainer()
    self.keysight.init(self.cid, self._out_plain_http)
# ################################################################################################################################
def set_response_data(self, service:'Service', **kwargs:'any_') -> 'any_':
    """ Converts service.response.payload into one of the raw types (str, bytes, dict etc.)
    if it is not one already, stores it back on the response and returns it.
    """
    response = service.response.payload
    if not isinstance(response, _response_raw_types):

        # SimpleIO payloads serialize themselves via getvalue
        if hasattr(response, 'getvalue'):
            response = response.getvalue(serialize=kwargs.get('serialize'))
            if kwargs.get('as_bunch'):
                response = bunchify(response)

        # Models and similar objects expose to_dict / to_json instead
        elif hasattr(response, 'to_dict'):
            response = response.to_dict()

        elif hasattr(response, 'to_json'):
            response = response.to_json()

        service.response.payload = response

    return response
# ################################################################################################################################
def _invoke(self, service:'Service', channel:'str') -> 'None':
    """ Runs the service's handler - for HTTP channels, per-verb methods such as
    handle_GET are dispatched; everything else goes to plain handle.

    If a service defines any handle_<VERB> methods it is treated as a REST target -
    a request whose verb has no matching method gets '405 Method Not Allowed'.
    With only 'handle' defined, 'handle' is always invoked and no 405 is returned.
    """
    # Non-HTTP channels always go straight to handle
    if channel not in ModuleCtx.HTTP_Channels:
        service.handle()
        return

    verb_handlers = service.http_method_handlers

    # No per-verb handlers at all - handle is the catch-all
    if not verb_handlers:
        service.handle()
        return

    # There is at least one per-verb handler - use it if the request's verb matches,
    # otherwise reply with 405.
    handler = verb_handlers.get(service.request.http.method)
    if handler is not None:
        handler(service)
    else:
        service.response.status_code = METHOD_NOT_ALLOWED
# ################################################################################################################################
def extract_target(self, name:'str') -> 'tuple[str, str]':
    """ Splits a service's name into name and target, if the latter is provided on input at all.

    E.g. 'myservice' -> ('myservice', '') and 'myservice@mytarget' -> ('myservice', 'mytarget').

    Raises ZatoException if an '@' is present but the target part is empty.
    """
    # It can be either a name or a name followed by the target to invoke the service on,
    # i.e. 'myservice' or 'myservice@mytarget'.
    if '@' in name:

        # Keep the full input around - after the split below, 'name' holds only
        # the left-hand part, which would make the error message misleading.
        full_name = name

        # maxsplit=1 so that any further '@' characters stay in the target
        # instead of raising an unpacking ValueError.
        name, target = name.split('@', 1)
        if not target:
            raise ZatoException(self.cid, 'Target must not be empty in `{}`'.format(full_name))
    else:
        target = ''

    return name, target
# ################################################################################################################################
def update_handle(self,
    set_response_func, # type: callable_
    service,           # type: Service
    raw_request,       # type: any_
    channel,           # type: str
    data_format,       # type: str
    transport,         # type: str
    server,            # type: ParallelServer
    broker_client,     # type: BrokerClient | None
    worker_store,      # type: WorkerStore
    cid,               # type: str
    simple_io_config,  # type: anydict
    *args:'any_',
    **kwargs:'any_'
) -> 'any_':
    """ The main entry point for invoking a service instance - populates the instance
    with request context, runs rate limiting, JSON Schema validation and all lifecycle
    hooks around the actual handler, then builds and post-processes the response.

    Returns the response produced by set_response_func, optionally unwrapped from
    its top-level 'response'/'zato*' element if skip_response_elem is requested.
    Re-raises any exception the service itself raised.
    """
    wsgi_environ = kwargs.get('wsgi_environ', {})
    payload = wsgi_environ.get('zato.request.payload')
    channel_item = wsgi_environ.get('zato.channel_item', {})

    # An optional dict that, if given, will be filled in with response headers the service produces.
    zato_response_headers_container = kwargs.get('zato_response_headers_container')

    # Here's an edge case. If a SOAP request has a single child in Body and this child is an empty element
    # (though possibly with attributes), checking for 'not payload' alone won't suffice - this evaluates
    # to False so we'd be parsing the payload again superfluously.
    if not isinstance(payload, ObjectifiedElement) and not payload:
        payload = payload_from_request(server.json_parser, cid, raw_request, data_format, transport, channel_item)

    job_type = kwargs.get('job_type') or ''
    channel_params = kwargs.get('channel_params', {})
    merge_channel_params = kwargs.get('merge_channel_params', True)
    params_priority = kwargs.get('params_priority', PARAMS_PRIORITY.DEFAULT)

    # Attach the full request context to the service instance before it runs.
    service.update(service, channel, server, broker_client, # type: ignore
        worker_store, cid, payload, raw_request, transport, simple_io_config, data_format, wsgi_environ,
        job_type=job_type, channel_params=channel_params,
        merge_channel_params=merge_channel_params, params_priority=params_priority,
        in_reply_to=wsgi_environ.get('zato.request_ctx.in_reply_to', None), environ=kwargs.get('environ'),
        wmq_ctx=kwargs.get('wmq_ctx'), channel_info=kwargs.get('channel_info'),
        channel_item=channel_item, wsx=wsgi_environ.get('zato.wsx'))

    # It's possible the call will be completely filtered out. The uncommonly looking not self.accept shortcuts
    # if ServiceStore replaces self.accept with None in the most common case of this method's not being
    # implemented by user services.
    if (not self.accept) or service.accept(): # type: ignore

        # Assumes it goes fine by default
        e, exc_formatted = None, None

        try:
            # Check rate limiting first - note the usage of 'service' rather than 'self',
            # in case self is a gateway service such as an JSON-RPC one in which case
            # we are in fact interested in checking the target service's rate limit,
            # not our own.
            if service._has_rate_limiting:
                self.server.rate_limiting.check_limit(self.cid, ModuleCtx.Channel_Service, service.name,
                    self.wsgi_environ['zato.http.remote_addr'])

            if service.server.component_enabled.stats:
                _ = service.server.current_usage.incr(service.name)

            service.invocation_time = _utcnow()

            # Check if there is a JSON Schema validator attached to the service and if so,
            # validate input before proceeding any further.
            if service._json_schema_validator and service._json_schema_validator.is_initialized:

                # In Python 3 only bytes can be decoded - a str object is already
                # text and can be parsed directly (previously this branch tried
                # to call .decode on a str, which would raise AttributeError).
                if isinstance(raw_request, bytes):
                    data = raw_request.decode('utf8')
                    data = loads(data)
                elif isinstance(raw_request, str):
                    data = loads(raw_request)
                else:
                    data = raw_request

                validation_result = service._json_schema_validator.validate(cid, data)
                if not validation_result:
                    error = validation_result.get_error()
                    error_msg = error.get_error_message()
                    error_msg_details = error.get_error_message(True)
                    raise JSONSchemaValidationException(
                        cid,
                        CHANNEL.SERVICE,
                        service.name,
                        error.needs_err_details,
                        error_msg,
                        error_msg_details
                    )

            # All hooks are optional so we check if they have not been replaced with None by ServiceStore.

            # Call before job hooks if any are defined and we are called from the scheduler
            if service.call_hooks and service._has_before_job_hooks and self.channel.type == ModuleCtx.Channel_Scheduler:
                for elem in service._before_job_hooks:
                    if elem:
                        call_hook_with_service(elem, service)

            # Called before .handle - catches exceptions
            if service.call_hooks and service.before_handle: # type: ignore
                call_hook_no_service(service.before_handle)

            # Called before .handle - does not catch exceptions
            if service.validate_input: # type: ignore
                service.validate_input()

            # This is the place where the service is invoked
            self._invoke(service, channel)

            # Called after .handle - does not catch exceptions
            if service.validate_output: # type: ignore
                service.validate_output()

            # Called after .handle - catches exceptions
            if service.call_hooks and service.after_handle: # type: ignore
                call_hook_no_service(service.after_handle)

            # Call after job hooks if any are defined and we are called from the scheduler
            if service._has_after_job_hooks and self.channel.type == ModuleCtx.Channel_Scheduler:
                for elem in service._after_job_hooks:
                    if elem:
                        call_hook_with_service(elem, service)

            # Optional, almost never overridden.
            if service.finalize_handle: # type: ignore
                call_hook_no_service(service.finalize_handle)

        except Exception as ex:
            # Remember the exception - it is re-raised after the response is built below.
            e = ex
            exc_formatted = format_exc()

        finally:
            try:
                # This obtains the response
                response = set_response_func(service, data_format=data_format, transport=transport, **kwargs)

                # If this was fan-out/fan-in we need to always notify our callbacks no matter the result
                if channel in ModuleCtx.Pattern_Call_Channels:
                    if channel == CHANNEL.FANOUT_CALL:
                        fanout = self.patterns.fanout # type: ignore
                        func = fanout.on_call_finished
                        exc_data = e
                    else:
                        parallel = self.patterns.parallel # type: ignore
                        func = parallel.on_call_finished
                        exc_data = exc_formatted

                    if isinstance(service.response.payload, SimpleIOPayload):
                        payload = service.response.payload.getvalue()
                    else:
                        payload = service.response.payload

                    spawn_greenlet(func, service, payload, exc_data)

                # It is possible that, on behalf of our caller (e.g. pub.zato.service.service-invoker),
                # we also need to populate a dictionary of headers that were produced by the service
                # that we are invoking.
                if zato_response_headers_container is not None:
                    if service.response.headers:
                        zato_response_headers_container.update(service.response.headers)

            except Exception as resp_e:
                # Prefer reporting the service's own exception over a response-building one.
                if e:
                    if isinstance(e, Reportable):
                        raise e
                    else:
                        raise Exception(exc_formatted)
                raise resp_e

            else:
                if e:
                    raise e from None

    # We don't accept it but some response needs to be returned anyway.
    else:
        response = service.response
        response.payload = ''
        response.status_code = BAD_REQUEST

    # If we are told always to skip response elements, this is where we make use of it.
    _zato_needs_response_wrapper = getattr(service.__class__, '_zato_needs_response_wrapper', None)
    if _zato_needs_response_wrapper is False:
        kwargs['skip_response_elem'] = True

    if kwargs.get('skip_response_elem') and hasattr(response, 'keys'):

        # If it has .keys, it means it is a dict.
        response = cast_('dict', response) # type: ignore
        keys = list(response)
        try:
            keys.remove('_meta')
        except ValueError:
            # This is fine, there was only the actual response element here,
            # without the '_meta' pagination
            pass

        # It is possible that the dictionary is empty
        response_elem = keys[0] if keys else None

        # This covers responses that have only one top-level element
        # and that element's name is 'response' or, e.g. 'zato_amqp_...'
        if len(keys) == 1:
            if response_elem == 'response' or (isinstance(response_elem, str) and response_elem.startswith('zato')):
                return response[response_elem]

            # This may be a dict response from a service, in which case we return it as is
            elif isinstance(response, dict): # type: ignore
                return response

            # .. otherwise, this could be a dictionary of elements other than the above
            # so we just return the dict as it is.
            else:
                return response
        else:
            return response
    else:
        return response
# ################################################################################################################################
def invoke_by_impl_name(
    self,
    impl_name,              # type: str
    payload='',             # type: str | anydict
    channel=CHANNEL.INVOKE, # type: str
    data_format=DATA_FORMAT.DICT, # type: str
    transport='',           # type: str
    serialize=False,        # type: bool
    as_bunch=False,         # type: bool
    timeout=0,              # type: int
    raise_timeout=True,     # type: bool
    **kwargs:'any_'
) -> 'any_':
    """ Invokes a service synchronously by its implementation name (full dotted Python name).

    If timeout is non-zero, the invocation runs in its own greenlet and is killed
    after that many seconds - in which case Timeout is re-raised if raise_timeout
    is True, otherwise None is returned implicitly.

    Raises ZatoException if a service attempts to invoke itself and Inactive
    if the target service is not active.
    """
    if self.impl_name == impl_name:
        msg = 'A service cannot invoke itself, name:[{}]'.format(self.name)
        self.logger.error(msg)
        raise ZatoException(self.cid, msg)

    service, is_active = self.server.service_store.new_instance(impl_name)
    if not is_active:
        raise Inactive(service.get_name())

    # If there is no payload but there are keyword arguments other than what we expect internally,
    # we can turn them into a payload ourselves.
    if not payload:
        kwargs_keys = set(kwargs)

        # Subtracting keys that are known from the keys that are given on input
        # gives us a set of keys that we do not know, i.e. the keys that are extra
        # and that can be turned into a payload.
        extra_keys = kwargs_keys - internal_invoke_keys

        # Now, if the subtraction did result in any keys, we can for sure build a dictionary with payload data.
        if extra_keys:
            payload = {}
            for name in extra_keys:
                payload[name] = kwargs[name]

    set_response_func = kwargs.pop('set_response_func', service.set_response_data)

    # Positional arguments for update_handle - note that 'cid' may be overridden by the caller.
    invoke_args = (set_response_func, service, payload, channel, data_format, transport, self.server,
        self.broker_client, self._worker_store, kwargs.pop('cid', self.cid), {})

    kwargs.update({'serialize':serialize, 'as_bunch':as_bunch})

    if timeout:
        g = None
        try:
            g = _gevent_spawn(self.update_handle, *invoke_args, **kwargs)
            return g.get(block=True, timeout=timeout)
        except Timeout:
            if g:
                g.kill()
            logger.warning('Service `%s` timed out (%s)', service.name, self.cid)
            if raise_timeout:
                raise
    else:
        return self.update_handle(*invoke_args, **kwargs)
# ################################################################################################################################
def invoke(self, zato_name:'any_', *args:'any_', **kwargs:'any_') -> 'any_':
    """ Invokes a service synchronously, given either its name or its class.
    """
    # Despite the parameter's name, callers may pass in a Service subclass
    # rather than a string - in that case, the name is read off the class itself.
    if isclass(zato_name) and issubclass(zato_name, Service): # type: Service
        zato_name = zato_name.get_name()

    impl_name = self.server.service_store.name_to_impl_name[zato_name]
    return self.invoke_by_impl_name(impl_name, *args, **kwargs)
# ################################################################################################################################
def invoke_by_id(self, service_id:'int', *args:'any_', **kwargs:'any_') -> 'any_':
    """ Invokes a service synchronously by its ID.
    """
    # NOTE(review): extract_target expects a string with an optional '@target'
    # suffix - an actual int here would make the '@' membership test inside
    # extract_target raise TypeError. Presumably IDs arrive as strings whenever
    # the target matcher is enabled - confirm with callers.
    if self.component_enabled_target_matcher:
        service_id, target = self.extract_target(service_id) # type: ignore
        kwargs['target'] = target

    return self.invoke_by_impl_name(self.server.service_store.id_to_impl_name[service_id], *args, **kwargs)
# ################################################################################################################################
def invoke_async(
    self,
    name,        # type: str
    payload='',  # type: str
    channel=CHANNEL.INVOKE_ASYNC,  # type: str
    data_format=DATA_FORMAT.DICT,  # type: str
    transport='',                  # type: str
    expiration=BROKER.DEFAULT_EXPIRATION, # type: int
    to_json_string=False, # type: bool
    cid='',               # type: str
    callback=None,        # type: str | Service | None
    zato_ctx=None,        # type: strdict | None
    environ=None          # type: strdict | None
) -> 'str':
    """ Invokes a service asynchronously by its name - the actual invocation runs
    in a separate greenlet and this method returns immediately with the CID
    under which the invocation will execute.

    NOTE(review): the 'expiration' parameter is accepted but not referenced in
    this method's body - confirm whether it is still meaningful.
    """
    zato_ctx = zato_ctx if zato_ctx is not None else {}
    environ = environ if environ is not None else {}

    if self.component_enabled_target_matcher:
        name, target = self.extract_target(name)
        zato_ctx['zato.request_ctx.target'] = target
    else:
        target = None

    # Let's first find out if the service can be invoked at all
    impl_name = self.server.service_store.name_to_impl_name[name]

    if self.component_enabled_invoke_matcher:
        if not self._worker_store.invoke_matcher.is_allowed(impl_name):
            raise ZatoException(self.cid, 'Service `{}` (impl_name) cannot be invoked'.format(impl_name))

    if to_json_string:
        payload = dumps(payload)

    cid = cid or new_cid()

    # If there is any callback at all, we need to figure out its name because that's how it will be invoked by.
    if callback:

        # The same service
        if callback is self:
            callback = self.name
        else:
            # NOTE(review): the sink name is derived from self.name, the calling
            # service - if such a '-async-callback' sink is deployed, it takes
            # precedence over whatever callback was given on input. Confirm this
            # precedence is intentional.
            sink = '{}-async-callback'.format(self.name)
            if sink in self.server.service_store.name_to_impl_name:
                callback = sink
            else:
                # Otherwise the callback must be a string pointing to the actual service to reply to
                # so we do not need to do anything.
                pass

    # Build the context object carried over to the greenlet that runs the invocation.
    async_ctx = AsyncCtx()
    async_ctx.calling_service = self.name
    async_ctx.service_name = name
    async_ctx.cid = cid
    async_ctx.data = payload
    async_ctx.data_format = data_format
    async_ctx.zato_ctx = zato_ctx
    async_ctx.environ = environ

    if callback:
        # Callbacks are normalized to a list - there may be more than one.
        async_ctx.callback = list(callback) if isinstance(callback, (list, tuple)) else [callback]

    spawn_greenlet(self._invoke_async, async_ctx, channel)

    return cid
# ################################################################################################################################
def _invoke_async(
    self,
    ctx,     # type: AsyncCtx
    channel, # type: str
    _async_callback=_async_callback, # type: Service | str
) -> 'None':
    """ Low-level implementation of asynchronous invocations - runs the target
    service synchronously and forwards its response to each callback service,
    if any were requested. Meant to run in its own greenlet, spawned by invoke_async.
    """
    # Invoke our target service ..
    response = self.invoke(ctx.service_name, ctx.data, data_format=ctx.data_format, channel=channel, skip_response_elem=True)

    # .. and report back the response to our callback(s), if there are any.
    if ctx.callback:
        for callback_service in ctx.callback: # type: str
            # Each callback runs under its own, newly generated CID - note that
            # new_cid must be invoked here; previously the function object itself
            # was passed in as the CID. The original invocation is linked back
            # to via in_reply_to, which carries the original CID.
            _ = self.invoke(callback_service, payload=response, channel=_async_callback, cid=new_cid(),
                data_format=ctx.data_format, in_reply_to=ctx.cid, environ=ctx.environ,
                skip_response_elem=True)
# ################################################################################################################################
def translate(self, *args:'any_', **kwargs:'any_') -> 'str':
    """ A placeholder that is expected to be replaced at runtime by an initializer.
    """
    raise NotImplementedError('An initializer should override this method')
# ################################################################################################################################
def handle(self) -> 'None':
    """ The only method Zato services need to implement in order to process
    incoming requests - concrete services must override it.
    """
    raise NotImplementedError('Should be overridden by subclasses (Service.handle)')
# ################################################################################################################################
def lock(self, name:'str'='', *args:'any_', **kwargs:'any_') -> 'Lock':
    """ Creates a distributed lock.

    name - defaults to self.name, effectively serializing access to this service
    ttl - the maximum time, in seconds, the lock will be held (default: 20)
    block - how long, in seconds, to wait to acquire the lock before giving up (default: 10)

    For backward compatibility with the 2.0 signature (expires=20, timeout=10),
    'expires' is accepted as an alias of 'ttl' and 'timeout' as an alias of 'block'.
    Both may also be given positionally, in that order.
    """
    if args:
        ttl = args[0]
        block = args[1] if len(args) > 1 else 10
    else:
        ttl = kwargs.get('ttl') or kwargs.get('expires') or 20
        block = kwargs.get('block') or kwargs.get('timeout') or 10

    return self.server.zato_lock_manager(name or self.name, ttl=ttl, block=block)
# ################################################################################################################################
def sleep(self, timeout:'int'=1) -> 'None':
    """ Pauses the current greenlet for the given number of seconds,
    yielding control to other greenlets in the meantime.
    """
    _gevent_sleep(timeout)
# ################################################################################################################################
def accept(self, _zato_no_op_marker:'any_'=zato_no_op_marker) -> 'bool':
    """ Decides whether the current request is to be handled at all - the default
    is to accept every request. May be overridden by subclasses to filter
    requests out; if not overridden, ServiceStore replaces it with None,
    which callers check for before invoking it.
    """
    return True
# ################################################################################################################################
def run_in_thread(self, *args:'any_', **kwargs:'any_') -> 'any_':
    """ Spawns a callable in a new greenlet via gevent, returning the greenlet object
    to the caller - despite its name, no OS-level thread is started.
    """
    return _gevent_spawn(*args, **kwargs)

# Backward-compatible alias
spawn = run_in_thread
# ################################################################################################################################
@classmethod
def before_add_to_store(cls, logger:'Logger') -> 'bool':
    """ Invoked right before the class is added to the service store -
    the default implementation always returns True.
    """
    # NOTE(review): the boolean appears to decide whether deployment
    # of the service proceeds - confirm against ServiceStore.
    return True
# The hooks below are deliberate no-ops - ServiceStore replaces any hook that
# a subclass has not overridden with None, which is why invokers check each
# hook's truthiness before calling it.

def before_job(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked if the service has been defined as a job's invocation target,
    regardless of the job's type.
    """

def before_one_time_job(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked if the service has been defined as a one-time job's
    invocation target.
    """

def before_interval_based_job(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked if the service has been defined as an interval-based job's
    invocation target.
    """

def before_cron_style_job(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked if the service has been defined as a cron-style job's
    invocation target.
    """

def before_handle(self, _zato_no_op_marker=zato_no_op_marker, *args, **kwargs): # type: ignore
    """ Invoked just before the actual service receives the request data.
    """

def after_job(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked if the service has been defined as a job's invocation target,
    regardless of the job's type.
    """

def after_one_time_job(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked if the service has been defined as a one-time job's
    invocation target.
    """

def after_interval_based_job(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked if the service has been defined as an interval-based job's
    invocation target.
    """

def after_cron_style_job(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked if the service has been defined as a cron-style job's
    invocation target.
    """

def after_handle(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked right after the actual service has been invoked, regardless
    of whether the service raised an exception or not.
    """

def finalize_handle(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Offers the last chance to influence the service's operations.
    """

@staticmethod
def after_add_to_store(logger): # type: ignore
    """ Invoked right after the class has been added to the service store.
    """

def validate_input(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked right before handle. Any exception raised means handle will not be called.
    """

def validate_output(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
    """ Invoked right after handle. Any exception raised means further hooks will not be called.
    """

def get_request_hash(self, _zato_no_op_marker=zato_no_op_marker, *args, **kwargs): # type: ignore
    """ Lets services compute an incoming request's hash to decide whether it is already kept in cache,
    if one is configured for this request's channel.
    """
# ################################################################################################################################
def _log_input_output(self, user_msg:'str', level:'int', suppress_keys:'strlist', is_response:'bool') -> 'strdict':
    """ Common implementation behind log_input and log_output - logs the current
    request's or response's payload along with context attributes at the given
    logging level. Any key listed in suppress_keys is replaced with a
    '(suppressed)' placeholder instead of its actual value. Returns the dict
    that was logged.
    """
    suppress_keys = suppress_keys or []
    suppressed_msg = '(suppressed)'
    container = 'response' if is_response else 'request'
    payload_key = '{}.payload'.format(container)
    # Append a trailing space to the user's message, but only if there is one.
    user_msg = '{} '.format(user_msg) if user_msg else user_msg

    msg = {}
    if payload_key not in suppress_keys:
        msg[payload_key] = getattr(self, container).payload
    else:
        msg[payload_key] = suppressed_msg

    attrs = ('channel', 'cid', 'data_format', 'environ', 'impl_name',
        'invocation_time', 'job_type', 'name', 'slow_threshold', 'usage', 'wsgi_environ')

    # Responses carry additional timing and header information.
    if is_response:
        attrs += ('handle_return_time', 'processing_time', 'processing_time_raw',
            'zato.http.response.headers')

    for attr in attrs:
        if attr not in suppress_keys:
            # 'channel' is an object - log its type rather than the object itself.
            msg[attr] = self.channel.type if attr == 'channel' else getattr(self, attr, '(None)')
        else:
            msg[attr] = suppressed_msg

    self.logger.log(level, '{}{}'.format(user_msg, msg))

    return msg

def log_input(self, user_msg:'str'='', level:'int'=logging.INFO, suppress_keys:'any_'=None) -> 'strdict':
    """ Logs the current request's payload and context attributes.
    """
    return self._log_input_output(user_msg, level, suppress_keys, False)

def log_output(self, user_msg:'str'='', level:'int'=logging.INFO, suppress_keys:'any_'=('wsgi_environ',)) -> 'strdict':
    """ Logs the current response's payload and context attributes -
    the WSGI environ is suppressed by default.
    """
    return self._log_input_output(user_msg, level, suppress_keys, True)
# ################################################################################################################################
@staticmethod
def update(
    service,      # type: Service
    channel_type, # type: str
    server,       # type: ParallelServer
    broker_client, # type: BrokerClient
    _ignored,     # type: any_
    cid,          # type: str
    payload,      # type: any_
    raw_request,  # type: any_
    transport='',           # type: str
    simple_io_config=None,  # type: anydictnone
    data_format='',         # type: str
    wsgi_environ=None,      # type: dictnone
    job_type='',            # type: str
    channel_params=None,    # type: dictnone
    merge_channel_params=True, # type: bool
    params_priority='',     # type: str
    in_reply_to='',         # type: str
    environ=None,           # type: dictnone
    init=True,              # type: bool
    wmq_ctx=None,           # type: dictnone
    channel_info=None,      # type: ChannelInfo | None
    channel_item=None,      # type: dictnone
    wsx=None,               # type: WebSocket | None
    _AMQP=CHANNEL.AMQP,     # type: str
    _IBM_MQ=CHANNEL.IBM_MQ, # type: str
    _HL7v2=HL7.Const.Version.v2.id # type: str
) -> 'None':
    """ Takes a service instance and updates it with the current request's context data.

    NOTE(review): several parameters - simple_io_config, params_priority and wsx -
    are accepted but not referenced in this body; likewise, the channel_item
    argument is immediately overwritten from wsgi_environ below. Confirm whether
    they are kept only for signature compatibility.
    """
    wsgi_environ = wsgi_environ or {}

    # Attach the core server-side context.
    service.server = server
    service.broker_client = broker_client
    service.cid = cid
    service.request.payload = payload
    service.request.raw_request = raw_request
    service.transport = transport
    service.data_format = data_format
    service.wsgi_environ = wsgi_environ or {}
    service.job_type = job_type
    service.translate = server.kvdb.translate # type: ignore
    # Note that both .config and .user_config point to the same object.
    service.config = server.user_config
    service.user_config = server.user_config
    service.static_config = server.static_config
    service.time = server.time_util
    service.security = SecurityFacade(service.server)

    if channel_params:
        service.request.channel_params.update(channel_params)

    service.request.merge_channel_params = merge_channel_params
    service.in_reply_to = in_reply_to
    service.environ = environ or {}

    # The channel item always comes from the WSGI environ, overriding the parameter of the same name.
    channel_item = wsgi_environ.get('zato.channel_item') or {}
    channel_item = cast_('strdict', channel_item)
    sec_def_info = wsgi_environ.get('zato.sec_def', {})

    # Channel- or data-format-specific request objects.
    if channel_type == _AMQP:
        service.request.amqp = AMQPRequestData(channel_item['amqp_msg'])
    elif channel_type == _IBM_MQ:
        service.request.wmq = service.request.ibm_mq = IBMMQRequestData(wmq_ctx)
    elif data_format == _HL7v2:
        service.request.hl7 = HL7RequestData(channel_item['hl7_mllp_conn_ctx'], payload)

    chan_sec_info = ChannelSecurityInfo(
        sec_def_info.get('id'),
        sec_def_info.get('name'),
        sec_def_info.get('type'),
        sec_def_info.get('username'),
        sec_def_info.get('impl')
    )

    # An explicitly given ChannelInfo takes precedence over one built from the channel item.
    service.channel = service.chan = channel_info or ChannelInfo(
        channel_item.get('id'),
        channel_item.get('name'),
        channel_type,
        channel_item.get('data_format'),
        channel_item.get('is_internal'),
        channel_item.get('match_target'),
        chan_sec_info, channel_item
    )

    if init:
        service._init(channel_type in _wsgi_channels)
# ################################################################################################################################
def new_instance(self, service_name:'str', *args:'any_', **kwargs:'any_') -> 'Service':
    """ Creates a new service instance, populated with this service's request
    context, without invoking its handle method.
    """
    instance:'Service'
    instance, _ = self.server.service_store.new_instance_by_name(service_name, *args, **kwargs)

    # Copy over our own request context so the new instance is ready to use.
    instance.update(instance, CHANNEL.NEW_INSTANCE, self.server, broker_client=self.broker_client, _ignored=None,
        cid=self.cid, payload=self.request.payload, raw_request=self.request.raw_request, wsgi_environ=self.wsgi_environ)

    return instance
# ################################################################################################################################
class _Hook(Service):
    """ Base class for all hook services - dispatches each incoming hook
    invocation to the method registered for its hook type.
    """
    # Maps hook types to the names of the methods implementing them;
    # populated by each concrete subclass after its class body.
    _hook_func_name: 'strdict'

    class SimpleIO:
        input_required = (Opaque('ctx'),)
        output_optional = ('hook_action',)

    def handle(self):
        # Look up the method registered for the incoming hook type and call it.
        func_name = self._hook_func_name[self.request.input.ctx.hook_type]
        func = getattr(self, func_name)
        func()
# ################################################################################################################################
class PubSubHook(_Hook):
    """ Subclasses of this class may act as pub/sub hooks.
    """
    # Populated below, once the class body is fully defined.
    _hook_func_name = {}

    def before_publish(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked for each pub/sub message before it is published to a topic.
        """

    def before_delivery(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked for each pub/sub message right before it is delivered to an endpoint.
        """

    def on_outgoing_soap_invoke(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked for each message that is to be sent through an outgoing SOAP Suds connection.
        """

    def on_subscribed(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked for each new topic subscription.
        """

    def on_unsubscribed(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked each time a client unsubscribes.
        """

# Register the mapping of pub/sub hook types to the methods above.
PubSubHook._hook_func_name[PUBSUB.HOOK_TYPE.BEFORE_PUBLISH] = 'before_publish' # type: ignore
PubSubHook._hook_func_name[PUBSUB.HOOK_TYPE.BEFORE_DELIVERY] = 'before_delivery' # type: ignore
PubSubHook._hook_func_name[PUBSUB.HOOK_TYPE.ON_OUTGOING_SOAP_INVOKE] = 'on_outgoing_soap_invoke' # type: ignore
PubSubHook._hook_func_name[PUBSUB.HOOK_TYPE.ON_SUBSCRIBED] = 'on_subscribed' # type: ignore
PubSubHook._hook_func_name[PUBSUB.HOOK_TYPE.ON_UNSUBSCRIBED] = 'on_unsubscribed' # type: ignore
# ################################################################################################################################
# ################################################################################################################################
class WSXHook(_Hook):
    """ Subclasses of this class may act as WebSockets hooks.
    """
    # Populated below, once the class body is fully defined.
    _hook_func_name = {}

    def on_connected(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked each time a new WSX connection is established.
        """

    def on_disconnected(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked each time an existing WSX connection is dropped.
        """

    def on_pubsub_response(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked each time a response to a previous pub/sub message arrives.
        """

    def on_vault_mount_point_needed(self, _zato_no_op_marker=zato_no_op_marker): # type: ignore
        """ Invoked each time there is need to discover the name of a Vault mount point
        that a particular WSX channel is secured ultimately with, i.e. the mount point
        where the incoming user's credentials are stored in.
        """

# Register the mapping of WSX hook types to the methods above.
WSXHook._hook_func_name[WEB_SOCKET.HOOK_TYPE.ON_CONNECTED] = 'on_connected' # type: ignore
WSXHook._hook_func_name[WEB_SOCKET.HOOK_TYPE.ON_DISCONNECTED] = 'on_disconnected' # type: ignore
WSXHook._hook_func_name[WEB_SOCKET.HOOK_TYPE.ON_PUBSUB_RESPONSE] = 'on_pubsub_response' # type: ignore
WSXHook._hook_func_name[WEB_SOCKET.HOOK_TYPE.ON_VAULT_MOUNT_POINT_NEEDED] = 'on_vault_mount_point_needed' # type: ignore
# ################################################################################################################################
# ################################################################################################################################
class WSXAdapter(Service):
    """ Subclasses of this class can be used in events related to outgoing WebSocket connections.
    """
    # Per-event callbacks - declared here without values, so presumably
    # assigned at runtime or by subclasses; confirm with the WSX wrapper code.
    on_connected:'callable_'
    on_message_received:'callable_'
    on_closed:'callable_'
# ################################################################################################################################
# ################################################################################################################################
class RESTAdapter(Service):
    """ A base class for services that act as thin adapters around a single
    outgoing REST call.

    Subclasses configure the call either statically, through plain class
    attributes (conn_name, method, sec_def_name etc.), or dynamically,
    by providing the corresponding get_* callables, which take precedence
    over their static counterparts.
    """

    # These may be overridden by individual subclasses
    model = None
    conn_name = ''
    auth_scopes = ''
    sec_def_name = None
    log_response = False
    map_response = None

    # Optional per-request hooks - each one, when provided by a subclass,
    # takes precedence over the corresponding static attribute above.
    get_conn_name = None
    get_auth = None
    get_auth_scopes = None
    get_path_params = None
    get_method = None
    get_request = None
    get_headers = None
    get_query_string = None
    get_auth_bearer = None
    get_sec_def_name = None

    # ID-extraction configuration - note that has_json_id/json_id_param are
    # declared but not used in this base implementation; presumably consumed
    # by subclasses - confirm.
    has_query_string_id = False
    query_string_id_param = None
    has_json_id = False
    json_id_param = None

    # Default to GET calls
    method = 'GET'

# ################################################################################################################################

    def rest_call(
        self,
        conn_name, # type: str
        *,
        data='',            # type: str
        model=None,         # type: modelnone
        callback=None,      # type: callnone
        params=None,        # type: strdictnone
        headers=None,       # type: strdictnone
        method='',          # type: str
        sec_def_name=None,  # type: any_
        auth_scopes=None,   # type: any_
        log_response=True,  # type: bool
    ):
        """ Invokes the named outgoing REST connection and returns its response,
        optionally mapped through the callback callable.
        """
        # Get the actual REST connection ..
        conn:'RESTWrapper' = self.out.rest[conn_name].conn

        # .. invoke the system and map its response back through the callback callable ..
        out:'any_' = conn.rest_call(
            cid=self.cid,
            data=data,
            model=model, # type: ignore
            callback=callback,
            params=params,
            headers=headers,
            method=method,
            sec_def_name=sec_def_name,
            auth_scopes=auth_scopes,
            log_response=log_response,
        )

        # .. and return the result to our caller.
        return out

# ################################################################################################################################

    def handle(self):
        """ Assembles the connection name, request data, query/path parameters,
        method, security configuration and headers - each either static or
        produced by the matching get_* hook - then makes the REST call and
        stores its result as the response payload.
        """

        # Local aliases
        params:'strdict' = {}
        request:'any_' = ''
        headers:'strstrdict' = {}

        # The outgoing connection to use may be static or dynamically generated.
        # Fix: unlike the other get_* hooks, get_conn_name was previously used
        # without being called, which made the callable object itself the
        # connection name - call it if it is callable, while still accepting
        # a plain value for backward compatibility.
        if self.get_conn_name:
            conn_name = self.get_conn_name() if callable(self.get_conn_name) else self.get_conn_name
        else:
            conn_name = self.conn_name

        # The request to use may be dynamically generated
        if self.get_request:
            request = self.get_request() # type: ignore

        #
        # Build our query parameters, which can be partly implicit if this is an ID-only service
        # or explicitly if we have a method to do so.
        #
        if self.has_query_string_id:
            if self.query_string_id_param:
                query_string_id_param = self.query_string_id_param
            else:
                query_string_id_param = 'id'
            params[query_string_id_param] = self.request.input[query_string_id_param]

        # Update the query string with information obtained earlier
        if self.get_query_string:
            _params:'strdict' = self.get_query_string(params)
            params.update(_params)

        # Obtain any possible path parameters
        if self.get_path_params:
            _params:'strdict' = self.get_path_params(params)
            params.update(_params)

        # The REST method may be dynamically generated
        if self.get_method:
            method:'str' = self.get_method()
        else:
            method = self.method

        # Uppercase the method per what HTTP expects
        method = method.upper()

        # Authentication bearer token may be dynamically generated
        if self.get_auth_bearer:
            token:'str' = self.get_auth_bearer()
            headers['Authorization'] = f'Bearer {token}'

        # Security definition can be dynamically generated ..
        if self.get_sec_def_name:
            sec_def_name = self.get_sec_def_name()

        # .. it may also have been given explicitly ..
        elif self.sec_def_name:
            sec_def_name = self.sec_def_name

        # .. otherwise, we will indicate explicitly that it was not given on input in any way.
        else:
            sec_def_name = NotGiven

        # Auth scopes can be dynamically generated ..
        if self.get_auth_scopes:
            auth_scopes = self.get_auth_scopes()

        # .. it may also have been given explicitly ..
        elif self.auth_scopes:
            auth_scopes = self.auth_scopes

        # .. otherwise, we will indicate explicitly that they were not given on input in any way.
        else:
            auth_scopes = ''

        # Headers may be dynamically generated
        if self.get_headers:
            _headers:'strstrdict' = self.get_headers()
            headers.update(_headers)

        # Obtain the result ..
        out = self.rest_call(
            conn_name,
            data=request,
            model=self.model,
            callback=self.map_response,
            params=params,
            headers=headers,
            method=method,
            sec_def_name=sec_def_name,
            auth_scopes=auth_scopes,
            log_response=self.log_response,
        )

        # .. and return it to our caller.
        self.response.payload = out
# ################################################################################################################################
# ################################################################################################################################
| 68,383
|
Python
|
.py
| 1,317
| 42.679575
| 130
| 0.547781
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,243
|
adapter.py
|
zatosource_zato/code/zato-server/src/zato/server/service/adapter.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from copy import deepcopy
from uuid import uuid4
# Zato
from zato.common.api import ADAPTER_PARAMS, HTTPException
from zato.common.json_internal import dumps, loads
from zato.server.service import Service
# ################################################################################################################################
class JSONAdapter(Service):
    """ Invokes HTTP resources with JSON or query strings. In echo mode, returns request into response. In dry-run mode,
    returns in response the request that would have been sent to an external resource.
    """
    outconn = 'not-configured-{}'.format(uuid4().hex)
    method = 'not-configured-{}'.format(uuid4().hex)
    params_to_qs = False
    load_response = True
    params = {}
    force_in_qs = []
    apply_params = ADAPTER_PARAMS.APPLY_AFTER_REQUEST
    raise_error_on = ['4', '5'] # Any HTTP code starting with these prefixes will mean an exception

    def get_call_params(self):
        """ Builds the keyword arguments for the outgoing HTTP call - a 'params'
        dict for the query string and, unless params_to_qs is set, a JSON 'data' body.
        """
        call_params = {'params':{}}
        merged = {}

        # Parameters forced into the query string are popped off the payload first
        for qs_name in self.force_in_qs:
            call_params['params'][qs_name] = self.environ['zato.request_payload'].pop(qs_name, '')

        # The order of the two updates decides which side wins on key conflicts -
        # either self.params overrides the request or the other way around.
        if self.apply_params == ADAPTER_PARAMS.APPLY_AFTER_REQUEST:
            merged.update(self.environ['zato.request_payload'])
            merged.update(self.params)
        else:
            merged.update(self.params)
            merged.update(self.environ['zato.request_payload'])

        if self.params_to_qs:
            call_params['params'].update(merged)
        else:
            call_params['data'] = dumps(merged)

        return call_params

    def handle(self):
        """ Invokes the configured outgoing HTTP connection, supporting echo mode,
        dry-run mode and optional JSON-loading of the response.
        """
        self.logger.debug(
            '`%s` invoked with `%r` and `%r`, `%r`, `%r`, `%r`, `%r`, `%r`', self.name, self.request.payload,
            self.outconn, self.method, self.params_to_qs, self.load_response, self.params, self.apply_params)

        # Echo mode - only return what was received
        payload = self.request.payload
        if payload and isinstance(payload, dict) and payload.get('echo', False):
            self.response.payload = payload
            return

        # When self.force_in_qs is not empty, self.get_call_params pops keys off the
        # request in place. Under self.patterns.invoke_retry a failed call may be
        # retried with the very same request object, which by then would already be
        # missing those keys - hence a deep copy protects retries. The copy is made
        # only when needed so as not to pay the cost on every invocation.
        if self.force_in_qs:
            self.environ['zato.request_payload'] = deepcopy(self.request.payload)
        else:
            self.environ['zato.request_payload'] = self.request.payload

        # Parameters to invoke the remote resource with
        call_params = self.get_call_params()

        # Build a request but don't actually call it
        if call_params.pop('dry-run', False):
            self.response.payload = call_params
            return

        conn = self.outgoing.plain_http[self.outconn].conn
        invoke_func = getattr(conn, self.method.lower())
        response = invoke_func(self.cid, **call_params)

        # Any status code starting with one of the configured prefixes is an error
        for prefix in self.raise_error_on:
            if str(response.status_code).startswith(prefix):
                raise HTTPException(self.cid, response.text, response.status_code)

        if not self.load_response:
            self.response.payload = response.text
            return

        try:
            self.response.payload = loads(response.text)
        except ValueError:
            self.logger.error('Cannot load JSON response `%s` for request `%s` to `%s`',
                response.text, call_params, self.outconn)
            raise
# ################################################################################################################################
| 4,542
|
Python
|
.py
| 86
| 43.930233
| 130
| 0.612088
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,244
|
store.py
|
zatosource_zato/code/zato-server/src/zato/server/service/store.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import importlib
import inspect
import logging
import os
import sys
from dataclasses import dataclass
from datetime import datetime
from functools import total_ordering
from hashlib import sha256
from importlib import import_module
from inspect import getargspec, getmodule, getmro, getsourcefile, isclass
from pickle import HIGHEST_PROTOCOL as highest_pickle_protocol
from random import randint
from shutil import copy as shutil_copy
from traceback import format_exc
from typing import Any, List
# dill
from dill import load as dill_load
# gevent
from gevent import sleep as gevent_sleep
from gevent.lock import RLock
# humanize
from humanize import naturalsize
# PyYAML
try:
from yaml import CDumper # For pyflakes
Dumper = CDumper
except ImportError:
from yaml import Dumper # (Ditto)
Dumper = Dumper
# Zato
from zato.common.api import CHANNEL, DONT_DEPLOY_ATTR_NAME, RATE_LIMIT, SourceCodeInfo, TRACE1
from zato.common.facade import SecurityFacade
from zato.common.json_internal import dumps
from zato.common.json_schema import get_service_config, ValidationConfig as JSONSchemaValidationConfig, \
Validator as JSONSchemaValidator
from zato.common.match import Matcher
from zato.common.marshal_.api import Model as DataClassModel
from zato.common.marshal_.simpleio import DataClassSimpleIO
from zato.common.odb.model.base import Base as ModelBase
from zato.common.typing_ import cast_, list_
from zato.common.util.api import deployment_info, import_module_from_path, is_func_overridden, is_python_file, visit_py_source
from zato.common.util.platform_ import is_non_windows
from zato.common.util.python_ import get_module_name_by_path
from zato.server.config import ConfigDict
from zato.server.service import after_handle_hooks, after_job_hooks, before_handle_hooks, before_job_hooks, \
PubSubHook, SchedulerFacade, Service, WSXAdapter, WSXFacade
from zato.server.service.internal import AdminService
# Zato - Cython
from zato.simpleio import CySimpleIO
# ################################################################################################################################
# ################################################################################################################################
# The `if 0:` guard means these imports are for type checkers only - they are
# never executed at runtime, which avoids import cycles and startup cost.
if 0:
    from inspect import ArgSpec
    from sqlalchemy.orm.session import Session as SASession
    from zato.common.hot_deploy_ import HotDeployProject
    from zato.common.odb.api import ODBManager
    from zato.common.typing_ import any_, anydict, anylist, callable_, dictnone, intstrdict, module_, stranydict, \
        strdictdict, strint, strintdict, strlist, stroriter, tuple_
    from zato.server.base.parallel import ParallelServer
    from zato.server.base.worker import WorkerStore
    from zato.server.config import ConfigStore

    # Self-assignments keep static analysers from reporting the imports as unused
    ConfigStore = ConfigStore
    HotDeployProject = HotDeployProject
    ODBManager = ODBManager
    ParallelServer = ParallelServer
    WorkerStore = WorkerStore

# ################################################################################################################################
# ################################################################################################################################

# For pyflakes
Any = Any
List = List

# ################################################################################################################################
# ################################################################################################################################

logger = logging.getLogger(__name__)

# Cached once at import time so hot paths can cheaply check the log level
has_debug = logger.isEnabledFor(logging.DEBUG)
has_trace1 = logger.isEnabledFor(TRACE1)

_utcnow=datetime.utcnow

# ################################################################################################################################
# ################################################################################################################################

# For backward compatibility we ignore certain modules
internal_to_ignore = []

# STOMP was removed in 3.2
internal_to_ignore.append('stomp')

# ################################################################################################################################
# ################################################################################################################################

# A marker used to recognize pickle-version errors when loading cached services
_unsupported_pickle_protocol_msg = 'unsupported pickle protocol:'

# Fully-qualified name of the Model base class, used in log messages
data_class_model_class_name = 'zato.server.service.Model'

# ################################################################################################################################
# ################################################################################################################################

# Names of all the service methods treated as hooks - each is checked for user overrides during deployment
hook_methods = ('accept', 'get_request_hash') + before_handle_hooks + after_handle_hooks + before_job_hooks + after_job_hooks
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    """ Module-level constants for rate-limiting identifiers. """

    # Note: this definition previously ended with a trailing comma, which
    # accidentally turned the attribute into a one-element tuple instead
    # of the plain id value.
    Rate_Limit_Exact = RATE_LIMIT.TYPE.EXACT.id
    Rate_Limit_Service = RATE_LIMIT.OBJECT_TYPE.SERVICE
# ################################################################################################################################
# ################################################################################################################################
class _TestingWorkerStore:
    """ A stand-in for WorkerStore used when the service store runs in testing mode.
    Connection-related attributes default to None because no real connections exist under tests.
    """
    sql_pool_store = None
    outconn_wsx = None
    vault_conn_api = None
    outconn_ldap = None
    outconn_mongodb = None
    def_kafka = None

    zmq_out_api = None
    sms_twilio_api = None

    cassandra_api = None
    cassandra_query_api = None

    email_smtp_api = None
    email_imap_api = None

    search_es_api = None
    search_solr_api = None

    cache_api = None

    def __init__(self):
        # Filled in later by ServiceStore.__init__ with a _TestingWorkerConfig instance
        self.worker_config = cast_('ConfigStore', None)
# ################################################################################################################################
class _TestingWorkerConfig:
    """ A stand-in for ConfigStore used in testing mode - all outgoing connection groups are empty. """
    out_odoo = None
    out_soap = None
    out_sap = None
    out_sftp = None
# ################################################################################################################################
@total_ordering
class InRAMService:
    """ An in-memory representation of a single deployed service.
    Equality, ordering and hashing are all based solely on the service's name.
    """
    cluster_id: 'int' = 0
    id: 'int' = 0
    impl_name: 'str' = ''
    name: 'str' = ''
    deployment_info: 'str' = ''
    service_class: 'type[Service]'
    is_active: 'bool' = True
    is_internal: 'bool' = False
    slow_threshold: 'int' = 99999
    source_code_info: 'SourceCodeInfo'

    def __repr__(self) -> 'str':
        template = '<{} at {} name:{} impl_name:{}>'
        return template.format(type(self).__name__, hex(id(self)), self.name, self.impl_name)

    def __eq__(self, other:'InRAMService') -> 'bool':
        return other.name == self.name

    def __lt__(self, other:'InRAMService') -> 'bool':
        return self.name < other.name

    def __hash__(self) -> 'int':
        return hash(self.name)

    def to_dict(self) -> 'stranydict':
        # Only the attributes needed by the ODB synchronisation layer
        out = {}
        out['name'] = self.name
        out['impl_name'] = self.impl_name
        out['is_active'] = self.is_active
        out['is_internal'] = self.is_internal
        out['cluster_id'] = self.cluster_id
        return out
# A typed list of InRAMService objects
inramlist = list_[InRAMService]
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class ModelInfo:
    """ Describes a single data model deployed from a Python module on disk. """

    # Name of the model, used as its key in ServiceStore.models
    name: 'str'

    # Path to the file the model was deployed from - compared against
    # incoming file paths in _collect_objects_by_file_path
    path: 'str'

    # Name of the module the model comes from
    mod_name: 'str'

    # The model's source code
    source: 'str'

# A typed list of ModelInfo objects
modelinfolist = list_[ModelInfo]
# ################################################################################################################################
# ################################################################################################################################
class DeploymentInfo:
    """ Aggregate information about a batch of services to be deployed. """
    __slots__ = 'to_process', 'total_services', 'total_size', 'total_size_human'

    def __init__(self) -> 'None':
        self.to_process = [] # type: anylist

        # Fix: total_services is declared in __slots__ but was never initialized,
        # which made reading it raise AttributeError until it was assigned.
        self.total_services = 0 # type: int

        self.total_size = 0 # type: int
        self.total_size_human = '' # type: str
# ################################################################################################################################
def get_service_name(class_obj:'type[Service]') -> 'str':
    """ Return the name of a service - the explicit 'name' attribute if the class
    defines one, otherwise '<module>.<class name>'.
    """
    default = '{}.{}'.format(class_obj.__module__, class_obj.__name__)
    return getattr(class_obj, 'name', default)
# ################################################################################################################################
def get_batch_indexes(services:'inramlist', max_batch_size:'int') -> 'anylist':
    """ Splits services into deployment batches, returning [start, end) index pairs.
    A batch is closed as soon as the combined source length of its services
    reaches max_batch_size; whatever remains forms one final, smaller batch.
    """
    # A single service is always its own batch
    if len(services) == 1:
        return [[0, 1]]

    batches = []
    batch_start = 0
    accumulated = 0

    # Indexes are 1-based inside the loop so the final pair can end at len(services)
    total = len(services)
    last_batch_end = 0

    for idx, service in enumerate(services, 1):
        accumulated += service.source_code_info.len_source

        # Enough source collected - close the current batch and start a new one
        if accumulated >= max_batch_size:
            batches.append([batch_start, idx])
            batch_start = idx
            last_batch_end = idx
            accumulated = 0

    # The loop may have finished with leftover services that never filled a whole
    # batch - by construction they are small enough to be grouped together.
    if last_batch_end < total:
        batches.append([last_batch_end, total])

    return batches
# ################################################################################################################################
class ServiceStore:
    """ A store of Zato services.
    """
    # All deployed services, keyed by implementation name
    services: 'stranydict'

    # Accessor to the ODB (SQL database)
    odb: 'ODBManager'

    # The server this store belongs to
    server: 'ParallelServer'

    # True if the store runs under tests rather than in a real server
    is_testing:'bool'
def __init__(
    self,
    *,
    services, # type: strdictdict
    odb, # type: ODBManager
    server, # type: ParallelServer
    is_testing, # type: bool
) -> 'None':
    """ Sets up in-RAM lookup structures mapping service names, IDs and
    implementation names to each other, plus deployment bookkeeping state.
    """
    self.services = services
    self.odb = odb
    self.server = server
    self.is_testing = is_testing
    self.max_batch_size = 0

    # Models deployed, keyed by name
    self.models = {} # type: stranydict

    # Bidirectional lookup maps between IDs, implementation names and service names
    self.id_to_impl_name = {} # type: intstrdict
    self.impl_name_to_id = {} # type: strintdict
    self.name_to_impl_name = {} # type: stranydict

    # Per-service deployment metadata, keyed by implementation name
    self.deployment_info = {} # type: stranydict

    # Guards all mutations of the lookup structures above
    self.update_lock = RLock()
    self.patterns_matcher = Matcher()
    self.needs_post_deploy_attr = 'needs_post_deploy'

    # On non-Windows systems internal services are additionally cached,
    # which is reflected in the deployment log messages below.
    self.has_internal_cache = is_non_windows

    if self.has_internal_cache:
        self.action_internal_doing = 'Deploying and caching'
        self.action_internal_done = 'Deployed and cached'
    else:
        self.action_internal_doing = 'Deploying'
        self.action_internal_done = 'Deployed'

    # Under tests, substitute light-weight stand-ins for the worker store and its config
    if self.is_testing:
        self._testing_worker_store = cast_('WorkerStore', _TestingWorkerStore())
        self._testing_worker_store.worker_config = cast_('ConfigStore', _TestingWorkerConfig())
# ################################################################################################################################
def is_service_wsx_adapter(self, service_name:'str') -> 'bool':
    """ Returns True if the service of the given name subclasses WSXAdapter,
    False otherwise, including when the service cannot be looked up at all.
    """
    try:
        impl_name = self.name_to_impl_name[service_name]
        service_info = self.services[impl_name]
        service_class = service_info['service_class']
        return issubclass(service_class, WSXAdapter)
    except Exception as e:
        # Fix: logger.warn is a deprecated alias - use logger.warning
        logger.warning('Exception in ServiceStore.is_service_wsx_adapter -> %s', e.args)
        return False
# ################################################################################################################################
def edit_service_data(self, config:'stranydict') -> 'None':
    """ Updates an existing service's configuration in place and rebuilds
    its rate-limiting setup.
    """
    # Update the ConfigDict object
    config_dict = self.server.config.service[config['name']] # type: ConfigDict
    config_dict['config'].update(config)

    # Recreate the rate limiting configuration
    self.set_up_rate_limiting(config['name'])
# ################################################################################################################################
def _delete_service_from_odb(self, service_id:'int') -> 'None':
_ = self.server.invoke('zato.service.delete', {
'id':service_id
})
# ################################################################################################################################
def _delete_service_data(self, name:'str', delete_from_odb:'bool'=False) -> 'None':
try:
impl_name = self.name_to_impl_name[name] # type: str
service_id = self.impl_name_to_id[impl_name] # type: int
del self.id_to_impl_name[service_id]
del self.impl_name_to_id[impl_name]
del self.name_to_impl_name[name]
del self.services[impl_name]
if delete_from_odb:
self._delete_service_from_odb(service_id)
except KeyError:
# This is as expected and may happen if a service
# was already deleted, e.g. it was in the same module
# that another deleted service was in.
pass
# ################################################################################################################################
def delete_service_data(self, name:'str') -> 'None':
    """ Thread-safe wrapper around _delete_service_data. """
    with self.update_lock:
        self._delete_service_data(name)
# ################################################################################################################################
def _delete_model_data(self, name:'str') -> 'None':
try:
del self.models[name]
except KeyError:
# Same comment as in self._delete_service_data
pass
# ################################################################################################################################
def delete_model_data(self, name:'str') -> 'None':
with self.update_lock:
self._delete_service_data(name)
# ################################################################################################################################
def _collect_objects_by_file_path(self, file_path:'str', container:'stranydict', *, is_dict:'bool') -> 'strlist':
# Our response to produce
out = []
# Go through all the objects in the container ..
for value in container.values():
# .. look up the path using keys or attributes, depending on whether it's a dict value or not ..
if is_dict:
object_path = value['path']
object_name = value['name']
else:
object_path = value.path
object_name = value.name
# .. do we have a match here? ..
if file_path == object_path:
# .. if yes, we're going to return that path to our caller ..
out.append(object_name)
# .. finally, we are ready to return our output.
return out
# ################################################################################################################################
def delete_objects_by_file_path(self, file_path:'str', *, delete_from_odb:'bool') -> 'None':
with self.update_lock:
# Collect all services to delete
services_to_delete = self._collect_objects_by_file_path(file_path, self.services, is_dict=True)
# Collect all models to delete
models_to_delete = self._collect_objects_by_file_path(file_path, self.models, is_dict=False)
# Delete all the services
for item in services_to_delete:
self._delete_service_data(item, delete_from_odb)
# Delete all the models
for item in models_to_delete:
self._delete_model_data(item)
# ################################################################################################################################
def post_deploy(self, class_:'type[Service]') -> 'None':
    """ Invoked for classes flagged during deployment as needing post-deployment
    work - currently this means setting up their JSON Schema validation.
    """
    self.set_up_class_json_schema(class_)
# ################################################################################################################################
def set_up_class_json_schema(self, class_:'type[Service]', service_config:'dictnone'=None) -> 'None':
    """ Configures JSON Schema validation for a service class, deferring the
    work until after deployment if the service is not deployed yet.
    """
    class_name = class_.get_name()

    # We are required to configure JSON Schema for this service
    # but first we need to check if the service is already deployed.
    # If it is not, we need to set a flag indicating that our caller
    # should do it later, once the service has been actually deployed.
    service_info = self.server.config.service.get(class_name)
    if not service_info:
        setattr(class_, self.needs_post_deploy_attr, True)
        return

    _service_config = service_config or service_info['config'] # type: anydict
    json_schema_config = get_service_config(_service_config, self.server)

    # Make sure the schema points to an absolute path and that it exists
    if not os.path.isabs(class_.schema):
        schema_path = os.path.join(self.server.json_schema_dir, class_.schema)
    else:
        schema_path = class_.schema

    # A missing schema file is logged but is not an error
    if not os.path.exists(schema_path):
        logger.warning('Could not find JSON Schema for `%s` in `%s` (class_.schema=%s)',
            class_name, schema_path, class_.schema)
        return

    config = JSONSchemaValidationConfig()
    config.is_enabled = json_schema_config['is_json_schema_enabled']
    config.object_name = class_name
    config.object_type = CHANNEL.SERVICE
    config.schema_path = schema_path
    config.needs_err_details = json_schema_config['needs_json_schema_err_details']

    validator = JSONSchemaValidator()
    validator.config = config
    validator.init()

    # The validator is shared by all instances of the service class
    class_._json_schema_validator = validator
# ################################################################################################################################
def set_up_rate_limiting(
    self,
    name, # type: str
    class_=None # type: type[Service] | None
) -> 'None':
    """ Configures rate limiting for a service, looking its class up by name
    unless it is given explicitly.
    """
    if not class_:
        service_id = self.get_service_id_by_name(name) # type: int
        info = self.get_service_info_by_id(service_id) # type: anydict
        _class = info['service_class'] # type: type[Service]
    else:
        _class = class_

    # Will set up rate limiting for service if it needs to be done, returning in such a case or False otherwise.
    is_rate_limit_active = self.server.set_up_object_rate_limiting(ModuleCtx.Rate_Limit_Service, name, 'service')

    # Set a flag to signal that this service has rate limiting enabled or not
    _class._has_rate_limiting = is_rate_limit_active
# ################################################################################################################################
def _has_io_data_class(
    self,
    class_:'type[Service]',
    msg_class:'any_',
    msg_type:'str'
) -> 'bool':
    """ Returns True if msg_class represents a Model-based (dataclass) SimpleIO
    definition, unwrapping generic list aliases such as list_[MyModel] first.
    msg_type is 'Input' or 'Output' and is used in warning messages only.
    """
    # Is this a generic alias, e.g. in the form of list_[MyModel]?
    _is_generic = hasattr(msg_class, '__origin__')

    # Do check it now ..
    if _is_generic and msg_class.__origin__ is list:

        # .. it is a list but does it have any inner models? ..
        if msg_class.__args__:

            # .. if we are here, it means that it is a generic class of a list type
            # .. that has a model inside thus we need to check this model in later steps ..
            msg_class = msg_class.__args__[0]

    # Dataclasses require class objects ..
    if isclass(msg_class):

        # .. and it needs to be our own Model subclass ..
        if not issubclass(msg_class, DataClassModel):
            logger.warning('%s definition %s in service %s will be ignored - \'%s\' should be a subclass of %s',
                msg_type,
                msg_class,
                class_,
                msg_type.lower(),
                data_class_model_class_name)
            return False

        # .. if we are here, it means that this is really a Model-based I/O definition
        else:
            return True

    # It is not a dataclass so we can return False
    else:
        return False
# ################################################################################################################################
def set_up_class_attributes(self, class_:'type[Service]', service_store:'ServiceStore') -> 'None':
    """ Populates a service class with everything that can be shared by all of its
    instances - SimpleIO definitions, connection facades, feature flags and hooks.
    """

    # Local aliases
    _Class_SimpleIO = None # type: ignore

    # Set up enforcement of what other services a given service can invoke
    try:
        class_.invokes
    except AttributeError:
        class_.invokes = []

    # If the class does not have a SimpleIO attribute
    # but it does have input or output declared
    # then we add a SimpleIO wrapper ourselves.
    if not hasattr(class_, 'SimpleIO'):
        _direct_sio_input = getattr(class_, 'input', None)
        _direct_sio_output = getattr(class_, 'output', None)
        if _direct_sio_input or _direct_sio_output:

            # If I/O is declared directly, it means that we do not need response wrappers
            class_._zato_needs_response_wrapper = False # type: ignore

            class _Class_SimpleIO:
                pass

            if _direct_sio_input:
                _Class_SimpleIO.input = _direct_sio_input # type: ignore

            if _direct_sio_output:
                _Class_SimpleIO.output = _direct_sio_output # type: ignore

            class_.SimpleIO = _Class_SimpleIO # type: ignore

    # Attribute access is used here only to detect whether SimpleIO exists at all
    try:
        class_.SimpleIO # type: ignore
        class_.has_sio = True
    except AttributeError:
        class_.has_sio = False

    if class_.has_sio:

        sio_input = getattr(
            class_.SimpleIO, # type: ignore
            'input',
            None
        )
        sio_output = getattr(
            class_.SimpleIO, # type: ignore
            'output',
            None
        )

        has_input_data_class = self._has_io_data_class(class_, sio_input, 'Input')
        has_output_data_class = self._has_io_data_class(class_, sio_output, 'Output')

        # If either input or output is a dataclass but the other one is not,
        # we need to turn the latter into a dataclass as well.

        # We are here if output is a dataclass ..
        if has_output_data_class:

            # .. but input is not and it should be ..
            if (not has_input_data_class) and sio_input:

                # .. create a name for the dynamically-generated input model class ..
                name = class_.__module__ + '_' + class_.__name__
                name = name.replace('.', '_')
                name += '_AutoInput'

                # .. generate the input model class now ..
                model_input = DataClassModel.build_model_from_flat_input(
                    service_store.server,
                    service_store.server.sio_config,
                    CySimpleIO,
                    name,
                    sio_input
                )

                # .. and assign it as input.
                if _Class_SimpleIO:
                    _Class_SimpleIO.input = model_input # type: ignore

        # We are here if input is a dataclass ..
        if has_input_data_class:

            # .. but output is not and it should be.
            if (not has_output_data_class) and sio_output:

                # .. create a name for the dynamically-generated output model class ..
                name = class_.__module__ + '_' + class_.__name__
                name = name.replace('.', '_')
                name += '_AutoOutput'

                # .. generate the input model class now ..
                model_output = DataClassModel.build_model_from_flat_input(
                    service_store.server,
                    service_store.server.sio_config,
                    CySimpleIO,
                    name,
                    sio_output
                )

                if _Class_SimpleIO:
                    _Class_SimpleIO.output = model_output # type: ignore

        # Model-based I/O requires the dataclass-aware SimpleIO implementation
        if has_input_data_class or has_output_data_class:
            SIOClass = DataClassSimpleIO
        else:
            SIOClass = CySimpleIO # type: ignore

        _ = SIOClass.attach_sio(service_store.server, service_store.server.sio_config, class_) # type: ignore

    # May be None during unit-tests - not every test provides it.
    if service_store:

        # Set up all attributes that do not have to be assigned to each instance separately
        # and can be shared as class attributes.
        class_.wsx = WSXFacade(service_store.server)

        if self.is_testing:

            # Under tests, stand-ins replace the real worker store and every
            # optional component is reported as enabled.
            class_._worker_store = self._testing_worker_store
            class_._worker_config = self._testing_worker_store.worker_config

            class_.component_enabled_email = True
            class_.component_enabled_search = True
            class_.component_enabled_msg_path = True
            class_.component_enabled_hl7 = True
            class_.component_enabled_ibm_mq = True
            class_.component_enabled_odoo = True
            class_.component_enabled_zeromq = True
            class_.component_enabled_patterns = True
            class_.component_enabled_target_matcher = True
            class_.component_enabled_invoke_matcher = True
            class_.component_enabled_sms = True

        else:

            class_.add_http_method_handlers()
            class_._worker_store = service_store.server.worker_store
            class_._enforce_service_invokes = service_store.server.enforce_service_invokes # type: ignore
            class_.odb = service_store.server.odb
            class_.schedule = SchedulerFacade(service_store.server)
            class_.kvdb = service_store.server.worker_store.kvdb # type: ignore
            class_.pubsub = service_store.server.worker_store.pubsub
            class_.cloud.aws.s3 = service_store.server.worker_store.worker_config.cloud_aws_s3
            class_.cloud.confluence = service_store.server.worker_store.cloud_confluence
            class_.cloud.dropbox = service_store.server.worker_store.cloud_dropbox
            class_.cloud.jira = service_store.server.worker_store.cloud_jira
            class_.cloud.salesforce = service_store.server.worker_store.cloud_salesforce
            class_.cloud.ms365 = service_store.server.worker_store.cloud_microsoft_365
            class_._out_ftp = service_store.server.worker_store.worker_config.out_ftp # type: ignore
            class_._out_plain_http = service_store.server.worker_store.worker_config.out_plain_http
            class_.amqp.invoke = service_store.server.worker_store.amqp_invoke # .send is for pre-3.0 backward compat
            class_.amqp.invoke_async = class_.amqp.send = service_store.server.worker_store.amqp_invoke_async
            class_.commands.init(service_store.server)
            class_.definition.kafka = service_store.server.worker_store.def_kafka
            class_.im.slack = service_store.server.worker_store.outconn_im_slack
            class_.im.telegram = service_store.server.worker_store.outconn_im_telegram
            class_._worker_config = service_store.server.worker_store.worker_config

            class_.component_enabled_email = service_store.server.fs_server_config.component_enabled.email
            class_.component_enabled_search = service_store.server.fs_server_config.component_enabled.search
            class_.component_enabled_msg_path = service_store.server.fs_server_config.component_enabled.msg_path
            class_.component_enabled_ibm_mq = service_store.server.fs_server_config.component_enabled.ibm_mq
            class_.component_enabled_odoo = service_store.server.fs_server_config.component_enabled.odoo
            class_.component_enabled_zeromq = service_store.server.fs_server_config.component_enabled.zeromq
            class_.component_enabled_patterns = service_store.server.fs_server_config.component_enabled.patterns
            class_.component_enabled_target_matcher = service_store.server.fs_server_config.component_enabled.target_matcher
            class_.component_enabled_invoke_matcher = service_store.server.fs_server_config.component_enabled.invoke_matcher
            class_.component_enabled_sms = service_store.server.fs_server_config.component_enabled.sms

            # New in Zato 3.2, thus optional
            class_.component_enabled_hl7 = service_store.server.fs_server_config.component_enabled.get('hl7')

            # JSON Schema
            if class_.schema:
                self.set_up_class_json_schema(class_)

            # User management and SSO
            if service_store.server.is_sso_enabled:
                class_.sso = service_store.server.sso_api

            # Crypto operations
            class_.crypto = service_store.server.crypto_manager

            # Audit log
            class_.audit_pii = service_store.server.audit_pii

    class_._before_job_hooks = []
    class_._after_job_hooks = []

    # Override hook methods that have not been implemented by user
    for func_name in hook_methods:
        func = getattr(class_, func_name, None)
        if func:

            # Replace with None or use as-is depending on whether the hook was overridden by user.
            impl = func if is_func_overridden(func) else None

            # Assign to class either the replaced value or the original one.
            setattr(class_, func_name, impl)

            if impl and func_name in before_job_hooks:
                class_._before_job_hooks.append(impl)

            if impl and func_name in after_job_hooks:
                class_._after_job_hooks.append(impl)

    class_._has_before_job_hooks = bool(class_._before_job_hooks)
    class_._has_after_job_hooks = bool(class_._after_job_hooks)
# ################################################################################################################################
def has_sio(self, service_name:'str') -> 'bool':
""" Returns True if service indicated by service_name has a SimpleIO definition.
"""
with self.update_lock:
service_id = self.get_service_id_by_name(service_name)
service_info = self.get_service_info_by_id(service_id) # type: stranydict
class_ = service_info['service_class'] # type: Service
return getattr(class_, 'has_sio', False)
# ################################################################################################################################
def get_service_info_by_id(self, service_id:'strint') -> 'stranydict':
if not isinstance(service_id, int):
service_id = int(service_id)
try:
impl_name = self.id_to_impl_name[service_id]
except KeyError:
keys_found = sorted(self.id_to_impl_name)
keys_found = [(elem, type(elem)) for elem in keys_found]
raise KeyError('No such service_id key `{}` `({})` among `{}`'.format(repr(service_id), type(service_id), keys_found))
else:
try:
return self.services[impl_name]
except KeyError:
keys_found = sorted(repr(elem) for elem in self.services.keys())
keys_found = [(elem, type(elem)) for elem in keys_found]
raise KeyError('No such impl_name key `{}` `({})` among `{}`'.format(
repr(impl_name), type(impl_name), keys_found))
# ################################################################################################################################
def get_service_id_by_name(self, service_name:'str') -> 'int':
    """ Maps a service's name to its database ID via the impl_name lookup tables.
    """
    return self.impl_name_to_id[self.name_to_impl_name[service_name]]
# ################################################################################################################################
def get_service_name_by_id(self, service_id:'int') -> 'str':
    """ Returns the name of the service with the given ID.
    """
    service_info = self.get_service_info_by_id(service_id)
    return service_info['name']
# ################################################################################################################################
def get_deployment_info(self, impl_name:'str') -> 'anydict':
    """ Returns deployment metadata for the given impl_name, or an empty dict if there is none.
    """
    found = self.deployment_info.get(impl_name)
    return found if found else {}
# ################################################################################################################################
def has_service(self, service_name:'str') -> 'bool':
    """ Returns True if a service of the given name is known to this store.
    """
    known_names = self.name_to_impl_name
    return service_name in known_names
# ################################################################################################################################
def _invoke_hook(self, service:'Service', hook_name:'str') -> 'None':
""" A utility method for invoking various service's hooks.
"""
try:
hook = getattr(service, hook_name)
hook()
except Exception:
logger.error('Error while invoking `%s` on service `%s` e:`%s`', hook_name, service, format_exc())
# ################################################################################################################################
def new_instance(self, impl_name:'str', *args:'any_', **kwargs:'any_') -> 'tuple_[Service, bool]':
    """ Returns a new instance of a service of the given impl name, along with its is_active flag.
    """
    # Look up details of this implementation ..
    service_info = self.services[impl_name]
    service_class = service_info['service_class']
    is_active = service_info['is_active']

    # .. instantiate the service ..
    service:'Service' = service_class(*args, **kwargs)

    # .. wire in the server-side context ..
    service.server = self.server
    service.config = self.server.user_config
    service.user_config = self.server.user_config
    service.time = self.server.time_util
    service.security = SecurityFacade(service.server)

    # .. and hand everything back to the caller.
    return service, is_active
# ################################################################################################################################
def new_instance_by_id(self, service_id:'int', *args:'any_', **kwargs:'any_') -> 'tuple_[Service, bool]':
    """ Returns a new instance of the service identified by its database ID.

    Fix: *args and **kwargs are now forwarded to new_instance - previously they were
    accepted but silently dropped, unlike in new_instance_by_name.
    """
    impl_name = self.id_to_impl_name[service_id]
    return self.new_instance(impl_name, *args, **kwargs)
# ################################################################################################################################
def new_instance_by_name(self, name:'str', *args:'any_', **kwargs:'any_') -> 'tuple_[Service, bool]':
    """ Returns a new instance of the service of the given name.
    """
    try:
        impl_name = self.name_to_impl_name[name]
    except KeyError:
        # Log the known keys before re-raising to make troubleshooting easier.
        logger.warning('No such key `{}` among `{}`'.format(name, sorted(self.name_to_impl_name)))
        raise
    return self.new_instance(impl_name, *args, **kwargs)
# ################################################################################################################################
def service_data(self, impl_name:'str') -> 'stranydict':
    """ Returns all the service-related data.
    """
    data = self.services[impl_name]
    return data
# ################################################################################################################################
def is_deployed(self, name:'str') -> 'bool':
    """ Returns True if input service by name is deployed, False otherwise.
    """
    deployed_names = self.name_to_impl_name
    return name in deployed_names
# ################################################################################################################################
def import_internal_services(
    self,
    items,         # type: stroriter
    base_dir,      # type: str
    sync_internal, # type: bool
    is_first       # type: bool
) -> 'anylist':
    """ Imports internal services, either from their source code or from a locally cached file.

    items -- module names, directories or file paths to deploy from
    base_dir -- the server's base directory, used to locate the cache file
    sync_internal -- whether to rebuild the cache from scratch (note: unconditionally forced to True below)
    is_first -- True if this is the first worker in this server's group of workers

    Returns the list of services that were processed.
    """
    cache_file_path = os.path.join(base_dir, 'config', 'repo', 'internal-cache.dat')

    # It is possible that the cache file exists but it is of size zero.
    # This will happen if the process of writing data out to the file
    # was interrupted for any reason the last time the server was starting up.
    # In that case, we need to delete the file altogether and let it recreate.
    if os.path.exists(cache_file_path):
        stat = os.stat(cache_file_path)
        if stat.st_size == 0:
            logger.info('Deleting empty `%s` file', cache_file_path)
            os.remove(cache_file_path)

    # Basic ODB data about internal services, keyed by impl_name.
    sql_services = {}
    for item in self.odb.get_sql_internal_service_list(self.server.cluster_id):
        sql_services[item.impl_name] = { # type: ignore
            'id': item.id, # type: ignore
            'impl_name': item.impl_name, # type: ignore
            'is_active': item.is_active, # type: ignore
            'slow_threshold': item.slow_threshold, # type: ignore
        }

    # sync_internal may be False but if the cache does not exist (which is the case if a server starts up the first time),
    # we need to create it anyway and sync_internal becomes True then. However, the cache should be created only by the very
    # first worker in a group of workers - the rest can simply assume that the cache is ready to read.
    # NOTE(review): the condition below is commented out, so sync_internal is unconditionally forced to True
    # and the cached branch further down is effectively never taken - confirm this is intended.
    # if is_first and not os.path.exists(cache_file_path):
    sync_internal = True

    if sync_internal:

        # Synchronizing internal modules means re-building the internal cache from scratch
        # and re-deploying everything.
        service_info = []
        internal_cache = {
            'service_info': service_info
        }

        # This is currently unused
        internal_cache = internal_cache

        logger.info('{} internal services (%s)'.format(self.action_internal_doing), self.server.name)
        info = self.import_services_from_anywhere(items, base_dir)

        for service in info.to_process: # type: InRAMService

            class_ = service.service_class
            impl_name = service.impl_name

            service_info.append({
                'service_class': class_,
                'mod': inspect.getmodule(class_),
                'impl_name': impl_name,
                'service_id': self.impl_name_to_id[impl_name],
                'is_active': self.services[impl_name]['is_active'],
                'slow_threshold': self.services[impl_name]['slow_threshold'],
                'fs_location': inspect.getfile(class_),
                'deployment_info': 'no-deployment-info'
            })

        # All set, write out the cache file, assuming that we can do it.
        # We cannot on Windows or under a debugger (as indicated by the environment variable).
        if self.has_internal_cache:
            if not os.environ.get('ZATO_SERVER_BASE_DIR'):
                # NOTE(review): the cache payload write below is commented out, so only an empty file is created.
                f = open(cache_file_path, 'wb')
                # f.write(dill_dumps(internal_cache))
                f.close()

        logger.info('{} %d internal services (%s) (%s)'.format(self.action_internal_done),
            len(info.to_process), info.total_size_human, self.server.name)

        return info.to_process

    else:
        logger.info('Deploying cached internal services (%s)', self.server.name)
        to_process = []

        # Declare it upfront because we need to assume that opening the path can fail.
        f = None

        try:
            f = open(cache_file_path, 'rb')
            dill_items = dill_load(f)
        except ValueError as e:
            msg = e.args[0]
            if _unsupported_pickle_protocol_msg in msg:
                msg = msg.replace(_unsupported_pickle_protocol_msg, '').strip()
                protocol_found = int(msg)

                # If the protocol found is higher than our own, it means that the cache
                # was built by a Python version higher than our own. In such a case,
                # we need to recreate the cache anew.
                if protocol_found > highest_pickle_protocol:
                    logger.info('Cache pickle protocol found `%d` > current highest `%d`, forcing sync_internal',
                        protocol_found, highest_pickle_protocol)
                    return self.import_internal_services(items, base_dir, True, is_first)

                # A different reason, re-raise the error then
                else:
                    raise

            # Must be a different kind of a ValueError, propagate it then
            else:
                raise
        finally:
            if f:
                f.close()

        len_si = len(dill_items['service_info'])

        for _, item in enumerate(dill_items['service_info'], 1):
            class_ = self._visit_class_for_service(item['mod'], item['service_class'], item['fs_location'], True)
            to_process.append(class_)

        self._store_in_ram(None, to_process)
        logger.info('Deployed %d cached internal services (%s)', len_si, self.server.name)

        return to_process
# ################################################################################################################################
def _store_in_ram(self, session:'SASession | None', to_process:'inramlist') -> 'None':
    """ Stores a list of InRAMService objects in the in-RAM lookup structures.

    session -- an optional SQL session; if not given, a new one is opened just for the ODB lookup
    to_process -- InRAMService objects to store
    """
    # Under tests there is no ODB - generate random service IDs instead of looking them up.
    if self.is_testing:
        services = {}
        for in_ram_service in to_process: # type: InRAMService
            service_info = {}
            service_info['id'] = randint(0, 1000000)
            services[in_ram_service.name] = service_info
    else:
        # We need to look up all the services in ODB to be able to find their IDs
        if session:
            needs_new_session = False
        else:
            needs_new_session = True
            session = self.odb.session()
        try:
            services = self.get_basic_data_services(session)
        finally:
            # Close the session only if we opened it ourselves above
            if needs_new_session and session:
                session.close()

    with self.update_lock:
        for item in to_process: # type: InRAMService

            service_dict = services[item.name]
            service_id = service_dict['id']

            item_name = item.name
            item_deployment_info = item.deployment_info
            item_service_class = item.service_class

            # Per-service metadata, keyed by impl_name ..
            self.services[item.impl_name] = {}
            self.services[item.impl_name]['name'] = item_name
            self.services[item.impl_name]['deployment_info'] = item_deployment_info
            self.services[item.impl_name]['service_class'] = item_service_class

            self.services[item.impl_name]['path'] = item.source_code_info.path
            self.services[item.impl_name]['source_code'] = item.source_code_info.source.decode('utf8')

            item_is_active = item.is_active
            item_slow_threshold = item.slow_threshold

            self.services[item.impl_name]['is_active'] = item_is_active
            self.services[item.impl_name]['slow_threshold'] = item_slow_threshold

            # .. plus the ID/name cross-reference maps.
            self.id_to_impl_name[service_id] = item.impl_name
            self.impl_name_to_id[item.impl_name] = service_id
            self.name_to_impl_name[item.name] = item.impl_name

            arg_spec = getargspec(item.service_class.after_add_to_store) # type: ArgSpec
            args = arg_spec.args # type: anylist

            # GH #1018 made server the argument that the hook receives ..
            if len(args) == 1 and args[0] == 'server':
                hook_arg = self.server

            # .. but for backward-compatibility we provide the hook with the logger object by default.
            else:
                hook_arg = logger

            item.service_class.after_add_to_store(hook_arg)
# ################################################################################################################################
def _store_services_in_odb(
    self,
    session,       # type: any_
    batch_indexes, # type: anylist
    to_process     # type: anylist
) -> 'bool':
    """ Looks up all Service objects in ODB and if any of our local ones is not in the database yet, it is added.

    Returns True if at least one batch added at least one new service to ODB.
    """
    # Will be set to True if any of the batches added at least one new service to ODB
    any_added = False

    # Get all services already deployed in ODB for comparisons (Service)
    services = self.get_basic_data_services(session)

    # Add any missing Service objects from each batch delineated by indexes found
    for start_idx, end_idx in batch_indexes:
        to_add = []
        batch_services = to_process[start_idx:end_idx]

        for service in batch_services: # type: InRAMService

            # No such Service object in ODB so we need to store it
            if service.name not in services:
                to_add.append(service)

        # Add to ODB all the Service objects from this batch found not to be in ODB already
        if to_add:
            elems = [elem.to_dict() for elem in to_add]

            # This saves services in ODB
            self.odb.add_services(session, elems)

            # Now that we have them, we can look up their IDs ..
            service_id_list = self.odb.get_service_id_list(session, self.server.cluster_id,
                [elem['name'] for elem in elems]) # type: anydict

            # .. and add them for later use.
            # NOTE(review): items are accessed via .impl_name/.id attributes (SQL rows, presumably),
            # despite the `type: dict` comment below - confirm.
            for item in service_id_list: # type: dict
                self.impl_name_to_id[item.impl_name] = item.id

            any_added = True

    return any_added
# ################################################################################################################################
def _should_delete_deployed_service(self, service:'InRAMService', already_deployed:'stranydict') -> 'bool':
""" Returns True if a given service has been already deployed but its current source code,
one that is about to be deployed, is changed in comparison to what is stored in ODB.
"""
# Already deployed ..
if service.name in already_deployed:
# .. thus, return True if current source code is different to what we have already
if service.source_code_info.source != already_deployed[service.name]:
return True
# If we are here, it means that we should not delete this service
return False
# ################################################################################################################################
def _store_deployed_services_in_odb(
    self,
    session,       # type: any_
    batch_indexes, # type: anylist
    to_process,    # type: anylist
) -> 'None':
    """ Looks up all Service objects in ODB, checks if any is not deployed locally and deploys it if it is not.
    """
    # Local objects
    now = _utcnow()
    now_iso = now.isoformat()

    # Get all services already deployed in ODB for comparisons (Service) - it is needed to do it again,
    # in addition to _store_services_in_odb, because that other method may have added
    # DB-level IDs that we need with our own objects.
    services = self.get_basic_data_services(session)

    # Same goes for deployed services objects (DeployedService)
    already_deployed = self.get_basic_data_deployed_services()

    # Modules visited may return a service that has been already visited via another module,
    # in which case we need to skip such a duplicate service.
    already_visited = set()

    # Add any missing DeployedService objects from each batch delineated by indexes found
    for start_idx, end_idx in batch_indexes:

        # Deployed services that need to be deleted before they can be re-added,
        # which will happen if a service's name does not change but its source code does
        to_delete = []

        # DeployedService objects to be added
        to_add = []

        # InRAMService objects to process in this iteration
        batch_services = to_process[start_idx:end_idx]

        for service in batch_services: # type: InRAMService

            # Ignore a service we have already processed
            if service.name in already_visited:
                continue
            else:
                already_visited.add(service.name)

            # Make sure to re-deploy services that have changed their source code
            if self._should_delete_deployed_service(service, already_deployed):
                to_delete.append(self.get_service_id_by_name(service.name))
                # Removing it here means it will be re-added by the `not in already_deployed` branch below
                del already_deployed[service.name]

            # At this point we will always have IDs for all Service objects
            service_id = services[service.name]['id']

            # Metadata about this deployment as a JSON object
            class_ = service.service_class
            path = service.source_code_info.path
            deployment_info_dict = deployment_info('service-store', str(class_), now_iso, path)
            deployment_info_dict['line_number'] = service.source_code_info.line_number
            self.deployment_info[service.impl_name] = deployment_info_dict
            deployment_details = dumps(deployment_info_dict)

            # No such DeployedService object in ODB so we need to store it
            if service.name not in already_deployed:
                to_add.append({
                    'server_id': self.server.id,
                    'service_id': service_id,
                    'deployment_time': now,
                    'details': deployment_details,
                    'source': service.source_code_info.source,
                    'source_path': service.source_code_info.path,
                    'source_hash': service.source_code_info.hash,
                    'source_hash_method': service.source_code_info.hash_method,
                })

        # If any services are to be redeployed, delete them first now
        if to_delete:
            self.odb.drop_deployed_services_by_name(session, to_delete)

        # If any services are to be deployed, do it now.
        if to_add:
            self.odb.add_deployed_services(session, to_add)
# ################################################################################################################################
def _store_in_odb(self, session:'SASession | None', to_process:'inramlist') -> 'None':
    """ Stores both Service and DeployedService rows in ODB for all input services.
    """
    # Boundaries of deployment batches
    batch_indexes = get_batch_indexes(to_process, self.max_batch_size)

    # Service objects go in first ..
    any_services_added = self._store_services_in_odb(session, batch_indexes, to_process)

    # .. commit so that the newly added Service rows are visible
    # to the DeployedService step that follows ..
    if any_services_added and session:
        session.commit()

    # .. now DeployedService objects can be added - they assume that all Service objects are in ODB already.
    self._store_deployed_services_in_odb(session, batch_indexes, to_process)
# ################################################################################################################################
def get_basic_data_services(self, session:'SASession') -> 'anydict':
    """ Returns basic information about all the services in ODB, keyed by service name.
    """
    service_rows = self.odb.get_basic_data_service_list(session)
    return {name: {'id': service_id, 'impl_name': impl_name} for service_id, name, impl_name in service_rows}
# ################################################################################################################################
def get_basic_data_deployed_services(self) -> 'anydict':
    """ Returns a mapping of deployed service names to their stored source, as read from ODB.
    """
    out = {}
    for elem in self.odb.get_basic_data_deployed_service_list():
        out[elem[0]] = elem[1]
    return out
# ################################################################################################################################
def import_services_from_anywhere(
    self,
    items,             # type: stroriter
    base_dir,          # type: str
    is_internal=False, # type: bool
) -> 'DeploymentInfo':
    """ Imports services from any of the supported sources - directories, Python files,
    named modules, module objects or lists of hot-deployment project roots - and stores
    them in ODB (unless testing) and in RAM.
    """
    items = items if isinstance(items, (list, tuple)) else [items]
    to_process = []
    should_skip = False

    for item in items:

        # Skip any item that contains one of the well-known ignored markers
        # (the for/else resets should_skip to False when no marker matched).
        for ignored_name in internal_to_ignore:
            if ignored_name in item:
                should_skip = True
                break
        else:
            should_skip = False

        if should_skip:
            continue

        if has_debug:
            logger.debug('About to import services from:`%s`', item)

        # NOTE(review): is_internal defaults to False, so this branch runs only when None
        # is passed in explicitly by a caller - confirm this is intended.
        if is_internal is None:
            is_internal = item.startswith('zato')

        if isinstance(item, str):

            # A regular directory
            if os.path.isdir(item):
                imported = self.import_services_from_directory(item, base_dir)
                to_process.extend(imported)

            # .. a .py/.pyw
            elif is_python_file(item):
                imported = self.import_services_from_file(item, is_internal, base_dir)
                to_process.extend(imported)

            # .. a named module
            else:
                imported = self.import_services_from_module(item, is_internal)
                to_process.extend(imported)

        # .. a list of project roots ..
        elif isinstance(item, list):

            # .. go through each project ..
            for elem in item:

                # .. add type hints ..
                elem = cast_('HotDeployProject', elem)

                # .. make the root directory's elements importable by adding the root to $PYTHONPATH ..
                sys.path.insert(0, str(elem.sys_path_entry))

                for dir_name in elem.pickup_from_path:

                    # .. turn Path objects into string, which is what is expected by the functions that we call ..
                    dir_name = str(dir_name)

                    # .. services need to be both imported and stored for later use ..
                    imported = self.import_services_from_directory(dir_name, base_dir)
                    to_process.extend(imported)

                    # .. while models we merely import ..
                    _ = self.import_models_from_directory(dir_name, base_dir)

        # .. if we are here, it must be a module object.
        else:
            imported = self.import_services_from_module_object(item, is_internal)
            to_process.extend(imported)

    total_size = 0

    # De-duplicate the services found; note that this does not preserve their original order.
    to_process = set(to_process)
    to_process = list(to_process)

    for item in to_process:
        item = cast_('InRAMService', item)
        total_size += item.source_code_info.len_source

    info = DeploymentInfo()
    info.to_process[:] = to_process
    info.total_size = total_size
    info.total_size_human = naturalsize(info.total_size)

    # There is no ODB session under tests
    if self.is_testing:
        session = None
    else:
        session = self.odb.session()

    try:
        # Save data to both ODB and RAM if we are not testing,
        # otherwise, in RAM only.
        if not self.is_testing:
            self._store_in_odb(session, info.to_process)
        self._store_in_ram(session, info.to_process)

        # Postprocessing, like rate limiting which needs access to information that becomes
        # available only after a service is saved to ODB.
        if not self.is_testing:
            self.after_import(session, info)

    # Done with everything, we can commit it now, assuming we are not in a unittest
    finally:
        if session:
            session.commit() # type: ignore

    # Done deploying, we can return
    return info
# ################################################################################################################################
def after_import(self, session:'SASession | None', info:'DeploymentInfo') -> 'None':
    """ Post-deployment steps - refreshes the server's service ConfigDict with the newly deployed
    services and sets up rate limiting for each of them.
    """
    # Names of all services that have been just deployed ..
    deployed_service_name_list = [item.name for item in info.to_process]

    # .. out of which we need to subtract the ones that the server is already aware of
    # because they were added to SQL ODB prior to current deployment ..
    # (iterate over a copy because elements are removed while looping)
    for name in deployed_service_name_list[:]:
        if name in self.server.config.service:
            deployed_service_name_list.remove(name)

    # .. and now we know for which services to create ConfigDict objects.
    query = self.odb.get_service_list_with_include(
        session, self.server.cluster_id, deployed_service_name_list, True) # type: anylist

    service_list = ConfigDict.from_query('service_list_after_import', query, decrypt_func=self.server.decrypt)
    self.server.config.service.update(service_list._impl)

    # Rate limiting
    for item in info.to_process: # type: InRAMService
        self.set_up_rate_limiting(item.name, item.service_class)
# ################################################################################################################################
def _should_ignore_file(self, file_name:'str', base_dir:'str') -> 'bool':
if file_name.endswith('store.py') and 'current' in base_dir:
with open(file_name) as f:
data = f.read()
if 'Zato Source' in data:
return True
else:
return False
else:
return False
# ################################################################################################################################
def import_objects_from_file(
    self,
    file_name,   # type: str
    is_internal, # type: bool
    base_dir,    # type: str
    visit_func   # type: callable_
) -> 'anylist':
    """ Imports all the services or models from the path to a file.

    file_name -- path to the Python file to import from
    is_internal -- whether the objects found are Zato-internal ones
    base_dir -- the server's base directory
    visit_func -- a callable invoked with (module, is_internal, file_name) that extracts the objects

    Returns the list of objects extracted by visit_func, possibly empty.
    """
    # Our response to return
    to_process = []

    # Exit early if we are not to process this file
    if self._should_ignore_file(file_name, base_dir):
        return to_process

    try:
        mod_info = import_module_from_path(file_name, base_dir)
    except Exception:
        msg = 'Could not load source, file_name:`%s`, e:`%s`'
        logger.error(msg, file_name, format_exc())
    else:
        to_process.extend(visit_func(mod_info.module, is_internal, mod_info.file_name))
    finally:
        # NOTE(review): returning from the finally block swallows any exception raised by
        # visit_func in the else branch above - presumably deliberate best-effort behavior,
        # but worth confirming.
        return to_process
# ################################################################################################################################
def import_models_from_directory(self, dir_name:'str', base_dir:'str') -> 'modelinfolist':
    """ Imports models from a specified directory.
    """
    imported:'modelinfolist' = []
    for py_path in visit_py_source(dir_name):
        imported.extend(self.import_models_from_file(py_path, False, base_dir))
        # Yield control to other greenlets between files
        gevent_sleep(0.03) # type: ignore
    return imported
# ################################################################################################################################
def import_models_from_file(
    self,
    file_name,   # type: str
    is_internal, # type: bool
    base_dir,    # type: str
) -> 'modelinfolist':
    """ Imports all the models from the path to a file and caches them for later use.
    """
    # Extract all the models from the file ..
    model_info_list = self.import_objects_from_file(file_name, is_internal, base_dir, self._visit_module_for_models)

    # .. cache each by its fully-qualified name ..
    for model_info in model_info_list:
        model_info = cast_('ModelInfo', model_info)
        self.models[model_info.name] = model_info

    # .. and return the list to the caller.
    return model_info_list
# ################################################################################################################################
def import_services_from_file(self, file_name:'str', is_internal:'bool', base_dir:'str') -> 'anylist':
    """ Imports all the services from the path to a file.
    """
    return self.import_objects_from_file(file_name, is_internal, base_dir, self._visit_module_for_services)
# ################################################################################################################################
def import_services_from_directory(self, dir_name:'str', base_dir:'str') -> 'anylist':
    """ Imports services from a specified directory.
    """
    to_process = []
    for py_path in list(visit_py_source(dir_name)):
        to_process.extend(self.import_services_from_file(py_path, False, base_dir))
        # Yield control to other greenlets between files
        gevent_sleep(0.03) # type: ignore
    return to_process
# ################################################################################################################################
def import_services_from_module(self, mod_name:'str', is_internal:'bool') -> 'anylist':
    """ Imports all the services from a module specified by the given name.
    Returns an empty list if the module cannot be imported or processed.
    """
    try:
        mod = import_module(mod_name)
        return self.import_services_from_module_object(mod, is_internal)
    except Exception as e:
        logger.info('Could not import module `%s` (internal:%d) -> `%s` -> `%s`',
            mod_name, is_internal, e.args, e)
        return []
# ################################################################################################################################
def import_services_from_module_object(self, mod:'module_', is_internal:'bool') -> 'anylist':
    """ Imports all the services from a Python module object.
    """
    fs_location = inspect.getfile(mod)
    return self._visit_module_for_services(mod, is_internal, fs_location)
# ################################################################################################################################
def _has_module_import(self, source_code:'str', mod_name:'str') -> 'bool':
# .. ignore modules that do not import what we had on input ..
for line in source_code.splitlines():
# .. these two will be True if we are importing this module ..
has_import = 'import' in line
has_mod_name = mod_name in line
# .. in which case, there is no need to continue ..
if has_import and has_mod_name:
break
# .. otherwise, no, we are not importing this module ..
else:
has_import = False
has_mod_name = False
return has_import and has_mod_name
# ################################################################################################################################
def _get_service_module_imports(self, mod_name:'str') -> 'strlist':
""" Returns a list of paths pointing to modules with services that import the module given on input.
"""
# Local aliases
out = []
modules_visited = set()
# Go through all the services that we are aware of ..
for service_data in self.services.values():
# .. this is the Python class representing a service ..
service_class = service_data['service_class']
# .. get the module of this class based on the module's name ..
mod = importlib.import_module(service_class.__module__)
# .. get the source of the module that this class is in, ..
# .. but not if we have already visited this module before ..
if mod in modules_visited:
continue
else:
# .. get the actual source code ..
source_code = service_data['source_code']
# .. this module can be ignored if it does not import the input one ..
if not self._has_module_import(source_code, mod_name):
continue
# .. otherwise, extract the path of this module ..
path = service_data['path']
# .. store that module's path for later use ..
out.append(path)
# .. cache that item so that we do not have to visit it more than once ..
modules_visited.add(mod)
# .. now, we can return our result to the caller.
return out
# ################################################################################################################################
def _get_model_module_imports(self, mod_name:'str') -> 'strlist':
    """ Returns a list of paths pointing to modules with models that import the module given on input.
    """
    out = []
    modules_visited = set()

    # Go through all the models that we are aware of ..
    for model in self.models.values():

        # .. add type hints ..
        model = cast_('ModelInfo', model)

        # .. skip modules already handled via another model ..
        if model.mod_name in modules_visited:
            continue

        # .. skip modules that do not import the input one ..
        if not self._has_module_import(model.source, mod_name):
            continue

        # .. record the module's path and remember the module to avoid revisiting it.
        out.append(model.path)
        modules_visited.add(model.mod_name)

    return out
# ################################################################################################################################
def get_module_importers(self, mod_name:'str') -> 'strlist':
    """ Returns a list of paths pointing to modules that import the one given on input.
    """
    # Paths of service modules importing the input one, followed by model modules doing the same.
    service_path_list = self._get_service_module_imports(mod_name)
    model_path_list = self._get_model_module_imports(mod_name)
    return service_path_list + model_path_list
# ################################################################################################################################
def _should_deploy_model(self, name:'str', item:'any_', current_module:'module_', fs_location:'str') -> 'bool':
""" Is item a model that we can deploy?
"""
if isclass(item) and hasattr(item, '__mro__'):
if issubclass(item, DataClassModel) and (item is not DataClassModel):
if item.__module__ == current_module.__name__:
return True
# If we are here, it means that we should deploy that item
return False
# ################################################################################################################################
def _should_deploy_service(self, name:'str', item:'any_', current_module:'module_', fs_location:'str') -> 'bool':
    """ Is item a service that we can deploy?

    name -- the name under which item was found in its module (not used by the checks below)
    item -- any object found in the module; only Service subclasses qualify
    current_module -- the module the item was found in
    fs_location -- the file-system path the module was loaded from

    Returns True only for concrete Service subclasses defined in current_module that
    the deployment patterns allow (or unconditionally when under tests).
    """
    # Must be a class that looks like a service (i.e. has get_name) ..
    if isclass(item) and hasattr(item, '__mro__') and hasattr(item, 'get_name'):

        # .. but not one of the base classes themselves ..
        if item is not Service and item is not AdminService and item is not PubSubHook:

            # .. and not explicitly excluded nor an SQL model class.
            if not hasattr(item, DONT_DEPLOY_ATTR_NAME) and not issubclass(item, ModelBase):

                # Do not deploy services that only happened to have been imported
                # in this module but are actually defined elsewhere.
                if getmodule(item) is not current_module:
                    return False

                # After all the checks, at this point, we know that item must be a service class
                item = cast_('Service', item)

                # Make sure the service has its full module's name populated ..
                item.zato_set_module_name(fs_location)

                # .. now, we can access its name.
                service_name = item.get_name()

                # Don't deploy SSO services if SSO as such is not enabled
                if not self.server.is_sso_enabled:
                    if 'zato.sso' in service_name:
                        return False

                # We may be embedded in a test server from zato-testing
                # in which case we deploy every service found.
                if self.is_testing:
                    return True
                else:
                    if self.patterns_matcher.is_allowed(service_name):
                        return True
                    else:
                        logger.info('Skipped disallowed `%s`', service_name)

    # If we are here, it means that item is not a service that we should deploy
    return False
# ################################################################################################################################
def _get_source_code_info(self, mod:'any_', class_:'any_') -> 'SourceCodeInfo':
    """ Returns the source code of and the FS path to the given module.

    mod -- the module object the service class was found in
    class_ -- the service class itself, used only to find its line number

    Returns a SourceCodeInfo object; its fields remain at defaults if the source cannot be read.

    Fixes: the source file is now read via a context manager (the previous
    `open(file_name, 'rb').read()` leaked the file handle), and an empty
    `mod.__file__` no longer raises an uncaught IndexError.
    """
    source_info = SourceCodeInfo()
    try:
        file_name = mod.__file__ or ''

        # Map compiled artifacts (.pyc/.pyo) back to their source file
        if file_name and file_name[-1] in('c', 'o'):
            file_name = file_name[:-1]

        # We would have used inspect.getsource(mod) had it not been apparently using
        # cached copies of the source code; read from disk and always close the handle.
        with open(file_name, 'rb') as f:
            source_info.source = f.read()

        source_info.len_source = len(source_info.source)

        source_info.path = inspect.getsourcefile(mod) or 'no-source-file'
        source_info.hash = sha256(source_info.source).hexdigest()
        source_info.hash_method = 'SHA-256'

        # The line number this class object is defined on
        source_info.line_number = inspect.findsource(class_)[1]

    except IOError:
        if has_trace1:
            logger.log(TRACE1, 'Ignoring IOError, mod:`%s`, e:`%s`', mod, format_exc())

    return source_info
# ################################################################################################################################
def _visit_class_for_model(
    self,
    _ignored_mod, # type: module_
    class_, # type: any_
    fs_location, # type: str
    _ignored_is_internal # type: any_
) -> 'ModelInfo':
    """ Builds a ModelInfo object describing a single model class found in the module
    stored at the given filesystem location.
    """
    # Reusable
    mod_name = get_module_name_by_path(fs_location)

    # Read the source and convert it from bytes to string; the context manager
    # guarantees the file handle is closed rather than leaked.
    with open(fs_location, 'rb') as source_file:
        source = source_file.read()
    source = source.decode('utf8')

    out = ModelInfo()
    out.name = '{}.{}'.format(mod_name, class_.__name__)
    out.path = fs_location
    out.mod_name = mod_name
    out.source = source

    return out
# ################################################################################################################################
def _visit_class_for_service(
    self,
    mod, # type: module_
    class_, # type: type[Service]
    fs_location, # type: str
    is_internal # type: bool
) -> 'InRAMService':
    """ Wraps a service class found in the given module in an InRAMService object. """

    # First, let the class know the name of the module it is defined in ..
    _ = class_.zato_set_module_name(fs_location)

    # .. collect its names before any class-wide attributes are attached ..
    service_name = class_.get_name()
    service_impl_name = class_.get_impl_name()

    # .. and attach the class-wide attributes now.
    self.set_up_class_attributes(class_, self)

    # Note that at this point we do not have the service's ID, is_active and slow_threshold values;
    # this is because this object is created prior to its deployment in ODB.
    in_ram_service = InRAMService()
    in_ram_service.cluster_id = self.server.cluster_id
    in_ram_service.is_active = True
    in_ram_service.is_internal = is_internal
    in_ram_service.name = service_name
    in_ram_service.impl_name = service_impl_name
    in_ram_service.service_class = class_
    in_ram_service.source_code_info = self._get_source_code_info(mod, class_)

    return in_ram_service
# ################################################################################################################################
def on_worker_initialized(self) -> 'None':
    """ A hook invoked once a worker has been fully initialized, e.g. when all connectors
    are started and references to such objects can be assigned to services
    as class-wide attributes. This default implementation deliberately does nothing.
    """
# ################################################################################################################################
def redeploy_on_parent_changed(self, changed_service_name:'str', changed_service_impl_name:'str') -> 'None':
    """ Hot-redeploys services that subclass the service that has just been redeployed.

    Finds, among currently deployed services, every service whose MRO contains a Service
    subclass named changed_service_name, then copies each such service's module into the
    server's hot-deployment pickup directory, which triggers its redeployment.
    """
    # Local aliases
    to_auto_deploy = []

    # Iterate over all current services to check if any of these subclasses the service just deployed ..
    for impl_name, service_info in self.services.items():

        # .. skip the one just deployed ..
        if impl_name == changed_service_impl_name:
            continue

        # .. a Python class representing each service ..
        service_class = service_info['service_class']
        service_module = getmodule(service_class)

        # .. get all parent classes of the current one ..
        service_mro = getmro(service_class)

        # .. try to find the deployed service's parents ..
        # (Service itself is excluded - only user-defined intermediate bases count.)
        for base_class in service_mro:
            if issubclass(base_class, Service) and (base_class is not Service):
                base_class_name = base_class.get_name()
                if base_class_name == changed_service_name:

                    # Do not deploy services that are defined in the same module their parent is
                    # because that would be an infinite loop of auto-deployment.
                    if getmodule(base_class) is service_module:
                        continue

                    # .. if it was found, add it to the list of what needs to be auto-redeployed ..
                    to_auto_deploy.append(service_info)

    # We will not always have any services to redeploy
    if to_auto_deploy:

        # Inform users that we are to auto-redeploy services and why we are doing it
        logger.info('Base service `%s` changed; auto-redeploying `%s`', changed_service_name,
            sorted(item['name'] for item in to_auto_deploy))

        # Go through each child service found and hot-deploy it
        # by copying its module into the pickup directory.
        for item in to_auto_deploy:
            module_path = getsourcefile(item['service_class']) or 'no-module-path'
            logger.debug('Copying `%s` to `%s`', module_path, self.server.hot_deploy_config.pickup_dir)
            shutil_copy(module_path, self.server.hot_deploy_config.pickup_dir)
# ################################################################################################################################
def _visit_module_for_objects(
    self,
    mod, # type: module_
    is_internal, # type: bool
    fs_location, # type: str
    should_deploy_func, # type: callable_
    visit_class_func, # type: callable_
    needs_before_add_to_store_result # type: bool
) -> 'anylist':
    """ Imports services or models from a module object.

    Walks the module's attributes in sorted order, asks should_deploy_func whether
    each one is deployable and, if so, turns it into a deployment object via
    visit_class_func. Any exception is logged and an empty (or partial) list
    is returned - deployment of one module must not break the others.
    """
    to_process = []
    try:
        for name in sorted(dir(mod)):
            with self.update_lock:
                item = getattr(mod, name)
                if should_deploy_func(name, item, mod, fs_location):

                    # Only services enter here ..
                    if needs_before_add_to_store_result:
                        if self.is_testing:
                            before_add_to_store_result = True
                        else:
                            before_add_to_store_result = item.before_add_to_store(logger)

                    # .. while models go here.
                    else:
                        before_add_to_store_result = True

                    if before_add_to_store_result:
                        to_process.append(visit_class_func(mod, item, fs_location, is_internal))
                    else:
                        logger.info('Skipping `%s` from `%s`', item, fs_location)

    except Exception:
        logger.error(
            'Exception while visiting module:`%s`, is_internal:`%s`, fs_location:`%s`, e:`%s`',
            mod, is_internal, fs_location, format_exc())

    # Previously this was a `return` inside a `finally:` block which would additionally
    # have swallowed BaseException subclasses (e.g. KeyboardInterrupt) as well as any
    # exception raised while logging above. Returning here keeps the best-effort
    # behaviour for regular exceptions only.
    return to_process
# ################################################################################################################################
def _visit_module_for_models(self, mod:'module_', is_internal:'bool', fs_location:'str') -> 'anylist':
    """ Extracts models from an already imported module object. """
    should_deploy_func = self._should_deploy_model
    visit_class_func = self._visit_class_for_model
    return self._visit_module_for_objects(mod, is_internal, fs_location,
        should_deploy_func, visit_class_func, needs_before_add_to_store_result=False)
# ################################################################################################################################
def _visit_module_for_services(self, mod:'module_', is_internal:'bool', fs_location:'str') -> 'anylist':
    """ Extracts services from an already imported module object. """
    should_deploy_func = self._should_deploy_service
    visit_class_func = self._visit_class_for_service
    return self._visit_module_for_objects(mod, is_internal, fs_location,
        should_deploy_func, visit_class_func, needs_before_add_to_store_result=True)
# ################################################################################################################################
| 81,915
|
Python
|
.py
| 1,460
| 44.577397
| 130
| 0.512845
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,245
|
http_soap.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/http_soap.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from traceback import format_exc
# Paste
from paste.util.converters import asbool
# Zato
from zato.common.api import CONNECTION, DEFAULT_HTTP_PING_METHOD, DEFAULT_HTTP_POOL_SIZE, \
Groups, HL7, HTTP_SOAP_SERIALIZATION_TYPE, MISC, PARAMS_PRIORITY, SEC_DEF_TYPE, URL_PARAMS_PRIORITY, URL_TYPE, \
ZATO_DEFAULT, ZATO_NONE, ZatoNotGiven, ZATO_SEC_USE_RBAC
from zato.common.broker_message import CHANNEL, OUTGOING
from zato.common.exception import ServiceMissingException, ZatoException
from zato.common.json_internal import dumps
from zato.common.odb.model import Cluster, HTTPSOAP, SecurityBase, Service, TLSCACert, to_json
from zato.common.odb.query import cache_by_id, http_soap, http_soap_list
from zato.common.rate_limiting import DefinitionParser
from zato.common.util.api import as_bool
from zato.common.util.sql import elems_with_opaque, get_dict_with_opaque, get_security_by_id, parse_instance_opaque_attr, \
set_instance_opaque_attrs
from zato.server.connection.http_soap import BadRequest
from zato.server.service import AsIs, Boolean, Integer, List
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
# These imports are needed only by static type checkers; `if 0` guarantees
# they are never executed at runtime.
if 0:
    from zato.common.typing_ import any_, anylist, strdict, strintdict

# ################################################################################################################################
# ################################################################################################################################

# Optional SIO input elements shared by the GetList service below.
_GetList_Optional = ('include_wrapper', 'cluster_id', 'connection', 'transport', 'data_format', 'needs_security_group_names')
# ################################################################################################################################
# ################################################################################################################################
class _HTTPSOAPService:
    """ Functionality shared by the HTTP/SOAP-related services defined below. """

    def notify_worker_threads(self, params, action):
        """ Publishes a broker message so that worker threads pick up new or updated parameters. """
        params['action'] = action
        self.broker_client.publish(params)

    def _validate_tls(self, input, sec_info):
        # A TLS key/cert security definition cannot be used without CA certs on input.
        uses_tls_key_cert = sec_info['sec_type'] == SEC_DEF_TYPE.TLS_KEY_CERT
        if uses_tls_key_cert and not input.get('sec_tls_ca_cert_id'):
            raise ZatoException(self.cid, 'TLS CA certs is a required field if TLS keys/certs are used')

    def _handle_security_info(self, session, security_id, connection, transport):
        """ First checks whether the security type is correct for the given
        connection type. If it is, returns a dictionary of security-related information.
        """
        info = {'security_id': None, 'security_name': None, 'sec_type': None}

        # No security definition on input - return the empty template as-is.
        if not security_id:
            return info

        sec_def = session.query(SecurityBase.name, SecurityBase.sec_type).\
            filter(SecurityBase.id==security_id).\
            one()

        # Only these security types can be attached to plain-HTTP outgoing connections.
        supported_for_outgoing = (SEC_DEF_TYPE.BASIC_AUTH, SEC_DEF_TYPE.TLS_KEY_CERT,
            SEC_DEF_TYPE.APIKEY, SEC_DEF_TYPE.OAUTH, SEC_DEF_TYPE.NTLM)

        if connection == 'outgoing':
            if transport == URL_TYPE.PLAIN_HTTP and sec_def.sec_type not in supported_for_outgoing:
                raise Exception('Unsupported sec_type `{}`'.format(sec_def.sec_type))

        info['security_id'] = security_id
        info['security_name'] = sec_def.name
        info['sec_type'] = sec_def.sec_type

        return info
# ################################################################################################################################
class _BaseGet(AdminService):
    """ Base class for services returning information about HTTP/SOAP objects.
    """
    class SimpleIO:
        output_required = 'id', 'name', 'is_active', 'is_internal', 'url_path'
        output_optional = 'service_id', 'service_name', 'security_id', 'security_name', 'sec_type', \
            'method', 'soap_action', 'soap_version', 'data_format', 'host', 'ping_method', 'pool_size', 'merge_url_params_req', \
            'url_params_pri', 'params_pri', 'serialization_type', 'timeout', AsIs('sec_tls_ca_cert_id'), Boolean('has_rbac'), \
            'content_type', Boolean('sec_use_rbac'), 'cache_id', 'cache_name', Integer('cache_expiry'), 'cache_type', \
            'content_encoding', Boolean('match_slash'), 'http_accept', List('service_whitelist'), 'is_rate_limit_active', \
            'rate_limit_type', 'rate_limit_def', Boolean('rate_limit_check_parent_def'), \
            'hl7_version', 'json_path', 'should_parse_on_input', 'should_validate', 'should_return_errors', \
            'data_encoding', 'is_audit_log_sent_active', 'is_audit_log_received_active', \
            Integer('max_len_messages_sent'), Integer('max_len_messages_received'), \
            Integer('max_bytes_per_message_sent'), Integer('max_bytes_per_message_received'), \
            'username', 'is_wrapper', 'wrapper_type', AsIs('security_groups'), 'security_group_count', \
            'security_group_member_count', 'needs_security_group_names'

# ################################################################################################################################

    def _get_sec_tls_ca_cert_id_from_item(self, item):
        """ Maps the stored CA-cert ID and verify strategy to the value returned to callers. """
        cert_id = item.get('sec_tls_ca_cert_id')

        # A concrete, user-defined CA certs bundle takes precedence.
        if cert_id is not None:
            return cert_id

        # Otherwise, the verify strategy decides between skipping validation
        # and falling back to the built-in default bundle.
        verify_strategy = item.get('sec_tls_ca_cert_verify_strategy')
        return ZATO_NONE if verify_strategy is False else ZATO_DEFAULT

# ################################################################################################################################

    def _get_security_groups_info(self, item:'any_', security_groups_member_count:'strintdict') -> 'strdict':
        """ Sums up, over this item's security groups, how many groups and group members there are. """
        group_count = 0
        member_count = 0

        for group_id in item.get('security_groups') or []:
            group_count += 1
            member_count += security_groups_member_count.get(group_id) or 0

        return {
            'group_count': group_count,
            'member_count': member_count,
        }
# ################################################################################################################################
class Get(_BaseGet):
    """ Returns information about an individual HTTP/SOAP object by its ID.
    """
    class SimpleIO(_BaseGet.SimpleIO):
        request_elem = 'zato_http_soap_get_request'
        response_elem = 'zato_http_soap_get_response'
        input_optional = 'cluster_id', 'id', 'name'

    def handle(self):
        input = self.request.input
        cluster_id = input.get('cluster_id') or self.server.cluster_id

        with closing(self.odb.session()) as session:

            # At least one of these is needed to look the object up
            input.require_any('id', 'name')

            db_item = http_soap(session, cluster_id, input.id, input.name)
            payload = get_dict_with_opaque(db_item)
            payload['sec_tls_ca_cert_id'] = self._get_sec_tls_ca_cert_id_from_item(payload)

            self.response.payload = payload
# ################################################################################################################################
class GetList(_BaseGet):
    """ Returns a list of HTTP/SOAP connections.
    """
    _filter_by = HTTPSOAP.name,

    class SimpleIO(GetListAdminSIO, _BaseGet.SimpleIO):
        request_elem = 'zato_http_soap_get_list_request'
        response_elem = 'zato_http_soap_get_list_response'
        input_optional = GetListAdminSIO.input_optional + _GetList_Optional
        output_optional = _BaseGet.SimpleIO.output_optional + ('connection', 'transport')
        output_repeated = True

    def get_data(self, session):
        """ Builds the list of connection dicts, enriched with security-group counts,
        optionally with group IDs replaced by names and with wrapper objects filtered out.
        """
        # Local aliases
        out:'anylist' = []
        cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id
        needs_security_group_names = self.request.input.get('needs_security_group_names') or False
        include_wrapper = self.request.input.get('include_wrapper') or False
        should_ignore_wrapper = not include_wrapper

        # Get information about security groups which may be used later on
        security_groups_member_count = self.invoke('zato.groups.get-member-count', group_type=Groups.Type.API_Clients)

        # Group details are needed only when IDs are to be turned into names below.
        # (A previous version also had a stray no-op statement here - removed.)
        if needs_security_group_names:
            all_security_groups = self.invoke('zato.groups.get-list', group_type=Groups.Type.API_Clients)
        else:
            all_security_groups = []

        # Obtain the basic result ..
        result = self._search(http_soap_list, session, cluster_id,
            self.request.input.connection, self.request.input.transport,
            asbool(self.server.fs_server_config.misc.return_internal_objects),
            self.request.input.get('data_format'),
            False,
            )

        # .. extract all the opaque elements ..
        data:'anylist' = elems_with_opaque(result)

        # .. go through everything we have so far ..
        for item in data:

            # .. build a dictionary of information about groups ..
            security_groups_for_item_info = self._get_security_groups_info(item, security_groups_member_count)
            item['security_group_count'] = security_groups_for_item_info['group_count']
            item['security_group_member_count'] = security_groups_for_item_info['member_count']

            # .. optionally, we may need to turn security group IDs into their names ..
            if needs_security_group_names:
                if security_groups_for_item := item.get('security_groups'):
                    new_security_groups = []
                    for item_group_id in security_groups_for_item:
                        for group in all_security_groups:
                            if item_group_id == group['id']:
                                new_security_groups.append(group['name'])
                                break
                    item['security_groups'] = sorted(new_security_groups)

            # .. this needs to be extracted ..
            item['sec_tls_ca_cert_id'] = self._get_sec_tls_ca_cert_id_from_item(item)

            # .. ignore wrapper elements if told do ..
            if should_ignore_wrapper and item.get('is_wrapper'):
                continue

            # .. if we are here, it means that this element is to be returned ..
            out.append(item)

        # .. now, return the result to our caller.
        return out

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            self.response.payload[:] = data
# ################################################################################################################################
# ################################################################################################################################
class _CreateEdit(AdminService, _HTTPSOAPService):
    """ Base class with functionality shared by the Create and Edit services below. """

    def add_tls_ca_cert(self, input, sec_tls_ca_cert_id):
        """ Populates input.sec_tls_ca_cert_name with the name matching the given CA cert ID. """
        with closing(self.odb.session()) as session:
            input.sec_tls_ca_cert_name = session.query(TLSCACert.name).\
                filter(TLSCACert.id==sec_tls_ca_cert_id).\
                one()[0]

# ################################################################################################################################

    def _raise_error(self, name, url_path, http_accept, http_method, soap_action, source):
        # Raised when a channel with the same URL path / Accept / method / SOAP action already exists;
        # `source` identifies which uniqueness check triggered it.
        msg = 'Such a channel already exists ({}); url_path:`{}`, http_accept:`{}`, http_method:`{}`, soap_action:`{}` (src:{})'
        raise Exception(msg.format(name, url_path, http_accept, http_method, soap_action, source))

# ################################################################################################################################

    def ensure_channel_is_unique(self, session, url_path, http_accept, http_method, soap_action, cluster_id):
        """ Raises an exception if a channel with this URL path, SOAP action, HTTP method
        and Accept header combination already exists in the cluster.
        """
        existing_ones = session.query(HTTPSOAP).\
            filter(HTTPSOAP.cluster_id==cluster_id).\
            filter(HTTPSOAP.url_path==url_path).\
            filter(HTTPSOAP.soap_action==soap_action).\
            filter(HTTPSOAP.connection==CONNECTION.CHANNEL).\
            all()

        # At least one channel with this kind of basic information already exists
        # but it is possible that it requires different HTTP headers (e.g. Accept, Method)
        # so we need to check each one manually.
        if existing_ones:
            for item in existing_ones:
                opaque = parse_instance_opaque_attr(item)
                item_http_accept = opaque.get('http_accept')

                # Raise an exception if the existing channel's method is equal to ours
                # and it also uses the same Accept header.
                if http_method:
                    if item.method == http_method:
                        if item_http_accept == http_accept:
                            self._raise_error(item.name, url_path, http_accept, http_method, soap_action, 'chk1')

                # Similar, but from the Accept header's perspective
                if item_http_accept == http_accept:
                    if item.method == http_method:
                        self._raise_error(item.name, url_path, http_accept, http_method, soap_action, 'chk2')

# ################################################################################################################################

    def _set_sec_tls_ca_cert_id(self, item, input):
        """ Translates the tls_verify / sec_tls_ca_cert_id input into the item's CA cert ID
        and the opaque sec_tls_ca_cert_verify_strategy flag.
        """
        # This can be used by enmasse to simplify its configuration ..
        tls_verify = input.pop('tls_verify', ZatoNotGiven)

        # .. this can be used by both enmasse and any other client.
        sec_tls_ca_cert_id = input.get('sec_tls_ca_cert_id')

        # If we have a simplified value on input, it will take priority ..
        if tls_verify is not ZatoNotGiven:
            tls_verify = as_bool(tls_verify)
            if tls_verify:
                sec_tls_ca_cert_id = ZATO_DEFAULT
            else:
                sec_tls_ca_cert_id = ZATO_NONE

        if sec_tls_ca_cert_id:

            # Skip validation
            if sec_tls_ca_cert_id == ZATO_NONE:
                item.sec_tls_ca_cert_id = None
                input['sec_tls_ca_cert_verify_strategy'] = False

            # Use the default CA certs bundle
            elif sec_tls_ca_cert_id == ZATO_DEFAULT:
                item.sec_tls_ca_cert_id = None
                input['sec_tls_ca_cert_verify_strategy'] = True

            # A user-defined bundle
            else:
                item.sec_tls_ca_cert_id = sec_tls_ca_cert_id
                input['sec_tls_ca_cert_verify_strategy'] = None

        else:
            item.sec_tls_ca_cert_id = None
            input['sec_tls_ca_cert_verify_strategy'] = True # By default, verify using the built-in bundle

# ################################################################################################################################

    def _preprocess_security_groups(self, input):
        """ Normalizes input.security_groups so that both group names and group IDs
        are accepted on input - the returned list contains IDs only.
        """
        # This will contain only IDs
        new_input_security_groups = []

        # Security groups are optional
        if input_security_groups := input.get('security_groups'):

            # Get information about security groups which is need to turn group names into group IDs
            existing_security_groups = self.invoke('zato.groups.get-list', group_type=Groups.Type.API_Clients)

            for input_group in input_security_groups:
                group_id = None
                try:
                    # If this succeeds, the input is already an ID ..
                    input_group = int(input_group)
                except ValueError:
                    # .. otherwise it is a name that needs to be mapped to an ID.
                    for existing_group in existing_security_groups:
                        if input_group == existing_group['name']:
                            group_id = existing_group['id']
                            break
                    else:
                        raise Exception(f'Could not find ID for group `{input_group}`')
                else:
                    group_id = input_group
                finally:
                    if group_id:
                        new_input_security_groups.append(group_id)

        # Return what we have to our caller
        return new_input_security_groups

# ################################################################################################################################

    def _get_service_from_input(self, session, input):
        """ Looks up the Service row given on input by name or by ID,
        raising ServiceMissingException if it cannot be found.
        """
        service = session.query(Service).\
            filter(Cluster.id==input.cluster_id).\
            filter(Service.cluster_id==Cluster.id)

        if input.service:
            service = service.filter(Service.name==input.service)
        elif input.service_id:
            service = service.filter(Service.id==input.service_id)
        else:
            raise Exception('Either service or service_id is required on input')

        service = service.first()

        if not service:
            msg = 'Service `{}` does not exist in this cluster'.format(input.service)
            self.logger.info(msg)
            raise ServiceMissingException(msg)
        else:
            return service
# ################################################################################################################################
# ################################################################################################################################
class Create(_CreateEdit):
    """ Creates a new HTTP/SOAP connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_http_soap_create_request'
        response_elem = 'zato_http_soap_create_response'
        input_required = 'name', 'url_path', 'connection'
        input_optional = 'service', 'service_id', AsIs('security_id'), 'method', 'soap_action', 'soap_version', 'data_format', \
            'host', 'ping_method', 'pool_size', Boolean('merge_url_params_req'), 'url_params_pri', 'params_pri', \
            'serialization_type', 'timeout', AsIs('sec_tls_ca_cert_id'), Boolean('has_rbac'), 'content_type', \
            'cache_id', Integer('cache_expiry'), 'content_encoding', Boolean('match_slash'), 'http_accept', \
            List('service_whitelist'), 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', \
            Boolean('rate_limit_check_parent_def'), Boolean('sec_use_rbac'), 'hl7_version', 'json_path', \
            'should_parse_on_input', 'should_validate', 'should_return_errors', 'data_encoding', \
            'is_audit_log_sent_active', 'is_audit_log_received_active', \
            Integer('max_len_messages_sent'), Integer('max_len_messages_received'), \
            Integer('max_bytes_per_message_sent'), Integer('max_bytes_per_message_received'), \
            'is_active', 'transport', 'is_internal', 'cluster_id', 'tls_verify', \
            'is_wrapper', 'wrapper_type', 'username', 'password', AsIs('security_groups')
        output_required = 'id', 'name'
        output_optional = 'url_path'

    def handle(self):
        """ Validates the input, stores a new HTTPSOAP row in ODB and notifies worker threads. """

        # Names of opaque attributes that should not be stored if not given on input
        skip_opaque = []

        # If we have a rate limiting definition, let's check it upfront
        DefinitionParser.check_definition_from_input(self.request.input)

        input = self.request.input
        input.sec_use_rbac = input.get('sec_use_rbac') or (input.security_id == ZATO_SEC_USE_RBAC)
        input.security_id = input.security_id if input.security_id not in (ZATO_NONE, ZATO_SEC_USE_RBAC) else None
        input.soap_action = input.soap_action if input.soap_action else ''
        input.timeout = input.get('timeout') or MISC.DEFAULT_HTTP_TIMEOUT
        input.security_groups = self._preprocess_security_groups(input)

        input.is_active = input.get('is_active', True)
        input.is_internal = input.get('is_internal', False)
        input.transport = input.get('transport') or URL_TYPE.PLAIN_HTTP
        input.cluster_id = input.get('cluster_id') or self.server.cluster_id
        input.data_format = input.get('data_format') or ''

        # For HL7
        input.data_encoding = input.get('data_encoding') or 'utf-8'
        input.hl7_version = input.get('hl7_version') or HL7.Const.Version.v2.id

        # Remove extra whitespace
        input_name = input.name
        input_host = input.host
        input_url_path = input.url_path
        input_ping_method = input.get('ping_method')
        input_content_type = input.get('content_type')

        if input_name:
            input.name = input_name.strip()

        if input_host:
            input.host = input_host.strip()

        if input_url_path:
            input.url_path = input_url_path.strip()

        if input_ping_method:
            input.ping_method = input_ping_method.strip() or DEFAULT_HTTP_PING_METHOD

        if input_content_type:
            input.content_type = input_content_type.strip()

        if input.content_encoding and input.content_encoding != 'gzip':
            raise Exception('Content encoding must be empty or equal to `gzip`')

        with closing(self.odb.session()) as session:
            existing_one = session.query(HTTPSOAP.id).\
                filter(HTTPSOAP.cluster_id==input.cluster_id).\
                filter(HTTPSOAP.name==input.name).\
                filter(HTTPSOAP.connection==input.connection).\
                filter(HTTPSOAP.transport==input.transport).\
                first()

            if existing_one:
                raise Exception('An object of that name `{}` already exists in this cluster'.format(input.name))

            # Channels require an existing service to invoke
            if input.connection == CONNECTION.CHANNEL:
                service = self._get_service_from_input(session, input)
            else:
                service = None

            # Will raise exception if the security type doesn't match connection
            # type and transport
            sec_info = self._handle_security_info(session, input.security_id,
                input.connection, input.transport)

            # Make sure this combination of channel parameters does not exist already
            if input.connection == CONNECTION.CHANNEL:
                self.ensure_channel_is_unique(session,
                    input.url_path, input.http_accept, input.method, input.soap_action, input.cluster_id)

            try:

                item = self._new_zato_instance_with_cluster(HTTPSOAP)
                item.connection = input.connection
                item.transport = input.transport
                item.is_internal = input.is_internal
                item.name = input.name
                item.is_active = input.is_active
                item.host = input.host
                item.url_path = input.url_path
                item.method = input.method
                item.soap_action = input.soap_action.strip()
                item.soap_version = input.soap_version or None
                item.data_format = input.data_format
                item.service = service
                item.ping_method = input.ping_method
                item.pool_size = input.get('pool_size') or DEFAULT_HTTP_POOL_SIZE

                # Fix: this was previously `input.get('merge_url_params_req') or True` which
                # always evaluated to True, making it impossible to explicitly disable
                # the merging of URL params. An empty/missing value still defaults to True.
                merge_url_params_req = input.get('merge_url_params_req')
                item.merge_url_params_req = True if merge_url_params_req in (None, '') else as_bool(merge_url_params_req)

                item.url_params_pri = input.get('url_params_pri') or URL_PARAMS_PRIORITY.DEFAULT
                item.params_pri = input.get('params_pri') or PARAMS_PRIORITY.DEFAULT
                item.serialization_type = input.get('serialization_type') or HTTP_SOAP_SERIALIZATION_TYPE.DEFAULT.id
                item.timeout = input.timeout
                item.has_rbac = input.get('has_rbac') or input.sec_use_rbac or False
                item.content_type = input.content_type
                item.sec_use_rbac = input.sec_use_rbac
                item.cache_id = input.get('cache_id') or None
                item.cache_expiry = input.get('cache_expiry') or 0
                item.content_encoding = input.content_encoding
                item.is_wrapper = bool(input.is_wrapper)
                item.wrapper_type = input.wrapper_type

                # Credentials are stored only if actually given on input
                if input.username:
                    item.username = input.username
                else:
                    skip_opaque.append('username')

                if input.password:
                    item.password = input.password
                else:
                    skip_opaque.append('password')

                # Configure CA certs
                self._set_sec_tls_ca_cert_id(item, input)

                if input.security_id:
                    item.security = get_security_by_id(session, input.security_id)
                else:
                    input.security_id = None # To ensure that SQLite does not reject ''

                # Opaque attributes
                set_instance_opaque_attrs(item, input, skip=skip_opaque)

                session.add(item)
                session.commit()

                if input.connection == CONNECTION.CHANNEL:
                    input.impl_name = service.impl_name
                    input.service_id = service.id
                    input.service_name = service.name

                    cache = cache_by_id(session, input.cluster_id, item.cache_id) if item.cache_id else None
                    if cache:
                        input.cache_type = cache.cache_type
                        input.cache_name = cache.name
                    else:
                        input.cache_type = None
                        input.cache_name = None

                if item.sec_tls_ca_cert_id:
                    self.add_tls_ca_cert(input, item.sec_tls_ca_cert_id)

                input.id = item.id
                input.update(sec_info)

                # Let worker threads know about the new object
                if input.connection == CONNECTION.CHANNEL:
                    action = CHANNEL.HTTP_SOAP_CREATE_EDIT.value
                else:
                    action = OUTGOING.HTTP_SOAP_CREATE_EDIT.value
                self.notify_worker_threads(input, action)

                self.response.payload.id = item.id
                self.response.payload.name = item.name
                self.response.payload.url_path = item.url_path

            except Exception:
                # Fixed an unbalanced backtick in the original format string ('e:`%s')
                self.logger.error('Object could not be created, e:`%s`', format_exc())
                session.rollback()

                raise
# ################################################################################################################################
class Edit(_CreateEdit):
    """ Updates an HTTP/SOAP connection.

    Looks the object up by its ID, validates input (uniqueness of
    name/connection/transport within the cluster, security/TLS consistency),
    updates the ODB row and notifies all worker threads of the new configuration.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_http_soap_edit_request'
        response_elem = 'zato_http_soap_edit_response'
        input_required = 'id', 'name', 'url_path', 'connection'
        input_optional = 'service', 'service_id', AsIs('security_id'), 'method', 'soap_action', 'soap_version', \
            'data_format', 'host', 'ping_method', 'pool_size', Boolean('merge_url_params_req'), 'url_params_pri', \
            'params_pri', 'serialization_type', 'timeout', AsIs('sec_tls_ca_cert_id'), Boolean('has_rbac'), 'content_type', \
            'cache_id', Integer('cache_expiry'), 'content_encoding', Boolean('match_slash'), 'http_accept', \
            List('service_whitelist'), 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', \
            Boolean('rate_limit_check_parent_def'), Boolean('sec_use_rbac'), 'hl7_version', 'json_path', \
            'should_parse_on_input', 'should_validate', 'should_return_errors', 'data_encoding', \
            'is_audit_log_sent_active', 'is_audit_log_received_active', \
            Integer('max_len_messages_sent'), Integer('max_len_messages_received'), \
            Integer('max_bytes_per_message_sent'), Integer('max_bytes_per_message_received'), \
            'cluster_id', 'is_active', 'transport', 'tls_verify', \
            'is_wrapper', 'wrapper_type', 'username', 'password', AsIs('security_groups')
        output_optional = 'id', 'name'

    def handle(self):
        """ Updates the connection in the ODB and broadcasts the change to worker threads.
        """
        # For later use
        skip_opaque = []

        # If we have a rate limiting definition, let's check it upfront
        DefinitionParser.check_definition_from_input(self.request.input)

        input = self.request.input

        # Normalize security input - RBAC may be requested explicitly
        # or through the special ZATO_SEC_USE_RBAC marker given as security_id.
        input.sec_use_rbac = input.get('sec_use_rbac') or (input.security_id == ZATO_SEC_USE_RBAC)
        input.security_id = input.security_id if input.security_id not in (ZATO_NONE, ZATO_SEC_USE_RBAC) else None
        input.soap_action = input.soap_action if input.soap_action else ''
        input.timeout = input.get('timeout') or MISC.DEFAULT_HTTP_TIMEOUT
        input.security_groups = self._preprocess_security_groups(input)

        # Fill in defaults for optional elements not given on input.
        input.is_active = input.get('is_active', True)
        input.is_internal = input.get('is_internal', False)
        input.transport = input.get('transport') or URL_TYPE.PLAIN_HTTP
        input.cluster_id = input.get('cluster_id') or self.server.cluster_id
        input.data_format = input.get('data_format') or ''

        # For HL7
        input.data_encoding = input.get('data_encoding') or 'utf-8'
        input.hl7_version = input.get('hl7_version') or HL7.Const.Version.v2.id

        # Remove extra whitespace
        input_name = input.name
        input_host = input.host
        input_url_path = input.url_path
        input_ping_method = input.get('ping_method')
        input_content_type = input.get('content_type')

        if input_name:
            input.name = input_name.strip()

        if input_host:
            input.host = input_host.strip()

        if input_url_path:
            input.url_path = input_url_path.strip()

        if input_ping_method:
            input.ping_method = input_ping_method.strip() or DEFAULT_HTTP_PING_METHOD

        if input_content_type:
            input.content_type = input_content_type.strip()

        # gzip is the only content encoding currently accepted.
        if input.content_encoding and input.content_encoding != 'gzip':
            raise Exception('Content encoding must be empty or equal to `gzip`')

        with closing(self.odb.session()) as session:

            # Make sure no other object of this name/connection/transport exists
            # in the cluster, excluding the very object that is being edited.
            existing_one = session.query(
                HTTPSOAP.id,
                HTTPSOAP.url_path,
                ).\
                filter(HTTPSOAP.cluster_id==input.cluster_id).\
                filter(HTTPSOAP.id!=input.id).\
                filter(HTTPSOAP.name==input.name).\
                filter(HTTPSOAP.connection==input.connection).\
                filter(HTTPSOAP.transport==input.transport).\
                first()

            if existing_one:
                if input.connection == CONNECTION.CHANNEL:
                    object_type = 'channel'
                else:
                    object_type = 'connection'
                msg = 'A {} of that name:`{}` already exists in this cluster; path: `{}` (id:{})'
                raise Exception(msg.format(object_type, input.name, existing_one.url_path, existing_one.id))

            # Channels must point to a service; outgoing connections do not.
            if input.connection == CONNECTION.CHANNEL:
                service = self._get_service_from_input(session, input)
            else:
                service = None

            # Will raise exception if the security type doesn't match connection
            # type and transport
            sec_info = self._handle_security_info(session, input.security_id, input.connection, input.transport)

            # TLS data comes in combinations, i.e. certain elements are required only if TLS keys/certs are used
            self._validate_tls(input, sec_info)

            try:
                item = session.query(HTTPSOAP).filter_by(id=input.id).one()
                opaque = parse_instance_opaque_attr(item)

                # Remember previous values - worker threads need them to find
                # and replace their existing configuration entries.
                old_name = item.name
                old_url_path = item.url_path
                old_soap_action = item.soap_action
                old_http_method = item.method
                old_http_accept = opaque.get('http_accept')

                item.name = input.name
                item.is_active = input.is_active
                item.host = input.host
                item.url_path = input.url_path
                item.security_id = input.security_id or None # So that SQLite does not reject ''
                item.connection = input.connection
                item.transport = input.transport
                item.cluster_id = input.cluster_id
                item.method = input.method
                item.soap_action = input.soap_action
                item.soap_version = input.soap_version or None
                item.data_format = input.data_format
                item.service = service
                item.ping_method = input.ping_method
                item.pool_size = input.get('pool_size') or DEFAULT_HTTP_POOL_SIZE
                item.merge_url_params_req = input.get('merge_url_params_req') or False
                item.url_params_pri = input.get('url_params_pri') or URL_PARAMS_PRIORITY.DEFAULT
                item.params_pri = input.get('params_pri') or PARAMS_PRIORITY.DEFAULT
                item.serialization_type = input.get('serialization_type') or HTTP_SOAP_SERIALIZATION_TYPE.DEFAULT.id
                item.timeout = input.get('timeout')
                item.has_rbac = input.get('has_rbac') or input.sec_use_rbac or False
                item.content_type = input.content_type
                item.sec_use_rbac = input.sec_use_rbac
                item.cache_id = input.get('cache_id') or None
                item.cache_expiry = input.get('cache_expiry') or 0
                item.content_encoding = input.content_encoding
                item.is_wrapper = bool(input.is_wrapper)
                item.wrapper_type = input.wrapper_type

                # Credentials are updated only if actually given on input;
                # otherwise they are excluded from the opaque attributes below
                # so the stored values are not overwritten with empty ones.
                if input.username:
                    item.username = input.username
                else:
                    skip_opaque.append('username')

                if input.password:
                    item.password = input.password
                else:
                    skip_opaque.append('password')

                # Configure CA certs
                self._set_sec_tls_ca_cert_id(item, input)

                # Opaque attributes
                set_instance_opaque_attrs(item, input, skip=skip_opaque)

                session.add(item)
                session.commit()

                # Enrich the input dict - it doubles as the message
                # published to worker threads below.
                if input.connection == CONNECTION.CHANNEL:
                    input.impl_name = service.impl_name
                    input.service_id = service.id
                    input.service_name = service.name
                    input.merge_url_params_req = item.merge_url_params_req
                    input.url_params_pri = item.url_params_pri
                    input.params_pri = item.params_pri

                    cache = cache_by_id(session, input.cluster_id, item.cache_id) if item.cache_id else None
                    if cache:
                        input.cache_type = cache.cache_type
                        input.cache_name = cache.name
                    else:
                        input.cache_type = None
                        input.cache_name = None
                else:
                    input.ping_method = item.ping_method
                    input.pool_size = item.pool_size

                input.is_internal = item.is_internal
                input.old_name = old_name
                input.old_url_path = old_url_path
                input.old_soap_action = old_soap_action
                input.old_http_method = old_http_method
                input.old_http_accept = old_http_accept
                input.update(sec_info)

                if item.sec_tls_ca_cert_id and item.sec_tls_ca_cert_id != ZATO_NONE:
                    self.add_tls_ca_cert(input, item.sec_tls_ca_cert_id)

                if input.connection == CONNECTION.CHANNEL:
                    action = CHANNEL.HTTP_SOAP_CREATE_EDIT.value
                else:
                    action = OUTGOING.HTTP_SOAP_CREATE_EDIT.value

                # Let all worker threads know about the updated configuration.
                self.notify_worker_threads(input, action)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Object could not be updated, e:`%s`', format_exc())
                session.rollback()

                raise
# ################################################################################################################################
class Delete(AdminService, _HTTPSOAPService):
    """ Deletes an HTTP/SOAP connection.

    The object may be looked up either by its ID or by a name/connection pair.
    After deletion, worker threads are notified so they can drop their
    in-memory configuration of the object.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_http_soap_delete_request'
        response_elem = 'zato_http_soap_delete_response'
        input_optional = 'id', 'name', 'connection', 'should_raise_if_missing'
        output_optional = 'details'

    def handle(self):
        input = self.request.input
        input_id = input.get('id')
        name = input.get('name')
        connection = input.get('connection')

        # Either an ID or a name/connection pair must be given to find the object.
        has_expected_input = input_id or (name and connection)
        if not has_expected_input:
            raise Exception('Either ID or name/connection are required on input')

        with closing(self.odb.session()) as session:
            try:
                query = session.query(HTTPSOAP)

                if input_id:
                    query = query.\
                        filter(HTTPSOAP.id==input_id)
                else:
                    query = query.\
                        filter(HTTPSOAP.name==name).\
                        filter(HTTPSOAP.connection==connection)

                item = query.first()

                # Optionally, raise an exception if such an object is missing
                if not item:
                    if input.get('should_raise_if_missing', True):
                        raise BadRequest(self.cid, 'Could not find an object based on input -> `{}`'.format(input))
                    else:
                        self.response.payload.details = 'No such object'
                        return

                opaque = parse_instance_opaque_attr(item)

                # Capture everything that worker threads will need before the
                # instance is deleted - afterwards its attributes may no longer
                # be safely accessible on the expired ORM object.
                old_id = item.id
                old_connection = item.connection
                old_name = item.name
                old_transport = item.transport
                old_url_path = item.url_path
                old_soap_action = item.soap_action
                old_http_method = item.method
                old_http_accept = opaque.get('http_accept')

                session.delete(item)
                session.commit()

                if old_connection == CONNECTION.CHANNEL:
                    action = CHANNEL.HTTP_SOAP_DELETE.value
                else:
                    action = OUTGOING.HTTP_SOAP_DELETE.value

                # Use the ID read from the database rather than from input because
                # the object may have been looked up by name/connection, in which
                # case no ID was provided on input at all.
                self.notify_worker_threads({
                    'id': old_id,
                    'name':old_name,
                    'transport':old_transport,
                    'old_url_path':old_url_path,
                    'old_soap_action':old_soap_action,
                    'old_http_method': old_http_method,
                    'old_http_accept': old_http_accept,
                }, action)

                self.response.payload.details = 'OK, deleted'

            except Exception:
                session.rollback()
                self.logger.error('Object could not be deleted, e:`%s`', format_exc())

                raise
# ################################################################################################################################
class Ping(AdminService):
    """ Pings an HTTP/SOAP connection.

    Looks the connection up by ID, invokes its .ping method and returns
    whether the ping succeeded along with diagnostic information.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_http_soap_ping_request'
        response_elem = 'zato_http_soap_ping_response'
        input_required = 'id'
        input_optional = 'ping_path'
        output_required = 'id', 'is_success'
        output_optional = 'info'

    def handle(self):
        with closing(self.odb.session()) as session:
            item = session.query(HTTPSOAP).filter_by(id=self.request.input.id).one()

            # Pick the config dict that matches this connection's transport,
            # i.e. an attribute of self.outgoing named after item.transport.
            config_dict = getattr(self.outgoing, item.transport)
            self.response.payload.id = self.request.input.id

            try:
                # NOTE(review): config_dict.get(item.name) may presumably return None
                # if the connection is not in this worker's config - the resulting
                # AttributeError is then caught by the except branch below; confirm.
                result = config_dict.get(item.name).ping(self.cid, ping_path=self.request.input.ping_path)
                is_success = True
            except Exception as e:
                result = e.args[0]
                is_success = False
            finally:
                # Always populate the response, no matter the outcome.
                self.response.payload.info = result
                self.response.payload.is_success = is_success
# ################################################################################################################################
class ReloadWSDL(AdminService, _HTTPSOAPService):
    """ Reloads WSDL by recreating the whole underlying queue of SOAP clients.

    Re-reads the connection from the ODB and republishes a CREATE_EDIT message
    so worker threads rebuild their client queues from scratch.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_http_soap_reload_wsdl_request'
        response_elem = 'zato_http_soap_reload_wsdl_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            item = session.query(HTTPSOAP).filter_by(id=self.request.input.id).one()
            sec_info = self._handle_security_info(session, item.security_id, item.connection, item.transport)

            # Serialize the ORM row to a plain dict and enrich it with
            # the security details that worker threads expect.
            fields = to_json(item, True)['fields']
            fields['sec_type'] = sec_info['sec_type']
            fields['security_name'] = sec_info['security_name']

            # Publishing a CREATE_EDIT makes workers recreate the client queue.
            action = OUTGOING.HTTP_SOAP_CREATE_EDIT.value
            self.notify_worker_threads(fields, action)
# ################################################################################################################################
class GetURLSecurity(AdminService):
    """ Returns a JSON document describing the security configuration of all Zato channels.
    """
    def handle(self):
        # Collect each security-related mapping from the request handler,
        # sorting the items so the output is stable across invocations.
        handler = self.worker_store.request_handler
        out = {
            'url_sec': sorted(handler.security.url_sec.items()),
            'plain_http_handler.http_soap': sorted(handler.plain_http_handler.http_soap.items()),
            'soap_handler.http_soap': sorted(handler.soap_handler.http_soap.items()),
        }

        # Serialize to pretty-printed JSON with sorted keys.
        self.response.payload = dumps(out, sort_keys=True, indent=4)
        self.response.content_type = 'application/json'
# ################################################################################################################################
| 42,868
|
Python
|
.py
| 741
| 44.906883
| 130
| 0.545381
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,246
|
kv_data.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/kv_data.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from datetime import datetime
# Zato
from zato.common.odb.model import KVData
from zato.server.service import Service
# ################################################################################################################################
class AutoCleanUp(Service):
    """ Cleans up expired keys in ODB (KVDB expires its own keys automatically so no work is needed there).
    """
    def handle(self):
        with closing(self.odb.session()) as session:

            # Bulk-delete every key whose expiry time has already passed ..
            deleted_count = session.query(KVData).\
                filter(KVData.expiry_time <= datetime.utcnow()).\
                delete()
            session.commit()

            # .. and report how many were removed, if any at all.
            if deleted_count:
                self.logger.info(
                    'Deleted %i expired KV key%s from ODB', deleted_count, 's' if deleted_count > 1 else '')
# ################################################################################################################################
| 1,223
|
Python
|
.py
| 26
| 41.076923
| 130
| 0.51936
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,247
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from contextlib import closing
from copy import deepcopy
from json import loads
from traceback import format_exc
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import basestring
# Zato
from zato.common.api import SECRET_SHADOW, ZATO_NONE
from zato.common.broker_message import MESSAGE_TYPE
from zato.common.odb.model import Cluster
from zato.common.util.api import get_response_value, replace_private_key
from zato.common.util.sql import search as sql_search
from zato.server.service import AsIs, Bool, Int, Service
# ################################################################################################################################
if 0:
from zato.common.typing_ import anylist
# ################################################################################################################################
logger = logging.getLogger('zato_admin')
# ################################################################################################################################
success_code = 0
success = '<error_code>{}</error_code>'.format(success_code)
# ################################################################################################################################
class SearchTool:
    """ Optionally attached to each internal service returning a list of results responsible for extraction
    and serialization of search criteria.

    Instances are falsy until search metadata with a non-zero 'num_pages'
    has been stored via set_output_meta.
    """
    _search_attrs = 'num_pages', 'cur_page', 'prev_page', 'next_page', 'has_prev_page', 'has_next_page', 'page_size', 'total'

    def __init__(self, *criteria):
        # Search criteria this service filters by, e.g. column objects.
        self.criteria = criteria
        # Serialized search metadata, filled in by set_output_meta.
        self.output_meta = {'search':{}}

    def __bool__(self):
        # Truthy only once pagination metadata exists. Coerce to bool because
        # __bool__ (and __nonzero__ under Python 2) must return a boolean,
        # while .get() may return None.
        return bool(self.output_meta['search'].get('num_pages'))

    # Python 2 compatibility - the same truth test under its old name.
    # (Previously only __nonzero__ was defined, which Python 3 ignores,
    # making instances unconditionally truthy.)
    __nonzero__ = __bool__

    def set_output_meta(self, result):
        # Merge the search result's metadata (num_pages, cur_page, etc.) in.
        self.output_meta['search'].update(result.to_dict())
# ################################################################################################################################
class AdminSIO:
    """ A base SimpleIO definition that all admin services' SimpleIO classes derive from.
    """
    pass
# ################################################################################################################################
class GetListAdminSIO:
    """ A base SimpleIO definition for admin services that return lists of objects,
    carrying the common pagination and query parameters.
    """
    input_optional = (Int('cur_page'), Bool('paginate'), 'query')
# ################################################################################################################################
class AdminService(Service):
    """ A Zato admin service, part of the Zato public API.

    Adds request/response logging around handle(), optional search/pagination
    support and helpers for creating ODB-backed objects within a cluster.
    """
    output_optional = ('_meta',)

    class SimpleIO(AdminSIO):
        """ This empty definition is needed in case the service should be invoked through REST.
        """

    def __init__(self):
        super(AdminService, self).__init__()

# ################################################################################################################################

    def _init(self, is_http):
        # Attach a search tool only if the subclass declared search criteria.
        if self._filter_by:
            self._search_tool = SearchTool(self._filter_by)
        self.ipc_api = self.server.ipc_api
        super(AdminService, self)._init(is_http)

# ################################################################################################################################

    def before_handle(self):
        """ Logs the incoming request, masking passwords and private keys.
        """
        # Do not log BASE64-encoded messages
        if self.name == 'zato.service.invoke':
            return

        if self.server.is_admin_enabled_for_info:

            # Zato
            from zato.server.connection.web_socket import WebSocket

            # Prefer that first because it may be a generic connection
            # in which case we want to access its opaque attributes
            # that are not available through self.request.input.
            try:
                data = self.request.raw_request
                if not isinstance(data, dict):
                    data = loads(data)
            except Exception:
                # Raw request was not parseable - fall back to SIO input.
                data = self.request.input
            finally:
                # WebSocket objects are not deep-copyable - replace them
                # with a short textual description before copying.
                to_copy = {}
                for k, v in data.items():
                    if isinstance(v, WebSocket):
                        v = 'WebSocket id:{}'.format(hex(id(v)))
                    to_copy[k] = v
                data = deepcopy(to_copy)

                # Mask out secrets before anything is logged.
                # NOTE(review): the result of replace_private_key(v) is never
                # written back to data[k] - looks like only passwords end up
                # masked here; confirm whether that is intended.
                for k, v in data.items():
                    v = replace_private_key(v)
                    if 'password' in k:
                        data[k] = SECRET_SHADOW

            logger.info('Request; service:`%s`, data:`%s` cid:`%s`, ', self.name, data, self.cid)

# ################################################################################################################################

    def handle(self, *args, **kwargs):
        # Must be provided by concrete admin services.
        raise NotImplementedError('Should be overridden by subclasses (AdminService.handle -> {})'.format(self.name))

# ################################################################################################################################

    def _new_zato_instance_with_cluster(self, instance_class, cluster_id=None, **kwargs):
        """ Instantiates an ODB model class bound to a cluster, defaulting
        to the cluster given on input or this server's own cluster.
        """
        if not cluster_id:
            cluster_id = self.request.input.get('cluster_id')
        cluster_id = cluster_id or self.server.cluster_id
        with closing(self.odb.session()) as session:
            cluster_id = cluster_id or self.request.input.cluster_id
            cluster = session.query(Cluster).\
                filter(Cluster.id==cluster_id).\
                one()
            return instance_class(cluster=cluster, **kwargs)

# ################################################################################################################################

    def after_handle(self):
        """ Logs the outgoing response and attaches search metadata if available.
        """
        # Do not log BASE64-encoded messages
        if self.name == 'zato.service.invoke':
            return

        if self.server.is_admin_enabled_for_info:
            logger.info('Response; service:`%s`, data:`%s` cid:`%s`, ',
                self.name, replace_private_key(get_response_value(self.response)), self.cid)

        payload = self.response.payload
        is_text = isinstance(payload, basestring)
        needs_meta = self.request.input.get('needs_meta', True)

        # Attach pagination metadata unless the payload is plain text
        # or the caller explicitly opted out.
        if needs_meta and hasattr(self, '_search_tool'):
            if not is_text:
                payload.zato_meta = self._search_tool.output_meta

# ################################################################################################################################

    def get_data(self, *args, **kwargs):
        # Must be provided by concrete list-returning admin services.
        raise NotImplementedError('Should be overridden by subclasses (AdminService.get_data)')

# ################################################################################################################################

    def _search(self, search_func, session=None, cluster_id=None, *args, **kwargs) -> 'anylist':
        """ Adds search criteria to an SQLAlchemy query based on the service's (self) search configuration.
        """
        # Should we break the results into individual pages
        needs_pagination = self.request.input.get('paginate')
        if needs_pagination:
            result = sql_search(search_func, self.request.input, self._filter_by, session, cluster_id, *args, **kwargs)
            # Remember pagination details so after_handle can attach them.
            self._search_tool.set_output_meta(result)
        else:
            # No pagination requested at all
            result = search_func(session, cluster_id, *args)
        return result
# ################################################################################################################################
class Ping(AdminService):
    """ A ping service, useful for API testing.
    """
    class SimpleIO(AdminSIO):
        output_required = ('pong',)
        response_elem = 'zato_ping_response'

    def handle(self):
        # A constant response - reaching this line proves the server is alive.
        self.response.payload.pong = 'zato'

    def after_handle(self):
        """ A no-op method because zato.ping can be used in benchmarks and the parent's .before/after_handle
        would constitute about 10-15% of the overhead each. With typical admin services it is fine because
        they are rarely used but in benchmarking, this is unnecessary and misleading seeing as they do things
        that user-defined services don't do.
        """

    # Reuse the same no-op for before_handle as well - both hooks are disabled.
    before_handle = after_handle
# ################################################################################################################################
class PubPing(Ping):
    """ Just like zato.ping but available by default in web-admin (because of its prefix).
    """
    # The 'pub.' prefix makes the service publicly accessible by default.
    name = 'pub.zato.ping'
# ################################################################################################################################
class Ping2(Ping):
    """ Works exactly the same as zato.ping, added to have another service for API testing.
    """
    class SimpleIO(Ping.SimpleIO):
        # Only the response element differs from the parent's definition.
        response_elem = 'zato_ping2_response'
# ################################################################################################################################
class ChangePasswordBase(AdminService):
    """ A base class for handling the changing of any of the ODB passwords.

    Subclasses provide the ODB model class and a callback that actually sets
    the password; this class validates input, updates the row and publishes
    a broker message so other servers pick the change up.
    """
    # Subclasses may wish to set it to False to special-case what they need to deal with
    password_required = True

    class SimpleIO(AdminSIO):
        input_required = 'password1', 'password2'
        input_optional = Int('id'), 'name', 'type_'
        output_required = AsIs('id')

    def _handle(self, class_, auth_func, action, name_func=None, instance_id=None, msg_type=MESSAGE_TYPE.TO_PARALLEL_ALL,
        *args, **kwargs):
        """ Changes the password of one instance of class_, located by ID or name.

        class_      - the ODB model class to update
        auth_func   - callback(instance, password) that stores the new password
        action      - broker action code to publish after a successful change
        name_func   - optional callback(instance) returning the name to publish
        instance_id - explicit ID of the object, defaults to request input
        msg_type    - broker message type; falsy value disables publication
        """
        instance_id = instance_id or self.request.input.get('id')
        instance_name = self.request.input.name

        with closing(self.odb.session()) as session:
            password1 = self.request.input.get('password1', '')
            password2 = self.request.input.get('password2', '')

            # Passwords arrive encrypted - decrypt them before any comparison.
            password1_decrypted = self.server.decrypt(password1) if password1 else password1
            password2_decrypted = self.server.decrypt(password2) if password2 else password2

            try:
                if self.password_required:
                    if not password1_decrypted:
                        raise Exception('Password must not be empty')

                    if not password2_decrypted:
                        raise Exception('Password must be repeated')

                    if password1_decrypted != password2_decrypted:
                        raise Exception('Passwords need to be the same')

                # Construct a basic query ..
                query = session.query(class_)

                # .. look up by ID if it is given ..
                if instance_id:
                    query = query.filter(class_.id==instance_id)

                # .. try to use the name if ID is not available ..
                elif instance_name:
                    query = query.filter(class_.name==instance_name)

                # .. otherwise, we do not know how to find the instance -> raise an exception.
                else:
                    raise Exception('Either ID or name are required on input')

                # If we are here, it means that we can find the instance.
                instance = query.first()

                if not instance:
                    raise Exception('Could not find instance with id:`{}` and name:`{}` ({})'.format(
                        instance_id, instance_name, class_))

                # Delegate the actual password assignment to the subclass's callback.
                auth_func(instance, password1_decrypted)

                session.add(instance)
                session.commit()

                if msg_type:
                    name = name_func(instance) if name_func else instance.name

                    # The request input doubles as the broker message published below.
                    self.request.input.id = instance_id
                    self.request.input.action = action
                    self.request.input.name = name
                    self.request.input.password = password1_decrypted
                    self.request.input.salt = kwargs.get('salt')

                    # Always return ID of the object whose password we changed
                    self.response.payload.id = instance_id

                    # Callers may ask for extra instance attributes to be published too.
                    for attr in kwargs.get('publish_instance_attrs', []):
                        self.request.input[attr] = getattr(instance, attr, ZATO_NONE)

                    self.broker_client.publish(self.request.input)

            except Exception:
                self.logger.error('Could not update password, e:`%s`', format_exc())
                session.rollback()

                raise
# ################################################################################################################################
| 12,645
|
Python
|
.py
| 226
| 45.929204
| 130
| 0.492295
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,248
|
scheduler.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/scheduler.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from traceback import format_exc
# ciso8601
try:
from zato.common.util.api import parse_datetime
except ImportError:
from dateutil.parser import parse as parse_datetime
# crontab
from crontab import CronTab
# Zato
from zato.common.api import scheduler_date_time_format, SCHEDULER, ZATO_NONE
from zato.common.broker_message import SCHEDULER as SCHEDULER_MSG
from zato.common.exception import ServiceMissingException, ZatoException
from zato.common.odb.model import Cluster, Job, CronStyleJob, IntervalBasedJob, Service as ODBService
from zato.common.odb.query import job_by_id, job_by_name, job_list
from zato.common.util.config import get_config_object, parse_url_address, update_config_file
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO, Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.ext.configobj_ import ConfigObj
# ################################################################################################################################
# ################################################################################################################################
_service_name_prefix = 'zato.scheduler.job.'
# ################################################################################################################################
# ################################################################################################################################
def _create_edit(action, cid, input, payload, logger, session, broker_client, response, should_ignore_existing):
    """ Creating and updating a job requires a series of very similar steps
    so they've been all put here and depending on the 'action' parameter
    (be it 'create'/'edit') some additional operations are performed.

    action - either 'create' or 'edit'
    cid    - correlation ID used in raised exceptions
    input  - SIO input with job details (name, type, service, schedule, ..)
    should_ignore_existing - if True, silently return when a job of that
        name already exists instead of raising an exception
    """
    job_type = input.job_type
    cluster_id = input.cluster_id
    name = input.name
    service_name = input.service

    cluster = session.query(Cluster).\
        filter(Cluster.id==cluster_id).\
        one()

    # Only the three known job types are accepted.
    if job_type not in(SCHEDULER.JOB_TYPE.ONE_TIME, SCHEDULER.JOB_TYPE.INTERVAL_BASED, SCHEDULER.JOB_TYPE.CRON_STYLE):
        msg = 'Unrecognized job type [{0}]'.format(job_type)
        logger.error(msg)
        raise ZatoException(cid, msg)

    # For finding out if we don't have a job of that name already defined.
    existing_one_base = session.query(Job).\
        filter(Cluster.id==cluster_id).\
        filter(Job.name==name)

    if action == 'create':
        existing_one = existing_one_base.\
            first()
    else:
        # When editing, the job itself must be excluded from the name check.
        job_id = input.id
        existing_one = existing_one_base.\
            filter(Job.id != job_id).\
            first()

    if existing_one:
        if should_ignore_existing:
            return
        else:
            raise ZatoException(cid, 'Job `{}` already exists on this cluster'.format(name))

    # Is the service's name correct?
    service = session.query(ODBService).\
        filter(Cluster.id==cluster_id).\
        filter(ODBService.cluster_id==Cluster.id).\
        filter(ODBService.name==service_name).\
        first()

    if not service:
        msg = 'ODBService `{}` does not exist in this cluster'.format(service_name)
        logger.info(msg)
        raise ServiceMissingException(cid, msg)

    # We can create/edit a base Job object now and - optionally - another one
    # if the job type's is either interval-based or Cron-style. The base
    # instance will be enough if it's a one-time job.
    extra = (input.extra or u'').encode('utf-8')
    is_active = input.is_active
    start_date = parse_datetime(input.start_date)

    if action == 'create':
        job = Job(None, name, is_active, job_type, start_date, extra, cluster=cluster, service=service)
    else:
        job = session.query(Job).filter_by(id=job_id).one()
        old_name = job.name
        job.name = name
        job.is_active = is_active
        job.start_date = start_date
        job.service = service
        job.extra = extra

    try:
        # Add but don't commit yet.
        session.add(job)

        if job_type == SCHEDULER.JOB_TYPE.INTERVAL_BASED:
            ib_params = ('weeks', 'days', 'hours', 'minutes', 'seconds')

            # An interval-based job without any interval at all makes no sense.
            if not any(input[key] for key in ib_params):
                msg = "At least one of ['weeks', 'days', 'hours', 'minutes', 'seconds'] must be given"
                logger.error(msg)
                raise ZatoException(cid, msg)

            if action == 'create':
                ib_job = IntervalBasedJob(None, job)
            else:
                ib_job = session.query(IntervalBasedJob).filter_by(id=job.interval_based.id).one()

            for param in ib_params + ('repeats',):
                value = input[param] or None
                if value != ZATO_NONE:
                    setattr(ib_job, param, value)

            # NOTE(review): 'repeats' was already assigned by the loop above -
            # this re-assignment looks redundant; confirm before removing.
            value = input['repeats'] or None
            if value != ZATO_NONE:
                ib_job.repeats = value

            session.add(ib_job)

        elif job_type == SCHEDULER.JOB_TYPE.CRON_STYLE:
            cron_definition = input.cron_definition.strip()

            # Just to make sure it's syntactically correct
            CronTab(cron_definition).next(default_utc=False)

            if action == 'create':
                cs_job = CronStyleJob(None, job)
            else:
                cs_job = session.query(CronStyleJob).filter_by(id=job.cron_style.id).one()

            cs_job.cron_definition = cron_definition
            session.add(cs_job)

        # We can commit it all now.
        session.commit()

        # Now send it to the broker, but only if the job is active.
        # if is_active:
        msg_action = SCHEDULER_MSG.CREATE.value if action == 'create' else SCHEDULER_MSG.EDIT.value
        msg = {'action': msg_action, 'job_type': job_type,
            'is_active':is_active, 'start_date':start_date.isoformat(),
            'extra':extra.decode('utf8'), 'service': service.name,
            'id':job.id, 'name': name
        }

        # On edit, the scheduler needs the old name to find the job to update.
        if action == 'edit':
            msg['old_name'] = old_name

        if job_type == SCHEDULER.JOB_TYPE.INTERVAL_BASED:
            for param in ib_params + ('repeats',):
                value = input[param]
                msg[param] = int(value) if value else 0
        elif job_type == SCHEDULER.JOB_TYPE.CRON_STYLE:
            msg['cron_definition'] = cron_definition

        broker_client.publish(msg)

    except Exception:
        session.rollback()
        logger.error('Could not complete the request, e:`%s`', format_exc())
        raise
    else:
        response.payload.id = job.id
        response.payload.name = input.name

        if job_type == SCHEDULER.JOB_TYPE.CRON_STYLE:
            # Needs to be returned because we might've been performing
            # a substitution like changing '@hourly' into '0 * * * *'.
            response.payload.cron_definition = cs_job.cron_definition
# ################################################################################################################################
# ################################################################################################################################
class _CreateEdit(AdminService):
    """ A base class for both creating and editing scheduler jobs.

    The subclass's lowercased name ('create'/'edit') selects the mode
    that the shared _create_edit function runs in.
    """
    class SimpleIO(AdminSIO):
        input_required = 'cluster_id', 'name', 'is_active', 'job_type', 'service', 'start_date'
        input_optional = 'id', 'extra', 'weeks', 'days', 'hours', 'minutes', 'seconds', 'repeats', \
            'cron_definition', 'should_ignore_existing'
        output_optional = 'id', 'name', 'cron_definition'
        default_value = ''

    def handle(self):
        # All actual work is delegated to the module-level _create_edit function.
        with closing(self.odb.session()) as session:
            _create_edit(self.__class__.__name__.lower(), self.cid, self.request.input, self.request.payload,
                self.logger, session, self.broker_client, self.response,
                self.request.input.should_ignore_existing)
# ################################################################################################################################
# ################################################################################################################################
class _Get(AdminService):
    """ A base class for services returning scheduler job details,
    defining the SIO elements shared by all of them.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id',)
        output_required = 'id', 'name', 'is_active', 'job_type', 'start_date', 'service_id', 'service_name'
        output_optional = 'extra', 'weeks', 'days', 'hours', 'minutes', 'seconds', 'repeats', 'cron_definition'
        output_repeated = True
        default_value = ''
        date_time_format = scheduler_date_time_format
# ################################################################################################################################
# ################################################################################################################################
class GetList(_Get):
    """ Returns a list of all jobs defined in the scheduler.
    """
    # Column(s) the optional 'query' input filters by.
    _filter_by = Job.name,
    name = _service_name_prefix + 'get-list'

    class SimpleIO(_Get.SimpleIO):
        request_elem = 'zato_scheduler_job_get_list_request'
        response_elem = 'zato_scheduler_job_get_list_response'
        input_optional = GetListAdminSIO.input_optional + ('service_name',)

    def get_data(self, session):
        # Optionally narrowed down to jobs of one service; paginated via _search.
        input = self.request.input
        return self._search(job_list, session, input.cluster_id, input.get('service_name'), False)

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            self.response.payload[:] = data

            # Dates are serialized to ISO-8601 for the wire format.
            for item in self.response.payload:
                item.start_date = item.start_date.isoformat()
# ################################################################################################################################
# ################################################################################################################################
class GetByID(_Get):
    """ Returns a job by its ID.
    """
    name = _service_name_prefix + 'get-by-id'

    class SimpleIO(_Get.SimpleIO):
        request_elem = 'zato_scheduler_job_get_by_id_request'
        response_elem = None
        input_required = _Get.SimpleIO.input_required + ('id',)
        output_repeated = False

    def get_data(self, session):
        # Look the job up by its numeric ID within this server's cluster.
        return job_by_id(session, self.server.cluster_id, self.request.input.id)

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload = self.get_data(session)
            # Serialize the start date to ISO-8601 for the caller.
            payload = self.response.payload
            payload.start_date = payload.start_date.isoformat()
# ################################################################################################################################
# ################################################################################################################################
class GetByName(_Get):
    """ Returns a job by its name.
    """
    name = _service_name_prefix + 'get-by-name'

    class SimpleIO(_Get.SimpleIO):
        request_elem = 'zato_scheduler_job_get_by_name_request'
        response_elem = None
        input_required = _Get.SimpleIO.input_required + ('name',)
        output_repeated = False

    def get_data(self, session):
        # Look the job up by its name within this server's cluster.
        return job_by_name(session, self.server.cluster_id, self.request.input.name)

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload = self.get_data(session)
            # Serialize the start date to ISO-8601 for the caller.
            payload = self.response.payload
            payload.start_date = payload.start_date.isoformat()
# ################################################################################################################################
# ################################################################################################################################
class Create(_CreateEdit):
    """ Creates a new scheduler's job.
    """
    # The actual create/edit logic lives in the _CreateEdit base class.
    name = _service_name_prefix + 'create'
    class SimpleIO(_CreateEdit.SimpleIO):
        request_elem = 'zato_scheduler_job_create_request'
        response_elem = 'zato_scheduler_job_create_response'
# ################################################################################################################################
# ################################################################################################################################
class Edit(_CreateEdit):
    """ Updates a scheduler's job.
    """
    # The actual create/edit logic lives in the _CreateEdit base class.
    name = _service_name_prefix + 'edit'
    class SimpleIO(_CreateEdit.SimpleIO):
        request_elem = 'zato_scheduler_job_edit_request'
        response_elem = 'zato_scheduler_job_edit_response'
# ################################################################################################################################
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes a scheduler's job.
    """
    name = _service_name_prefix + 'delete'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_scheduler_job_delete_request'
        response_elem = 'zato_scheduler_job_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                # Find the job, remove it from the database ..
                job = session.query(Job).filter(Job.id==self.request.input.id).one()
                session.delete(job)
                session.commit()

                # .. and let the scheduler know the job is gone.
                self.broker_client.publish({'action': SCHEDULER_MSG.DELETE.value, 'name': job.name})
            except Exception:
                session.rollback()
                self.logger.error('Could not delete the job, e:`%s`', format_exc())
                raise
# ################################################################################################################################
# ################################################################################################################################
class Execute(AdminService):
    """ Executes a scheduler's job.
    """
    name = _service_name_prefix + 'execute'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_scheduler_job_execute_request'
        response_elem = 'zato_scheduler_job_execute_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                # Find the job and tell the scheduler to run it now.
                job = session.query(Job).filter(Job.id==self.request.input.id).one()
                self.broker_client.publish({'action': SCHEDULER_MSG.EXECUTE.value, 'name': job.name})
            except Exception:
                self.logger.error('Could not execute the job, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
# ################################################################################################################################
class SetActiveStatus(AdminService):
    """ Activates or deactivates a job.
    """
    name = _service_name_prefix + 'set-active-status'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_scheduler_job_set_active_status_request'
        response_elem = 'zato_scheduler_job_set_active_status_response'
        input_required = ('id', 'is_active')

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                # Flip the flag on the matching job and persist the change.
                job = session.query(Job).filter(Job.id==self.request.input.id).one()
                job.is_active = self.request.input.is_active
                session.commit()
            except Exception:
                session.rollback()
                self.logger.error('Could not update is_active status, e:`%s`', format_exc())
                raise
# ################################################################################################################################
# ################################################################################################################################
class _SetAddressBase(Service):
    """ Base class for services that point a component at a new network address.
    """
    input = 'address'
    output = 'msg'

    # Human-readable label of the component whose address is being set,
    # used in the confirmation message - filled in by subclasses.
    address_component_type = None

    def _handle(self, address:'str') -> 'None':
        """ Subclasses implement the actual address propagation here. """
        raise NotImplementedError()

    def handle(self) -> 'None':
        # Normalize the input - a missing address becomes an empty string.
        address = (self.request.input.address or '').strip()
        self._handle(address)

        # Report the stripped address that was actually propagated, rather than the raw
        # input, so the confirmation matches what was really set.
        self.response.payload.msg = f'OK, {self.address_component_type} address set to {address}'
# ################################################################################################################################
# ################################################################################################################################
class SetServerAddress(_SetAddressBase):
    """ Tells the scheduler what the new address of a server it can invoke is.
    """
    name = 'pub.zato.scheduler.set-server-address'
    address_component_type = 'server'

    def _handle(self, address:'str') -> 'None':
        # Broadcast the new server address for the scheduler to pick up.
        msg = {
            'action': SCHEDULER_MSG.SET_SERVER_ADDRESS.value,
            'address': address
        }
        self.broker_client.publish(msg)
# ################################################################################################################################
# ################################################################################################################################
class SetSchedulerAddressImpl(_SetAddressBase):
    """ Per server-service that tells the server what the new address of a scheduler it can invoke is.
    """
    address_component_type = 'server (impl)'

    def _handle(self, address:'str') -> 'None':

        # Extract information about the address we are to use ..
        url = parse_url_address(address, SCHEDULER.DefaultPort)

        # First, save the information to disk - under a lock because
        # the config file is shared mutable state ..
        with self.lock():

            # .. extract the stanza that we need ..
            config:'ConfigObj' = get_config_object(self.server.repo_location, 'server.conf') # type: ignore

            # .. update its contents ..
            config['scheduler']['scheduler_host'] = url.host # type: ignore
            config['scheduler']['scheduler_port'] = url.port # type: ignore
            config['scheduler']['scheduler_use_tls'] = url.use_tls # type: ignore

            # .. we can save it back to disk ..
            update_config_file(config, self.server.repo_location, 'server.conf')

        # .. now, set the new address in RAM - note this happens only after
        # the on-disk configuration has been persisted.
        self.server.set_scheduler_address(address)
# ################################################################################################################################
# ################################################################################################################################
class SetSchedulerAddress(_SetAddressBase):
    """ Tells all servers what the new address of a scheduler they can invoke is.
    """
    name = 'pub.zato.scheduler.set-scheduler-address'
    address_component_type = 'scheduler'

    def _handle(self, address:'str') -> 'None':
        # Broadcast the new scheduler address for all servers to pick up.
        msg = {
            'action': SCHEDULER_MSG.SET_SCHEDULER_ADDRESS.value,
            'address': address,
        }
        self.broker_client.publish(msg)
# ################################################################################################################################
# ################################################################################################################################
| 20,221
|
Python
|
.py
| 376
| 45.521277
| 130
| 0.477706
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,249
|
audit_log.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/audit_log.py
|
# -*- coding: utf-8 -*-
# stdlib
from operator import itemgetter
# Zato
from zato.server.service import AsIs, Int
from zato.server.service.internal import AdminService
# ################################################################################################################################
# ################################################################################################################################
# Imported for type hints only - the `if 0` guard keeps the import out of runtime
if 0:
    from zato.common.audit_log import LogContainer
    LogContainer = LogContainer
# ################################################################################################################################
# ################################################################################################################################
class GetEventList(AdminService):
    """ Returns audit log events for a given connection type and object, newest first.
    """
    name = 'zato.audit-log.event.get-list'

    class SimpleIO:
        input_optional = 'cluster_id', 'type_', AsIs('object_id')
        output_optional = 'server_name', 'server_pid', 'type_', AsIs('object_id'), 'direction', 'timestamp', \
            AsIs('msg_id'), AsIs('event_id'), AsIs('conn_id'), 'in_reply_to', 'data', Int('data_len')

# ################################################################################################################################

    def handle(self):
        type_ = self.request.input.type_
        object_id = self.request.input.object_id

        # May be falsy if there is no container for this type/object pair,
        # in which case the response stays empty.
        result = self.server.audit_log.get_container(type_, object_id) # type: LogContainer

        if result:
            result = result.to_dict() # type: dict
            out = []

            for value in result.values(): # type: list
                for item in value: # type: dict
                    # Enrich each event with details of the server that produced it
                    item['server_name'] = self.server.name
                    item['server_pid'] = self.server.pid
                    item['data_len'] = len(item['data']) if item['data'] is not None else 0
                    out.append(item)

            # Newest events first
            out.sort(key=itemgetter('timestamp'), reverse=True)

            self.response.payload[:] = out
# ################################################################################################################################
# ################################################################################################################################
| 2,327
|
Python
|
.py
| 37
| 55.081081
| 130
| 0.362357
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,250
|
updates.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/updates.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from errno import ENETUNREACH
from http.client import OK
from traceback import format_exc
# Bunch
from bunch import bunchify
# gevent
from gevent import sleep, spawn
# Requests
from requests import get as requests_get
from requests.exceptions import ConnectionError
# Zato
from zato.common.version import get_version
from zato.common.json_internal import loads
from zato.server.service import Service
# ################################################################################################################################
# Current Zato version
version = get_version()

# If a version has this prefix it means someone is running from source code
source_prefix = 'pre'

# Cache keys storing when users were last notified about each release type
cache_key_major = 'zato.updates.major.last-notified'
cache_key_minor = 'zato.updates.minor.last-notified'

# In seconds
day = 24 * 60 * 60

# We notify users of major releases half a year = 180 days
delta_major_days = 180
delta_major = day * delta_major_days

# We notify users of minor releases bimonthly = 60 days
delta_minor_days = 60
delta_minor = day * delta_minor_days

# Download and info URLs - one endpoint per major version
url_info = 'https://zato.io/support/updates/info-{}.json'
# ################################################################################################################################
class CheckUpdates(Service):
    """ Checks if there are any minor or major Zato updates and notifies in server.log
    if there are any, if told to and it is time to do so.
    """

# ################################################################################################################################

    def handle(self):

        # Run in a new greenlet in case we are invoked externally, e.g. over HTTP
        _ = spawn(self._serve_forever)

# ################################################################################################################################

    def _serve_forever(self):
        """ Runs the update check once a day, forever.
        (Renamed from the original, misspelled `_serve_forerver`.)
        """
        try:
            _version = version.replace('Zato ', '')
            major = _version[:3]
            minor = _version[:5]

            # Each major version has its own endpoint
            while True:

                # Check if there are updates and notify if needed
                try:
                    self._check_notify(url_info, major, minor, _version)
                except Exception:
                    pass # Ignore any and all errors, e.g. due to the lack of Internet connectivity

                # We can sleep for 1 day and then check again
                sleep(day)

        except Exception:
            self.logger.warning(format_exc())

# ################################################################################################################################

    def _get_current(self, _url_info, self_major, self_version):
        """ Returns update metadata for our major version, or None if it cannot be obtained. """
        try:
            response = requests_get(_url_info.format(self_major), params={'v':self_version})
        except ConnectionError as e:
            # We ignore ENETUNREACH because it simply means that we could not connect to the server,
            # which is fine, e.g. no Internet connectivity is allowed in that system.
            if e.errno != ENETUNREACH:
                raise
        else:
            if response.status_code == OK:
                return bunchify(loads(response.text))

# ################################################################################################################################

    def _check_notify(self, _url_info, self_major, self_minor, self_version, delta_major=delta_major, delta_minor=delta_minor):
        # NOTE(review): the result is discarded - the notification logic appears to be unimplemented.
        _ = self._get_current(_url_info, self_major, self_version)
# ################################################################################################################################
| 3,894
|
Python
|
.py
| 79
| 43.126582
| 130
| 0.509648
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,251
|
helpers.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/helpers.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from dataclasses import dataclass
from datetime import datetime
from io import StringIO
from json import dumps, loads
from logging import DEBUG, getLogger
from tempfile import gettempdir
from time import sleep
from traceback import format_exc
from unittest import TestCase
# Zato
from zato.common.api import WEB_SOCKET
from zato.common.exception import Forbidden
from zato.common.pubsub import PUBSUB
from zato.common.test import rand_csv, rand_string
from zato.common.typing_ import cast_, intnone, list_, optional
from zato.common.util.open_ import open_rw, open_w
from zato.server.commands import CommandResult, Config
from zato.server.connection.facade import RESTInvoker
from zato.server.service import AsIs, Model, PubSubHook, Service
from zato.server.service.internal.service import Invoke
# ################################################################################################################################
# ################################################################################################################################
# Imported for type hints only - the `if 0` guard keeps these imports out of runtime
if 0:
    from requests import Response
    from zato.common.pubsub import PubSubMessage
    from zato.common.typing_ import any_, anydict, anytuple

# ################################################################################################################################
# ################################################################################################################################

logger = getLogger(__name__)

# ################################################################################################################################
# ################################################################################################################################

# Services that the WebSockets gateway allows to be invoked out of the box
default_services_allowed = (
    'zato.pubsub.pubapi.publish-message',
    'zato.pubsub.pubapi.subscribe-wsx',
    'zato.pubsub.pubapi.unsubscribe',
    'zato.pubsub.resume-wsx-subscription',
    'zato.pubsub.subscription.create-wsx-subscription',
    'zato.ping'
)

# This is an indication to the WSX serialization layer
# that a response was produced by our gateway service.
wsx_gateway_response_elem = WEB_SOCKET.GatewayResponseElem
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class User(Model):
    """ Test support model - a user with an optional display name. """
    user_id: int
    username: str
    display_name: optional[str]
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class UserAccount(Model):
    """ Test support model - an account belonging to a user. """
    user: User
    account_id: int
    account_type: intnone
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class GetUserRequest(Model):
    """ Test support model - request for APISpecHelperUser. """
    username: str
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class GetUserAccountListRequest(Model):
    """ Test support model - request for APISpecHelperAccountList. """
    user_id: optional[int]
    account_id: int
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class GetUserAccountListResponse(Model):
    """ Test support model - response for APISpecHelperAccountList. """
    user_account_list: list_[UserAccount]
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class GetUserResponse(Model):
    """ Test support model - response for APISpecHelperUser. """
    user: list_[User]
    parent_user: list_[optional[User]]
    previous_user: optional[list_[User]]
# ################################################################################################################################
# ################################################################################################################################
class Echo(Service):
    """ Copies request over to response.
    """
    def handle(self) -> 'None':
        # Whatever came in goes straight back out.
        incoming = self.request.raw_request
        self.response.payload = incoming
# ################################################################################################################################
# ################################################################################################################################
class InputLogger(Service):
    """ Writes out all input data to server logs.
    """
    def handle(self):
        # Nothing to do here - logging happens in finalize_handle below.
        pass

    def finalize_handle(self) -> 'None': # type: ignore
        # Log the input only once the service has completed.
        _ = self.log_input()
# ################################################################################################################################
# ################################################################################################################################
class PubInputLogger(InputLogger):
    """ Same as InputLogger but has a publicly available name
    """
    name = 'pub.helpers.input-logger'
# ################################################################################################################################
# ################################################################################################################################
class RawRequestLogger(Service):
    """ Writes out self.request.raw_request to server logs.
    """
    name = 'pub.helpers.raw-request-logger'

    def handle(self) -> 'None':
        # Log the request exactly as it was received, before any parsing.
        raw = self.request.raw_request
        self.logger.info('Received request: `%s`', raw)
# ################################################################################################################################
# ################################################################################################################################
class IBMMQLogger(Service):
    """ Writes out self.request.raw_request to server logs.
    """
    name = 'pub.helpers.ibm-mq-logger'

    def handle(self) -> 'None':

        # Log entry template, filled in with MQMD metadata below
        template = """
***********************
IBM MQ message received
***********************
MsgId: `{msg_id}`
CorrelId: `{correlation_id}`
Timestamp: `{timestamp}`
PutDate: `{put_date}`
PutTime: `{put_time}`
ReplyTo: `{reply_to}`
MQMD: `{mqmd!r}`
-----------------------
Data: `{data}`
***********************
"""

        mq = self.request.ibm_mq
        na = 'n/a'

        # MsgId may contain non-ASCII bytes - fall back to its repr if so
        try:
            msg_id = mq.msg_id.decode('ascii') # type: ignore
        except UnicodeDecodeError:
            msg_id = repr(mq.msg_id)

        # Same fallback for the correlation ID, which may also be absent entirely
        if mq.correlation_id:
            try:
                correlation_id = mq.correlation_id.decode('ascii') # type: ignore
            except UnicodeDecodeError:
                correlation_id = repr(mq.correlation_id)
        else:
            correlation_id = na

        info = {
            'msg_id': msg_id,
            'correlation_id': correlation_id,
            'timestamp': mq.timestamp,
            'put_date': mq.put_date,
            'put_time': mq.put_time,
            'reply_to': mq.reply_to or na,
            'mqmd': str(mq.mqmd).splitlines(),
            'data': self.request.raw_request,
        }

        msg = template.format(**info)

        # NOTE(review): the message is UTF-8-encoded before logging, so the log line
        # will show a bytes repr (b'...') rather than plain text - confirm this is intended.
        msg_out = msg.encode('utf8')

        self.logger.info(msg_out)
# ################################################################################################################################
# ################################################################################################################################
class JSONRawRequestLogger(RawRequestLogger):
    """ Same as RawRequestLogger but returns a JSON response.
    """
    def handle(self) -> 'None':
        # Log the raw request first ..
        super().handle()

        # .. then confirm with a static JSON payload.
        self.response.payload = {'status': 'OK'}
# ################################################################################################################################
# ################################################################################################################################
class SIOInputLogger(Service):
    """ Writes out all SIO input parameters to server logs.
    """
    def handle(self) -> 'None':
        # %r gives an unambiguous repr of the parsed SIO input
        self.logger.info('%r', self.request.input)
# ################################################################################################################################
# ################################################################################################################################
class HTMLService(Service):
    """ Base class for services that render HTML responses with Django templates. """

    def before_handle(self) -> 'None': # type: ignore

        # Configure Django if this service is used - note that we are not doing it
        # globally for the module because the configuration takes some milliseconds
        # the first time around (but later on it is not significant).

        # Django
        import django
        from django.conf import settings

        # Configure Django settings when the module is picked up
        if not settings.configured:
            settings.configure()
            django.setup()

    def set_html_payload(self, ctx:'any_', template:'str', content_type:'str'='text/html; charset=utf-8') -> 'None':
        """ Renders `template` with context `ctx` and attaches the result as this service's HTML response. """

        # Django
        from django.template import Context, Template

        # Generate HTML and return response
        c = Context(ctx)
        t = Template(template)
        payload = t.render(c).encode('utf-8')

        self.logger.debug('Ctx:[%s]', ctx)
        self.logger.debug('Payload:[%s]', payload)

        if self.logger.isEnabledFor(DEBUG):
            # NOTE(review): this buffer is never written to before being logged,
            # so an empty string is emitted - confirm whether output was meant to be collected here.
            buff = StringIO()
            self.logger.debug(buff.getvalue())
            buff.close()

        self.response.payload = payload
        self.response.content_type = content_type
# ################################################################################################################################
# ################################################################################################################################
class TLSLogger(Service):
    """ Logs details of client TLS certificates.
    """
    def handle(self) -> 'None':
        # Collect all TLS-related headers in sorted order ..
        tls_items = [(key, value) for key, value in sorted(self.wsgi_environ.items())
            if key.startswith('HTTP_X_ZATO_TLS_')]

        # .. log each one ..
        for key, value in tls_items:
            self.logger.info('%r: %r', key, value)

        # .. and warn if there were none at all.
        if not tls_items:
            self.logger.warning('No HTTP_X_ZATO_TLS_* headers found')
# ################################################################################################################################
# ################################################################################################################################
class WebSocketsGateway(Service):
    """ Dispatches incoming WebSocket requests to target services.
    """
    name = 'helpers.web-sockets-gateway'

    # Subclasses may extend this list with additional services that they allow
    services_allowed = []

    class SimpleIO:
        input_required = 'service'
        input_optional = AsIs('request')
        output_optional = 'sub_key', AsIs(wsx_gateway_response_elem)
        skip_empty_keys = True

    def handle(self, _default_allowed:'anytuple'=default_services_allowed) -> 'None':

        # Local aliases
        input = self.request.input
        service = input.service

        # Make sure that the service can be invoked if we have one on input
        if service:

            # These services can be always invoked
            is_allowed_by_default = service in _default_allowed

            # Our subclasses may add more services that they allow
            is_allowed_by_self_service = service in self.services_allowed

            # Ensure that the input service is allowed
            if not (is_allowed_by_default or is_allowed_by_self_service):
                self.logger.warning('Service `%s` is not among %s', service, self.services_allowed) # noqa: E117
                raise Forbidden(self.cid)

        # We need to special-case pub/sub subscriptions
        # because they will require calling self.pubsub on behalf of the current WSX connection.
        if service == 'zato.pubsub.pubapi.subscribe-wsx':
            topic_name = input.request['topic_name']
            unsub_on_wsx_close = input.request.get('unsub_on_wsx_close', True)
            sub_key = self.pubsub.subscribe(
                topic_name,
                use_current_wsx=True,
                unsub_on_wsx_close=unsub_on_wsx_close,
                service=self,
                cid=input.cid,
            )
            self.response.payload.sub_key = sub_key

        else:
            # The service that we are invoking may be interested in what the original, i.e. ours, channel was.
            self.wsgi_environ['zato.orig_channel'] = self.channel

            # Invoke the underlying service and get its response
            response = self.invoke(
                service,
                self.request.input.request,
                wsgi_environ=self.wsgi_environ,
                skip_response_elem=True,
                cid=self.cid,
            )

            # Use setattr to attach the response because we keep the response element's name in a variable
            setattr(self.response.payload, wsx_gateway_response_elem, response)
# ################################################################################################################################
# ################################################################################################################################
class WebSocketsPubSubGateway(Service):
    """ Dispatches incoming WebSocket publish/subscribe requests to target services.
    """
    name = 'helpers.web-sockets-pub-sub-gateway'

    class SimpleIO:
        input_required = ('service',)
        input_optional = (AsIs('request'),)

# ################################################################################################################################

    def handle(self) -> 'None':
        # Hand the request over to the target service, preserving our WSGI environment.
        target = self.request.input.service
        payload = self.request.input.request
        self.response.payload = self.invoke(target, payload, wsgi_environ=self.wsgi_environ)
# ################################################################################################################################
# ################################################################################################################################
class ServiceGateway(Invoke):
    """ Service to invoke other services through.
    """
    # All the logic is inherited from Invoke - this class only gives it a public name.
    name = 'helpers.service-gateway'
# ################################################################################################################################
# ################################################################################################################################
class APISpecHelperUser(Service):
    """ Test support services - User.
    """
    name = 'helpers.api-spec.user'

    class SimpleIO:
        input = GetUserRequest
        output = GetUserResponse

# ################################################################################################################################

    def handle(self):

        # Our request
        request = self.request.input # type: GetUserRequest

        def _build_user(user_id:'int') -> 'User':
            # Construct a test user whose fields are derived from its numeric ID
            # and the username given on input.
            user = User()
            user.user_id = user_id
            user.username = 'username.{}'.format(user_id)
            user.display_name = 'display_name.{}.{}'.format(user_id, request.username)
            return user

        # Response to produce
        out = GetUserResponse()

        # Note that the 222 user comes before the 111 one - this is on purpose because
        # the test that invokes us will check that this is the specific order, non-ascending,
        # that we are returning the data in, i.e. that nothing attempts to sort it itself
        # before the data is returned to the caller (to the test).
        out.user = [_build_user(222), _build_user(111)]

        # These are empty lists on purpose because both fields are optional
        out.parent_user = []
        out.previous_user = []

        self.response.payload = out
# ################################################################################################################################
# ################################################################################################################################
class APISpecHelperAccountList(Service):
    """ Test support services - AccountList.
    """
    name = 'helpers.api-spec.account-list'

    class SimpleIO:
        input = GetUserAccountListRequest
        output = GetUserAccountListResponse

# ################################################################################################################################

    def handle(self):

        # Our request
        request = self.request.input # type: GetUserAccountListRequest

        def _build_account(user_id:'int', account_base:'int', account_type:'int') -> 'UserAccount':
            # Construct a test account with a user derived from the numeric ID
            # and the account details derived from the request.
            user = User()
            user.user_id = user_id
            user.username = 'username.{}'.format(user_id)
            user.display_name = 'display_name.{}.{}'.format(user_id, request.user_id)

            account = UserAccount()
            account.user = user
            account.account_id = account_base + request.account_id
            account.account_type = account_type
            return account

        # Response to produce - note the non-ascending order, mirroring APISpecHelperUser.
        out = GetUserAccountListResponse()
        out.user_account_list = [
            _build_account(222, 2020, 2222),
            _build_account(111, 1010, 1111),
        ]

        self.response.payload = out
# ################################################################################################################################
# ################################################################################################################################
class HelperPubSubTarget(Service):
    """ Test support services - PubSubTarget.
    """
    name = 'helpers.pubsub.target'

    def handle(self):

        # Our request
        msg = self.request.raw_request # type: list_[PubSubMessage]

        # Whatever happens next, log what we received
        self.logger.info('************************** 1) I was invoked with %r', msg)

        # .. load the inner dict ..
        data = msg[0].data # type: anydict | str

        # .. confirm what it was ..
        self.logger.info('************************** 2) Data is %r', data)

        # .. this may be a dict or an empty string, the latter is the case
        # .. when a message is published via self.pubsub.publish with no input ..
        if data:

            # .. rule out string objects ..
            if isinstance(data, dict):

                # .. optionally, save our input data for the external caller to check it ..
                if data['target_needs_file']:

                    # .. this is where we will save our input data ..
                    file_name = data['file_name']

                    # .. we will save the message as a JSON one ..
                    json_msg = msg[0].to_json(needs_utf8_decode=True)

                    # .. confirm what we will be saving and where ..
                    self.logger.info('Saving data to file `%s` -> `%s`', file_name, json_msg)

                    # .. and actually save it now.
                    f = open_rw(file_name)
                    _ = f.write(json_msg)
                    f.close()
# ################################################################################################################################
# ################################################################################################################################
class HelperPubSubHook(PubSubHook):
    """ Test support services - PubSubHook.

    A before-publish hook that, for JSON messages carrying a 'file_name' key,
    saves the raw message data to <file_name>.hook-before-publish.json.
    The publication always proceeds (HOOK_ACTION.DELIVER).
    """
    name = 'helpers.pubsub.hook'

    class SimpleIO:
        input_required = AsIs('ctx')
        output_required = 'hook_action'

    def before_publish(self):

        # Local aliases
        data = self.request.input.ctx.msg.data
        pub_msg_id = self.request.input.ctx.msg.pub_msg_id

        # Log what we have received ..
        self.logger.info('Helpers before_publish; pub_msg_id:`%s`, data:`%s`', pub_msg_id, data)

        # .. unless this is a test message, load data from JSON ..
        if data != PUBSUB.DEFAULT.Dashboard_Message_Body:

            # .. the data may be still user-provided, in which case it may not be JSON at all ..
            try:
                dict_data = loads(data)
            except Exception:
                # This is fine, it was not JSON
                pass
            else:
                # .. find information where we should save our input to ..
                file_name = dict_data['file_name']

                # .. add a suffix so as not to clash with the main recipient of the message ..
                file_name = file_name + '.hook-before-publish.json'

                # .. store our input in a file for the external caller to check it ..
                f = open_rw(file_name)
                _ = f.write(data)
                f.close()

        # .. and proceed with the publication
        self.response.payload.hook_action = PUBSUB.HOOK_ACTION.DELIVER
# ################################################################################################################################
# ################################################################################################################################
class HelperPubSubSource(Service):
    """ Test support services - PubSubSource.

    Publishes a message to HelperPubSubTarget twice - first to force the topic's
    creation, then, after assigning HelperPubSubHook as the topic's hook service,
    once more so the hook is exercised. The order of the steps below matters.
    """
    name = 'helpers.pubsub.source'

    class SimpleIO:
        input_required = 'random_data', 'file_name'

    def handle(self):

        # Local aliases
        data = self.request.raw_request # type: dict
        topic_name = '/zato/s/to/helpers_pubsub_target'

        # The first time around, we need a file from the target service ..
        data['target_needs_file'] = True

        # Publish the message ..
        self.pubsub.publish(HelperPubSubTarget, data=data)

        # .. the topic for that service has to be potentially created so we wait here until it appears ..
        _ = self.pubsub.wait_for_topic(topic_name)
        sleep(0.1)

        # .. now, once the message has been published, we know that the topic
        # .. for the receiving service exists, so we can assign a hook service to it ..
        response = self.invoke('zato.pubsub.topic.get', {'cluster_id': self.server.cluster_id, 'name': topic_name})
        response = response['response']

        # Request to edit the topic with
        request = {}

        # These can be taken from the previous response as-is
        keys = ('has_gd', 'id','is_active', 'is_internal', 'max_depth_gd', 'max_depth_non_gd', 'name', 'on_no_subs_pub')

        # .. add the default keys ..
        for key in keys:
            request[key] = response[key]

        # .. set the helper hook service and other metadata..
        request['hook_service_name'] = HelperPubSubHook.get_name()
        request['cluster_id'] = self.server.cluster_id
        request['depth_check_freq'] = 500
        request['is_api_sub_allowed'] = True
        request['pub_buffer_size_gd'] = 500
        request['task_sync_interval'] = 500
        request['task_delivery_interval'] = 500

        # .. now, we can edit the topic to set its hooks service
        response = self.invoke('zato.pubsub.topic.edit', request)

        # .. once again, wait until the topic has been recreated ..
        _ = self.pubsub.wait_for_topic(topic_name)
        sleep(0.1)

        # The second time around, the target service should not create a file
        data['target_needs_file'] = False

        # .. and now, we can publish the message once more, this time around expecting
        # .. that the hook service will be invoked ..
        self.pubsub.publish(HelperPubSubTarget, data=data)
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class MyUser(Model):
    """ Sample model exercising required and optional container fields in API-spec tests. """
    user_name: str # This is a string
    address_data: dict # This is a dict
    prefs_dict: optional[dict] # This is an optional dict
    phone_list: list # This is a list
    email_list: optional[list] # This is an optional list
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class MyAccount(Model):
    """ Sample model exercising the various field-description styles that API-spec tests recognize. """

    # This description is above the field
    account_no: int

    account_type: str # This is an inline description

    account_segment: str
    """ This is a multiline description,
    it has two lines.
    """
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class MyAccountList(Model):
    """ Sample wrapper model - a list of MyAccount objects. """
    account_list: list_[MyAccount]
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class MyRequest(Model):
    """ Sample request model combining a scalar field with a nested model. """
    request_id: int
    user: MyUser
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class MyResponse(Model):
    """ Sample response model - init=False so fields may be assigned after construction. """
    current_balance: int
    last_account_no: int = 567 # Note the default value
    pref_account: MyAccount
    account_list: MyAccountList
# ################################################################################################################################
# ################################################################################################################################
class MyDataclassService(Service):
    """ This is my service.

    It has a docstring.
    """
    name = 'helpers.dataclass-service'

    class SimpleIO:
        input = MyRequest
        output = MyResponse

    def handle(self):
        # Intentionally empty - this service exists only so its SimpleIO
        # dataclass models can be introspected by API-spec tests.
        pass
# ################################################################################################################################
# ################################################################################################################################
class PubAPIInvoker(Service):
    """ Tests services that WebSocket clients use.

    Runs an embedded unittest suite several times in a row; on the first failed
    run, it serializes the errors/failures to JSON and stops. Returns 'OK' only
    if every iteration passes.
    """
    name = 'helpers.pubsub.pubapi-invoker'

    def handle(self):

        # stdlib
        from unittest import defaultTestLoader, TestCase, TextTestRunner

        # Zato
        from zato.common.test.pubsub.common import FullPathTester, PubSubTestingClass

        # Note - the methods below use `_self` for the TestCase instance so that
        # `self` keeps referring to the enclosing service in each closure.
        class WSXServicesTestCase(TestCase, PubSubTestingClass):

            def _subscribe(_self, topic_name): # type: ignore
                service = 'zato.pubsub.pubapi.subscribe'
                response = self.invoke(service, {'topic_name': topic_name})
                return response['sub_key']

            def _unsubscribe(_self, topic_name): # type: ignore
                service = 'zato.pubsub.pubapi.unsubscribe'
                response = self.invoke(service, {'topic_name': topic_name})
                return response

            def _publish(_self, topic_name, data): # type: ignore
                service = 'zato.pubsub.pubapi.publish-message'
                response = self.invoke(service, {'topic_name': topic_name, 'data':data})
                return response

            def _receive(_self, topic_name): # type: ignore
                service = 'zato.pubsub.pubapi.get-messages'
                response = self.invoke(service, {'topic_name': topic_name})
                return response

            def test_wsx_services_full_path_subscribe_before_publication(_self):
                tester = FullPathTester(_self, True)
                tester.run()

            def test_wsx_services_full_path_subscribe_after_publication(_self):
                tester = FullPathTester(_self, False)
                tester.run()

        try:
            iters = 10
            for _ in range(iters):

                suite = defaultTestLoader.loadTestsFromTestCase(WSXServicesTestCase)
                result = TextTestRunner().run(suite)

                if result.errors or result.failures:

                    errors = []
                    failures = []

                    response = {
                        'errors': errors,
                        'failures': failures,
                    }

                    for error in result.errors: # Each is a (test, traceback-string) pair
                        test, reason = error
                        test = str(test)
                        _error = {
                            'error_test': test,
                            'error_reason': reason,
                        }
                        self.logger.warning('Test error in %s\n%s', test, reason)
                        errors.append(_error)

                    for failure in result.failures:
                        test, reason = failure
                        test = str(test)
                        reason = '\n'.join(reason)
                        _failure = {
                            'failure_test': test,
                            'failure_reason': reason,
                        }
                        self.logger.warning('Test failure in %s\n%s', test, reason)
                        failures.append(_failure)

                    # Serialize all the warnings and errors ..
                    self.response.payload = dumps(response)

                    # .. and do resume the test.
                    break

            # If we are here (the for loop's else clause), no iteration broke out,
            # which means that there was no error
            else:
                self.response.payload = 'OK'

        except Exception:
            msg = 'Exception in {} -> {}'.format(self.__class__.__name__, format_exc())
            self.logger.warning(msg)
            self.response.payload = msg
# ################################################################################################################################
# ################################################################################################################################
class CommandsService(Service):
    """ Exercises self.commands through an embedded test suite - invoked by Zato's own tests.

    Fix: removed a dead bare-expression statement (`test_suite`) from handle -
    it was a no-op leftover that evaluated the name and discarded the result.
    """
    name = 'helpers.commands-service'

    class SimpleIO:
        output = CommandResult

    class _CommandsServiceTestCase(TestCase):
        """ The actual assertions - a TestCase instantiated and driven manually
        by the enclosing service rather than by a unittest runner.
        """

        def __init__(self, service:'Service') -> 'None':
            super().__init__()
            self.service = service

# ################################################################################################################################

        def _test_impl(
            self,
            *,
            cid:'str' = '',
            data:'str' = '',
            stdin:'str' = '',
            command:'str' = '',
            timeout:'float' = Config.Timeout,
            is_async:'bool' = False,
            encoding:'str' = Config.Encoding,
            callback:'any_' = None,
            use_pubsub:'bool' = False,
            replace_char:'str' = Config.ReplaceChar,
            is_multiline:'bool' = False,
        ) -> 'None':
            """ Runs a single scenario - writes test data to a temporary file, builds
            a command that reads the file back and asserts that the invocation result
            matches the options given on input.
            """

            # Local aliases
            tmp_dir = gettempdir()

            # Test data that we will expect to read back from a test file
            data = data or rand_csv()
            len_data = len(data)

            # If we use the default timeout, it actually means that we do not want to have any
            if timeout == Config.Timeout:
                timeout = cast_('float', None)

            # Where our test data is
            test_file_name = rand_string(prefix='commands-test_invoke_core') + '.txt'
            full_path = os.path.join(tmp_dir, test_file_name)

            # Log useful details
            logger.info('Saving test data `%s` to file `%s`', data, full_path)

            # Populate the file with test data
            with open_w(full_path) as f:
                _ = f.write(data)

            # Read the file back.
            # NOTE(review): this deliberately overwrites any `command` given on input -
            # every scenario is built around reading the test file back.
            command = f'cat {full_path}'

            # Prepend new lines if the command is multiline
            if is_multiline:

                # Let's add two lines in front of the actual command
                line1 = 'cd {} \\ \n'.format(tmp_dir)
                line2 = 'cd {} \\ \n'.format(tmp_dir)
                command = line1 + line2 + command

            # If we have a timeout on input, let's sleep for more than that
            # before running the command. We use an integer instead of a smaller number like 1.1
            # because whether the sleep time is considered a float or integer is up to the underlying shell.
            # To stay on the safe side, it is an integer in our test.
            if timeout:
                sleep_time = timeout * 2
                prefix = f'sleep {sleep_time}'
                command = prefix + ' && ' + command

            # To check that results contain correct timestamps
            now_before_test = datetime.utcnow()

            # Pick the sync or async flavour, depending on the scenario
            func = self.service.commands.invoke_async if is_async else self.service.commands.invoke

            # Invoke the commands to get the result
            result = func(
                command,
                cid=cid,
                timeout=timeout,
                callback=callback,
                stdin=stdin,
                replace_char=replace_char,
                encoding=encoding,
                use_pubsub=use_pubsub,
            )

            logger.info('Result received -> %s', result)

            # To check that results contain correct timestamps
            now_after_test = datetime.utcnow()

            # .. and run the actual tests now ..

            self.assertEqual(result.timeout, timeout)

            # A timed-out command reports an error and carries no output at all ..
            if timeout:

                self.assertEqual(result.exit_code, -1)

                self.assertFalse(result.is_ok)
                self.assertFalse(result.is_async)
                self.assertTrue(result.is_timeout)

                expected_timeout_msg = f'Command \'{command}\' timed out after {timeout} sec.'
                self.assertEqual(result.timeout_msg, expected_timeout_msg)

                self.assertEqual(result.stdin, '')
                self.assertEqual(result.stderr, '')
                self.assertEqual(result.stdout, '')

                self.assertEqual(result.len_stderr_bytes, 0)
                self.assertEqual(result.len_stderr_human, '')
                self.assertEqual(result.len_stdout_bytes, 0)
                self.assertEqual(result.len_stdout_human, '')

            else:

                # .. an async invocation only confirms that the command was accepted ..
                if is_async:
                    self.assertEqual(result.exit_code, -1)
                    self.assertTrue(result.is_ok)
                    self.assertTrue(result.is_async)
                    return

                # .. a successful, synchronous invocation echoes the test data back.
                else:
                    self.assertEqual(result.exit_code, 0)

                    self.assertTrue(result.is_ok)
                    self.assertFalse(result.is_async)
                    self.assertFalse(result.is_timeout)
                    self.assertEqual(result.timeout_msg, '')

                    self.assertEqual(result.stdin, stdin)
                    self.assertEqual(result.stderr, '')
                    self.assertEqual(result.stdout, data)

                    self.assertEqual(result.len_stderr_bytes, 0)
                    self.assertEqual(result.len_stderr_human, '0 Bytes')
                    self.assertEqual(result.len_stdout_bytes, len_data)
                    self.assertEqual(result.len_stdout_human, '{} Bytes'.format(len_data))

            # Either the caller's CID was echoed back or an auto-generated one was assigned
            if cid:
                self.assertEqual(result.cid, cid)
            else:
                self.assertTrue(result.cid.startswith('zcmd'))

            self.assertEqual(result.encoding, encoding)
            self.assertEqual(result.replace_char, replace_char)

            self.assertIsInstance(result.total_time_sec, float)

            # The ISO-formatted timestamps must agree with the datetime objects ..
            start_time_as_iso = result.start_time.isoformat()
            end_time_as_iso = result.end_time.isoformat()

            self.assertEqual(result.start_time_iso, start_time_as_iso)
            self.assertEqual(result.end_time_iso, end_time_as_iso)

            # .. and the start/end times must bracket the command's execution window.
            self.assertLess(result.start_time, result.end_time)

            self.assertLess(now_before_test, result.start_time)
            self.assertLess(now_before_test, result.end_time)

            self.assertGreater(now_after_test, result.start_time)
            self.assertGreater(now_after_test, result.end_time)

# ################################################################################################################################

        def test_invoke_core(self):
            # This is the same as the base test
            self._test_impl()

# ################################################################################################################################

        def test_invoke_multiline(self):
            self._test_impl(is_multiline=True)

# ################################################################################################################################

        def test_invoke_with_timeout(self):
            self._test_impl(timeout=1)

# ################################################################################################################################

        def test_invoke_with_own_cid(self):
            self._test_impl(cid='abcdef')

# ################################################################################################################################

        def test_invoke_with_encoding(self):
            self._test_impl(encoding='ascii')

# ################################################################################################################################

        def test_invoke_with_replace_char(self):
            self._test_impl(replace_char='?')

# ################################################################################################################################

        def test_invoke_with_stdin(self):
            self._test_impl(stdin='hello')

# ################################################################################################################################

        def test_invoke_with_callback_function(self):
            self._test_impl(callback=self._on_command_completed)

# ################################################################################################################################

        def test_invoke_with_callback_service_class(self):

            # stdlib
            from zato.server.service.internal.helpers import RawRequestLogger

            self._test_impl(callback=RawRequestLogger)

# ################################################################################################################################

        def test_invoke_with_callback_service_name(self):

            # stdlib
            from zato.server.service.internal.helpers import RawRequestLogger

            self._test_impl(callback=RawRequestLogger.get_name())

# ################################################################################################################################

        def test_invoke_with_callback_topic_name(self):
            self._test_impl(callback='zato.ping', use_pubsub=True)

# ################################################################################################################################

        def test_invoke_async_core(self):
            self._test_impl(is_async=True)

# ################################################################################################################################

        def _on_command_completed(self, result:'CommandResult') -> 'None':
            # A do-nothing callback target for the callback-function scenario
            pass

# ################################################################################################################################

    def handle(self):

        # Build the test suite ..
        test_suite = self._CommandsServiceTestCase(self)

        #
        # Sync invoke
        #
        test_suite.test_invoke_core()
        test_suite.test_invoke_multiline()
        test_suite.test_invoke_with_timeout()
        test_suite.test_invoke_with_own_cid()
        test_suite.test_invoke_with_encoding()
        test_suite.test_invoke_with_replace_char()
        test_suite.test_invoke_with_stdin()
        test_suite.test_invoke_with_callback_function()
        test_suite.test_invoke_with_callback_service_class()
        test_suite.test_invoke_with_callback_service_name()
        test_suite.test_invoke_with_callback_topic_name()

        #
        # Async invoke
        #
        test_suite.test_invoke_async_core()

        # .. if we are here, no assertion above failed.
        self.response.payload = 'OK'
# ################################################################################################################################
class RESTInternalTester(Service):
    """ Invokes an outgoing REST connection in several ways - ping/get/post,
    looked up both by item access (self.rest[name]) and by attribute access -
    and validates the responses. Used by internal tests.
    """
    name = 'helpers.rest.internal.tester'

    def _run_assertions(self, result:'str | Response', *, is_ping:'bool'=False) -> 'None':
        """ Raises an exception unless the result looks like a valid reply from a Zato server.
        """

        # stdlib
        from http.client import OK

        if is_ping:
            # A ping result may be a raw string echo of the HEAD request ..
            if isinstance(result, str):
                if not 'HEAD' in result:
                    raise Exception(f'Invalid ping result (1) -> {result}')
            # .. or a Response object whose status should be 200 OK.
            else:
                if not result.status_code == OK:
                    raise Exception(f'Invalid ping result (2) -> {result}')
        else:
            # Non-ping results should carry the Server: Zato response header
            result = cast_('Response', result)
            headers = result.headers
            server = headers['Server']
            if not server == 'Zato':
                raise Exception(f'Unrecognized Server header in {headers}')

# ################################################################################################################################

    def test_rest_by_getitem_no_cid(self, conn_name:'str') -> 'None':
        """ Looks the connection up via self.rest[name] and invokes it without a CID.
        """
        conn = self.rest[conn_name]

        result = conn.ping()
        self._run_assertions(result, is_ping=True)

        result = conn.get()
        self._run_assertions(result)

        result = conn.post('abc')
        self._run_assertions(result)

# ################################################################################################################################

    def test_rest_by_getitem_with_cid(self, conn_name:'str') -> 'None':
        """ As above but passes self.cid explicitly to each call.
        """
        conn = self.rest[conn_name]

        result = conn.ping(self.cid)
        self._run_assertions(result, is_ping=True)

        result = conn.get(self.cid)
        self._run_assertions(result)

        result = conn.post(self.cid, 'abc')
        self._run_assertions(result)

# ################################################################################################################################

    def test_rest_by_getattr(self, conn:'RESTInvoker') -> 'None':
        """ Invokes a connection object that was obtained through attribute access.
        """
        result = conn.ping()
        self._run_assertions(result, is_ping=True)

        result = conn.get()
        self._run_assertions(result)

        result = conn.post('abc')
        self._run_assertions(result)

# ################################################################################################################################

    def handle(self) -> 'None':

        conn_name = 'pubsub.demo.sample.outconn'

        self.test_rest_by_getitem_no_cid(conn_name)
        self.test_rest_by_getitem_with_cid(conn_name)

        # Attribute access uses underscores in place of the dots in the connection's name
        conn = self.rest.pubsub_demo_sample_outconn
        self.test_rest_by_getattr(conn) # type: ignore
# ################################################################################################################################
# ################################################################################################################################
| 45,648
|
Python
|
.py
| 855
| 43.956725
| 130
| 0.432329
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,252
|
server.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/server.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.exception import ZatoException
from zato.common.odb.model import Server
from zato.common.odb.query import server_list
from zato.server.service.internal import AdminService, AdminSIO
from zato.server.service.meta import GetListMeta
# ################################################################################################################################
# Module-level metadata consumed by GetListMeta (see the GetList class below)
# to generate the list service and its documentation.
elem = 'server'
model = Server
label = 'a Zato server'
get_list_docs = 'Zato servers'
list_func = server_list
skip_output_params = ['token'] # Never return security tokens to API callers
def response_hook(self, input, _ignored_instance, attrs, service_type):
    """ Post-processing hook used by the generated GetList service - converts
    the datetime columns of each returned row to ISO-8601 strings.
    """
    # Only list responses carry rows that need the conversion
    if service_type != 'get_list':
        return

    for item in self.response.payload:
        for attr_name in ('last_join_mod_date', 'up_mod_date'):
            value = getattr(item, attr_name)
            if value:
                setattr(item, attr_name, value.isoformat())
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of Zato servers - the implementation is generated by
    GetListMeta from the module-level metadata above (elem, model, list_func, ...).
    """
    _filter_by = Server.name, # Column(s) that user-provided filters apply to
# ################################################################################################################################
class Edit(AdminService):
    """ Updates a server.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_server_edit_request'
        response_elem = 'zato_server_edit_response'
        input_required = ('id', 'name')
        output_required = ('id', 'cluster_id', 'name', 'host')
        output_optional = ('bind_host', 'bind_port', 'last_join_status',
            'last_join_mod_date', 'last_join_mod_by', 'up_status', 'up_mod_date')

    def handle(self):
        with closing(self.odb.session()) as session:

            # Reject the new name if another server (different ID) already uses it
            existing_one = session.query(Server).\
                filter(Server.id!=self.request.input.id).\
                filter(Server.name==self.request.input.name).\
                first()

            if existing_one:
                raise Exception('A server of that name `{}` already exists in this cluster'.format(self.request.input.name))

            try:
                item = session.query(Server).filter_by(id=self.request.input.id).one()
                item.name = self.request.input.name
                session.add(item)
                session.commit()

                self.response.payload = item

                # Datetime columns go out as ISO-8601 strings
                for name in('last_join_mod_date', 'up_mod_date'):
                    attr = getattr(self.response.payload, name, None)
                    if attr:
                        setattr(self.response.payload, name, attr.isoformat())

            except Exception:
                msg = 'Server could not be updated, id:`{}`, e:`{}`'.format(self.request.input.id, format_exc())
                self.logger.error(msg)
                session.rollback()

                raise
# ################################################################################################################################
class GetByID(AdminService):
    """ Returns a particular server by its ID.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_server_get_by_id_request'
        response_elem = 'zato_server_get_by_id_response'
        input_required = ('id',)
        output_required = ('id', 'cluster_id', 'name', 'host')
        output_optional = ('bind_host', 'bind_port', 'last_join_status',
            'last_join_mod_date', 'last_join_mod_by', 'up_status', 'up_mod_date')

    def get_data(self, session):
        """ Returns the single matching Server row - .one() raises if it does not exist.
        """
        return session.query(Server).\
            filter(Server.id==self.request.input.id).\
            one()

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload = self.get_data(session)

            # Datetime columns go out as ISO-8601 strings
            for name in('last_join_mod_date', 'up_mod_date'):
                attr = getattr(self.response.payload, name, None)
                if attr:
                    setattr(self.response.payload, name, attr.isoformat())
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes a server's definition from ODB (not from the filesystem).
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_server_delete_request'
        response_elem = 'zato_server_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                server = session.query(Server).\
                    filter(Server.id==self.request.input.id).\
                    one()

                # Sanity check - a server must never delete its own definition
                if server.id == self.server.id:
                    msg = 'A server cannot delete itself, id:`{}`, name:`{}`'.format(server.id, server.name)
                    self.logger.error(msg)
                    raise ZatoException(self.cid, msg)

                # This will cascade and delete every related object
                session.delete(server)
                session.commit()
            except Exception:
                session.rollback()
                msg = 'Could not delete the server, e:`{}`'.format(format_exc())
                self.logger.error(msg)

                raise
# ################################################################################################################################
| 5,775
|
Python
|
.py
| 119
| 38.806723
| 130
| 0.517254
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,253
|
pickup.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pickup.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import csv
import os
from pathlib import PurePath
from time import sleep
from traceback import format_exc
# Bunch
from bunch import Bunch
# Zato
from zato.common.api import EnvFile, FILE_TRANSFER
from zato.common.broker_message import ValueConstant, HOT_DEPLOY, MESSAGE_TYPE
from zato.common.typing_ import cast_, dataclass, from_dict, optional
from zato.common.util.api import get_config, get_user_config_name
from zato.common.util.open_ import open_r
from zato.server.service import Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, stranydict
stranydict = stranydict
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class UpdateCtx:
    """ Context of a single pickup update event - built from the request of _OnUpdate. """
    data: str # Raw contents of the picked-up file
    full_path: str # Absolute path to the file
    file_name: str # File name alone, without directories
    relative_dir: optional[str] # Directory relative to the server's base dir, if known
# ################################################################################################################################
# ################################################################################################################################
class _Logger(Service):
    """ Base class for pickup services that only log what was received -
    subclasses set pickup_data_type to label the log line.
    """
    pickup_data_type = None

    def handle(self) -> 'None':
        self.logger.info('%s data received: `%s`', self.pickup_data_type, self.request.raw_request)
# ################################################################################################################################
# ################################################################################################################################
class LogJSON(_Logger):
    """ Picks up JSON files and logs their contents.
    """
    pickup_data_type = 'JSON'
# ################################################################################################################################
# ################################################################################################################################
class LogXML(_Logger):
    """ Picks up XML files and logs their contents.
    """
    pickup_data_type = 'XML'
# ################################################################################################################################
# ################################################################################################################################
class LogCSV(Service):
    """ Picks up CSV files and logs their contents.

    Each row is logged on its own line, numbered from 1.
    """
    def handle(self) -> 'None':
        raw_request = cast_('stranydict', self.request.raw_request)

        with open_r(raw_request['full_path']) as csv_file:
            for line_number, row in enumerate(csv.reader(csv_file), 1):
                self.logger.info('CSV line #%s `%s`', line_number, row)
# ################################################################################################################################
# ################################################################################################################################
class _Updater(Service):
    """ Base class for pickup services that broadcast a picked-up file
    to all server processes - subclasses set pickup_action.
    """
    pickup_action: 'ValueConstant'

    def handle(self) -> 'None':

        raw_request = cast_('stranydict', self.request.raw_request)

        self.broker_client.publish({
            'action': self.pickup_action.value,
            'msg_type': MESSAGE_TYPE.TO_PARALLEL_ALL, # Every server process receives it
            'full_path': raw_request['full_path'],
            'file_name': raw_request['file_name'],
            'relative_dir': raw_request['relative_dir'],

            # We use raw_data to make sure we always have access
            # to what was saved in the file, even if it is not parsed.
            'data': raw_request['raw_data']
        })
# ################################################################################################################################
# ################################################################################################################################
class UpdateStatic(_Updater):
    """ Picks up static files and distributes them to all server processes.
    """
    pickup_action = HOT_DEPLOY.CREATE_STATIC
# ################################################################################################################################
# ################################################################################################################################
class UpdateUserConf(_Updater):
    """ Picks up user-defined config files and distributes them to all server processes.
    """
    pickup_action = HOT_DEPLOY.CREATE_USER_CONF
# ################################################################################################################################
# ################################################################################################################################
class UpdateEnmasse(Service):
    """ Runs an enmasse file if its contents is changed.
    """
    def handle(self) -> 'None':

        # Add type hints ..
        raw_request = cast_('stranydict', self.request.raw_request)

        # .. extract the path to the enmasse file ..
        enmasse_file_path = raw_request['full_path']

        # .. ignore files with environment variables ..
        if enmasse_file_path.endswith('env.ini'):
            return

        # .. and execute it now - asynchronously, without waiting for the result.
        _ = self.commands.run_enmasse_async(enmasse_file_path)
# ################################################################################################################################
# ################################################################################################################################
class _OnUpdate(Service):
    """ Updates user configuration in memory and file system.
    """
    # Human-readable label used in log messages; overridden by subclasses
    update_type = '<update-type-_OnUpdate>'

    class SimpleIO:
        input_required = ('data', 'full_path', 'file_name', 'relative_dir')

    def handle(self) -> 'None':

        # For later use
        ctx = from_dict(UpdateCtx, self.request.input) # type: UpdateCtx

        #
        # First, we need to combine relative_dir with our own server's root directory.
        # This is needed because other servers may be in different root directories
        # yet the relative_dir to the file will be the same.
        #
        # For instance, we can have server directories as such
        #
        # /home/zato/env/server1/pickup/incoming/user-conf
        # /zatoroot/server2/pickup/incoming/user-conf
        # C:\prod\zatoserver3\pickup\incoming\user-conf
        #
        # In each case relative_dir is the same  - pickup/incoming/user-conf (slashes do not matter) -
        # but the path leading to it may be different.
        #
        # However, if we do not have relative_dir on input, or if it is the default one,
        # meaning in either case that the event notifier could not build it,
        # we just use the full_path from input which will be always available.
        #

        # Use the full path from input ..
        if (not ctx.relative_dir) or (ctx.relative_dir == FILE_TRANSFER.DEFAULT.RelativeDir):
            full_path = ctx.full_path

        # Build relative_dir from its constituents
        else:

            relative_dir = PurePath(ctx.relative_dir)
            relative_dir_parts = relative_dir.parts

            #
            # Now, we can combine all the three elements to give us the full path to save the file under.
            #
            # * Our server directory
            # * The relative path to the file
            # * The actual file name
            #
            elems = []
            elems.extend(relative_dir_parts)
            elems.append(ctx.file_name)

            full_path = os.path.join(self.server.base_dir, *elems)

        # Assign the newly constructed full_path to our input for later use
        ctx.full_path = full_path

        #
        # We have a file on input and we want to save it. However, we cannot do it under the input file_name
        # because that would trigger hot-deployment again leading to an infinite loop
        # of hot-deployment of the same file over and over again.
        #
        # This is why we first (1) add the file name to a skiplist of ignored files,
        # so that our own local notifier does not want to hot-deploy it,
        # then (2) we save the file, and then (3) we remove the name from the ignored ones.
        #

        try:

            #
            # Step (1) - Add the file name to ignored ones
            #
            self.server.worker_store.file_transfer_api.add_local_ignored_path(ctx.full_path)

            #
            # Step (2) - Save the file
            #
            # The lock spans this server's name to serialize concurrent updates of the same path
            with self.lock('{}-{}-{}'.format(self.name, self.server.name, ctx.full_path)): # type: ignore
                with open(ctx.full_path, 'wb') as f:
                    _ = f.write(ctx.data.encode('utf8'))

                # Reusable
                update_type = self.get_update_type(ctx.full_path)

                try:
                    # The file is saved so we can update our in-RAM mirror of it ..
                    self.logger.info('Syncing in-RAM contents of `%s` (%s)', ctx.full_path, update_type)

                    # The file is saved on disk so we can call our handler function to post-process it.
                    self.sync_pickup_file_in_ram(ctx)

                except Exception:
                    self.logger.warning('Could not sync in-RAM contents of `%s`, e:`%s` (%s)',
                        ctx.full_path, format_exc(), update_type)
                else:
                    self.logger.info('Successfully finished syncing in-RAM contents of `%s` (%s)',
                        ctx.full_path, update_type)

        except Exception:
            self.logger.warning('Could not update file `%s`, e:`%s`', ctx.full_path, format_exc())

        #
        # Step (3) - Remove the file name from the ignored ones
        #
        finally:

            #
            # No matter what happened in step (2), we always remove the file from the ignored list.
            #

            # Sleep for a moment to make sure the local notifier loop does not attempt
            # to pick up the file again while we are modifying it.
            sleep(2)

            self.server.worker_store.file_transfer_api.remove_local_ignored_path(ctx.full_path)

# ################################################################################################################################

    def _get_update_type(self, file_path:'str') -> 'str':
        # Subclass hook - an empty string means "use the class-level update_type"
        return ''

# ################################################################################################################################

    def get_update_type(self, file_path:'str') -> 'str':
        # Prefer the subclass-provided label, fall back to the class-level one
        update_type = self._get_update_type(file_path) or self.update_type
        return update_type

# ################################################################################################################################

    def sync_pickup_file_in_ram(self, *args:'any_', **kwargs:'any_') -> 'None':
        raise NotImplementedError('Should be implemented by subclasses')
# ################################################################################################################################
# ################################################################################################################################
class OnUpdateUserConf(_OnUpdate):
    """ Updates user configuration in memory and file system.
    """
    update_type = 'user config file'

    def _is_env_file(self, file_path:'str') -> 'bool':
        # A file is an environment file if its path embeds the default env. file name
        return EnvFile.Default in file_path

# ################################################################################################################################

    def sync_pickup_file_in_ram(self, ctx:'UpdateCtx') -> 'None':

        # Environment files only refresh environment variables ..
        if self._is_env_file(ctx.full_path):
            self.server.update_environment_variables_from_file(ctx.full_path)

        # .. any other file carries regular user configuration.
        else:
            key = ctx.file_name
            base_dir = os.path.dirname(ctx.full_path)
            conf = get_config(base_dir, key, raise_on_error=True, log_exception=False)
            name = get_user_config_name(key)
            entry:'Bunch' = self.server.user_config.setdefault(name, Bunch())
            entry.clear()
            entry.update(conf)

# ################################################################################################################################

    def _get_update_type(self, file_path:'str') -> 'str':
        return EnvFile.Default if self._is_env_file(file_path) else self.update_type
# ################################################################################################################################
# ################################################################################################################################
class OnUpdateStatic(_OnUpdate):
    """ Updates a static resource in memory and file system.
    """
    # Label used in _OnUpdate's log messages
    update_type = 'static file'

    def sync_pickup_file_in_ram(self, ctx:'UpdateCtx') -> 'None':
        # Re-read the saved file into the server-wide static config cache
        _:'any_' = self.server.static_config.read_file(ctx.full_path, ctx.file_name)
# ################################################################################################################################
# ################################################################################################################################
| 14,089
|
Python
|
.py
| 248
| 48.870968
| 130
| 0.42827
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,254
|
info.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/info.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from base64 import b64decode
from contextlib import closing
# Zato
from zato.client import AnyServiceInvoker
from zato.common.api import INFO_FORMAT, MISC, SERVER_JOIN_STATUS, SERVER_UP_STATUS
from zato.common.component_info import format_info, get_info, get_worker_pids
from zato.common.const import ServiceConst
from zato.common.json_internal import dumps, loads
from zato.common.odb.query import server_list
from zato.common.util.config import get_url_protocol_from_config_item
from zato.server.service import List, Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
class GetInfo(Service):
    """ Like 'zato info' on command line but works across the whole cluster rather than with a single server.
    """
    def handle(self):

        # Let's prepare as much as we can upfront.
        sec_def:'any_' = self.server.worker_store.basic_auth_get(ServiceConst.API_Admin_Invoke_Username).config
        channel:'any_' = self.server.worker_store.get_channel_rest(MISC.DefaultAdminInvokeChannel)

        # Maps server names to the information collected about each of them
        out = {}

        # We assume that if the current server uses TLS or not,
        # the same will go for all the other servers in the cluster.
        api_protocol = get_url_protocol_from_config_item(self.server.fs_server_config.crypto.use_tls)

        with closing(self.odb.session()) as session:
            _server_list = server_list(session, self.server.cluster_id, None, None, False) # type: ignore
            for item in _server_list:
                server_info:'any_' = out.setdefault(item.name, {})
                server_info['cluster_name'] = item.cluster_name

                # Timestamps are reported only for servers in the matching status
                server_info['up_mod_date'] = item.up_mod_date.isoformat() if item.up_status == SERVER_UP_STATUS.RUNNING else None
                server_info['last_join_mod_date'] = item.last_join_mod_date.isoformat() if \
                    item.last_join_status == SERVER_JOIN_STATUS.ACCEPTED else None

                for name in 'id', 'name', 'bind_host', 'bind_port', 'last_join_status', 'last_join_mod_by', 'up_status':
                    server_info[name] = getattr(item, name)

                # Only running servers can be queried for detailed runtime information
                if item.up_status == SERVER_UP_STATUS.RUNNING:
                    address = f'{api_protocol}://{item.bind_host}:{item.bind_port}'
                    auth = (sec_def.username, sec_def.password) # type: ignore

                    client = AnyServiceInvoker(address, channel.url_path, auth=auth)
                    response = client.invoke('zato.info.get-server-info')

                    if response.ok:
                        # Unwrap the invoke envelope - the inner payload is Base64-encoded JSON
                        response = loads(response.inner.text)['zato_service_invoke_response']['response']
                        response = b64decode(response)
                        response = loads(response)['response']
                        server_info['info'] = loads(response['info'])
                    else:
                        self.logger.warning(response)

        self.response.content_type = 'application/json'
        self.response.payload = dumps(out)
# ################################################################################################################################
class GetServerInfo(Service):
    """ Collects information about a server it's invoked on.
    """
    class SimpleIO:
        output_required = ('info',)

    def handle(self):
        # Collect the information in JSON and format it as JSON too
        server_info = get_info(self.server.base_dir, INFO_FORMAT.JSON)
        self.response.content_type = 'application/json'
        self.response.payload.info = format_info(server_info, INFO_FORMAT.JSON)
# ################################################################################################################################
class GetWorkerPids(Service):
    """ Returns PIDs of all processes of current server.
    """
    output:'any_' = List('pids')

    def handle(self):
        # PIDs are discovered based on the server's base directory
        base_dir = self.server.base_dir
        self.response.payload.pids = get_worker_pids(base_dir)
# ################################################################################################################################
| 4,640
|
Python
|
.py
| 75
| 53.053333
| 130
| 0.521481
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,255
|
crypto.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/crypto.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.server.service import Integer, Service
# ################################################################################################################################
class Encrypt(Service):
    """ Encrypts data given on input using the server's default key.
    """
    class SimpleIO:
        input_required = ('clear_text',)
        output_required = ('encrypted',)

    def handle(self):
        # The crypto API operates on bytes, hence the explicit UTF-8 encoding
        clear_text = self.request.input.clear_text
        self.response.payload.encrypted = self.crypto.encrypt(clear_text.encode('utf8'))
# ################################################################################################################################
class Decrypt(Service):
    """ Decrypts data previously encrypted using the server's default key.
    """
    class SimpleIO:
        input_required = ('encrypted',)
        output_required = ('clear_text',)

    def handle(self):
        # Hand the input off to the server's crypto facade as-is
        encrypted = self.request.input.encrypted
        self.response.payload.clear_text = self.crypto.decrypt(encrypted)
# ################################################################################################################################
class HashSecret(Service):
    """ Hashes a secret using the server's default key
    """
    # NOTE(review): only the SIO contract is declared here, there is no handle
    # method - presumably the implementation lives elsewhere - TODO confirm.
    class SimpleIO:
        input_required = ('clear_text',)
        output_required = ('hashed',)
# ################################################################################################################################
class VerifyHash(Service):
    """ Returns a boolean flag indicating if given input matches the expected hash.
    """
    # NOTE(review): SIO-only declaration, no handle method here - TODO confirm
    # where the implementation is provided.
    class SimpleIO:
        input_required = ('given', 'expected')
        output_required = ('is_valid',)
# ################################################################################################################################
class GenerateSecret(Service):
    """ Generates a new secret of input bits strength.
    """
    # NOTE(review): SIO-only declaration, no handle method here - TODO confirm
    # where the implementation is provided.
    class SimpleIO:
        input_required = (Integer('bits'),)
        output_required = ('secret',)
# ################################################################################################################################
class GeneratePassword(Service):
    """ Generates a new password of input bits strength.
    """
    # NOTE(review): SIO-only declaration, no handle method here - TODO confirm
    # where the implementation is provided.
    class SimpleIO:
        input_required = (Integer('bits'),)
        output_required = ('password',)
# ################################################################################################################################
| 2,697
|
Python
|
.py
| 55
| 44.2
| 130
| 0.440335
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,256
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sms/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,257
|
twilio.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sms/twilio.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import SMS
from zato.common.odb.model import SMSTwilio
from zato.common.odb.query import sms_twilio, sms_twilio_list
from zato.server.service.internal import AdminService, AdminSIO
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.service import Service
Bunch = Bunch
Service = Service
# ################################################################################################################################
# Metadata consumed by the GetListMeta / CreateEditMeta / DeleteMeta
# metaclasses applied to the service classes below.
elem = 'sms_twilio'                  # SIO element prefix for generated services
model = SMSTwilio                    # ODB model the services operate on
label = 'a Twilio connection'        # Human-readable label for messages
get_list_docs = 'Twilio connections' # Docstring fragment used by GetList
broker_message = SMS                 # Broker message class to publish with
broker_message_prefix = 'TWILIO_'    # Prefix of actions within broker_message
list_func = sms_twilio_list          # Query function used by GetList
skip_input_params = ['is_internal']  # SIO parameters excluded from input
# ################################################################################################################################
def response_hook(self, input, instance, attrs, service_type):
    # type: (Service, Bunch, SMSTwilio, Bunch, str) -> None
    """ Invoked by the metaclasses after a response is built - decrypts
    auth tokens in list responses so callers receive them in clear text.
    """
    if service_type == 'get_list':
        for item in self.response.payload: # type: SMSTwilio
            item.auth_token = self.server.decrypt(item.auth_token)
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    # Column(s) the generated list query can filter by
    _filter_by = SMSTwilio.name,
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    # Entire implementation is generated by CreateEditMeta
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    # Entire implementation is generated by CreateEditMeta
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    # Entire implementation is generated by DeleteMeta
    pass
# ################################################################################################################################
class Get(AdminService):
    """ Returns details of an SMS Twilio connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_sms_twilio_get_request'
        response_elem = 'zato_sms_twilio_get_response'
        input_required = ('cluster_id', 'id')
        output_required = ('name', 'is_active', 'account_sid', 'auth_token')
        output_optional = ('default_from', 'default_to')

    def handle(self):
        # Look the connection up by its cluster and ID
        input = self.request.input
        with closing(self.odb.session()) as session:
            self.response.payload = sms_twilio(session, input.cluster_id, input.id)
# ################################################################################################################################
class SendMessage(AdminService):
    """ Sends a text message through an SMS Twilio connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_sms_twilio_send_message_request'
        response_elem = 'zato_sms_twilio_send_message_response'
        input_required = ('cluster_id', 'id', 'from_', 'to', 'body')

    def handle(self):
        # Resolve the connection's name from its ID via the Get service ..
        item = self.invoke(Get.get_name(), payload=self.request.input, as_bunch=True).zato_sms_twilio_get_response

        # .. and send the message through the connection of that name.
        msg = self.out.sms.twilio[item.name].conn.send(self.request.input.body, self.request.input.to, self.request.input.from_)
        self.logger.info('Sent message %s', msg)
# ################################################################################################################################
| 4,110
|
Python
|
.py
| 79
| 48.113924
| 130
| 0.487878
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,258
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cloud/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,259
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cloud/openstack/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,260
|
swift.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cloud/openstack/swift.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.service.internal import AdminService
class GetList(AdminService):
    # Always returns an empty JSON list - OpenStack Swift support is stubbed out
    def handle(self):
        self.response.payload = '[]'
class Create(AdminService):
    # No-op stub - OpenStack Swift support is not implemented
    pass
class Edit(AdminService):
    # No-op stub - OpenStack Swift support is not implemented
    pass
class Delete(AdminService):
    # No-op stub - OpenStack Swift support is not implemented
    pass
| 417
|
Python
|
.py
| 16
| 23.125
| 64
| 0.728426
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,261
|
s3.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cloud/aws/s3.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
# Zato
from zato.common.broker_message import CLOUD
from zato.common.odb.model import AWSS3
from zato.common.odb.query import cloud_aws_s3_list
from zato.server.service import Bool, SIOElem, Int
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
class GetList(AdminService):
    """ Returns a list of AWS S3 connections.
    """
    _filter_by = AWSS3.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_cloud_aws_s3_get_list_request'
        response_elem = 'zato_cloud_aws_s3_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'pool_size', 'address', Int('debug_level'), Bool('suppr_cons_slashes'),
            'content_type', 'security_id', Bool('encrypt_at_rest'), 'storage_class')
        output_optional = ('metadata_', 'bucket')

    def get_data(self, session):
        # Delegate to the shared paginating search helper
        cluster_id = self.request.input.cluster_id
        return self._search(cloud_aws_s3_list, session, cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            self.response.payload[:] = data
class Create(AdminService):
    """ Creates a new AWS S3 connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_cloud_aws_s3_create_request'
        response_elem = 'zato_cloud_aws_s3_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'pool_size', 'address', Int('debug_level'),
            Bool('suppr_cons_slashes'), 'content_type', 'security_id', Bool('encrypt_at_rest'), 'storage_class')
        input_optional = ('metadata_', 'bucket')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # Reject duplicates - connection names are unique within a cluster
            existing_one = session.query(AWSS3.id).\
                filter(AWSS3.cluster_id==input.cluster_id).\
                filter(AWSS3.name==input.name).\
                first()

            if existing_one:
                raise Exception('An AWS S3 connection [{0}] already exists on this cluster'.format(input.name))

            try:
                item = AWSS3()

                # Copy each SIO element to the new ODB row - SIOElem instances
                # carry the actual element name in their .name attribute.
                for name in self.SimpleIO.input_required + self.SimpleIO.input_optional:
                    if isinstance(name, SIOElem):
                        name = name.name
                    setattr(item, name, self.request.input.get(name))

                session.add(item)
                session.commit()

                # Notify all server processes so each can build the connection in RAM
                input.action = CLOUD.AWS_S3_CREATE_EDIT.value
                input.id = item.id
                input.username = item.security.username
                input.password = item.security.password
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                msg = 'Could not create an AWS S3 connection, e:`{}`'.format(format_exc())
                self.logger.error(msg)
                session.rollback()

                raise
class Edit(AdminService):
    """ Updates an AWS S3 connection.
    """
    class SimpleIO(Create.SimpleIO):
        request_elem = 'zato_cloud_aws_s3_edit_request'
        response_elem = 'zato_cloud_aws_s3_edit_response'
        input_required = ('id',) + Create.SimpleIO.input_required

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # Reject renames that would clash with another connection's name
            existing_one = session.query(AWSS3.id).\
                filter(AWSS3.cluster_id==input.cluster_id).\
                filter(AWSS3.name==input.name).\
                filter(AWSS3.id!=input.id).\
                first()

            if existing_one:
                raise Exception('An AWS S3 connection [{0}] already exists on this cluster'.format(input.name))

            try:
                item = session.query(AWSS3).filter_by(id=input.id).one()
                old_name = item.name

                # Copy each SIO element onto the existing ODB row - SIOElem
                # instances carry the actual element name in their .name attribute.
                for name in self.SimpleIO.input_required + self.SimpleIO.input_optional:
                    if isinstance(name, SIOElem):
                        name = name.name
                    setattr(item, name, self.request.input.get(name))

                session.add(item)
                session.commit()

                # Notify all server processes - old_name lets them find the
                # previous in-RAM definition even if the connection was renamed.
                input.action = CLOUD.AWS_S3_CREATE_EDIT.value
                input.old_name = old_name
                input.id = item.id
                input.username = item.security.username
                input.password = item.security.password
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                msg = 'Could not update the AWS S3 connection, e:`{}`'.format(format_exc())
                self.logger.error(msg)
                session.rollback()

                raise
class Delete(AdminService):
    """ Deletes an AWS S3 connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_cloud_aws_s3_delete_request'
        response_elem = 'zato_cloud_aws_s3_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(AWSS3).\
                    filter(AWSS3.id==self.request.input.id).\
                    one()

                session.delete(item)
                session.commit()

                # Let all server processes know the connection is gone
                msg = {'action': CLOUD.AWS_S3_DELETE.value, 'name': item.name, 'id':item.id}
                self.broker_client.publish(msg)

            except Exception:
                session.rollback()
                msg = 'Could not delete the AWS S3 connection, e:`{}`'.format(format_exc())
                self.logger.error(msg)

                raise
| 6,126
|
Python
|
.py
| 131
| 34.954198
| 125
| 0.588186
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,262
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cloud/aws/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,263
|
redis.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/redis.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
# Zato
from zato.common.util.config import get_config_object, update_config_file
from zato.server.service import AsIs, Bool, Int, SIOElem
from zato.server.service.internal import AdminService, ChangePasswordBase
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.ext.configobj_ import ConfigObj
from zato.common.typing_ import any_, anylist, strdict
Bunch = Bunch
ConfigObj = ConfigObj
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    # Constant ID reported for the single, implicit default Redis connection
    StaticID = 12345
# ################################################################################################################################
# ################################################################################################################################
def set_kvdb_config(server_config:'strdict', input_data:'Bunch', redis_sentinels:'str') -> 'None':
    """ Populates the kvdb section of a server configuration dict in place,
    based on user-provided input and a pre-formatted sentinels value.
    Empty host/port/db values are stored as empty strings, not zeros.
    """
    kvdb = server_config['kvdb']
    kvdb['host'] = input_data.host or ''
    kvdb['port'] = int(input_data.port) if input_data.port else ''
    kvdb['db'] = int(input_data.db) if input_data.db else ''
    kvdb['use_redis_sentinels'] = input_data.use_redis_sentinels
    kvdb['redis_sentinels'] = redis_sentinels
    kvdb['redis_sentinels_master'] = input_data.redis_sentinels_master or ''
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns the single, implicit default Redis connection, built from server.conf.
    """
    class SimpleIO:
        input_optional = 'id', 'name'
        output_optional = AsIs('id'), 'is_active', 'name', 'host', Int('port'), 'db', Bool('use_redis_sentinels'), \
            AsIs('redis_sentinels'), 'redis_sentinels_master'
        default_value = None
        response_elem = None

# ################################################################################################################################

    def get_data(self) -> 'anylist':

        # Response to produce
        out = []

        # For now, we only return one item containing data read from server.conf
        item = {
            'id': ModuleCtx.StaticID,
            'name': 'default',
            'is_active': True,
        }

        config = get_config_object(self.server.repo_location, 'server.conf')
        config = config['kvdb']

        for elem in self.SimpleIO.output_optional:

            # Extract the embedded name or use it as is
            name = elem.name if isinstance(elem, SIOElem) else elem # type: ignore

            # These will not exist in server.conf
            if name in ('id', 'is_active', 'name'):
                continue

            value = config[name] # type: ignore

            # Sentinels are stored as a list in the config file but reported
            # to callers as a single newline-separated string.
            if name == 'redis_sentinels':
                value = '\n'.join(value)

            # Add it to output
            item[name] = value

        # Add our only item to response
        out.append(item)

        return out

# ################################################################################################################################

    def handle(self) -> 'None':
        self.response.payload[:] = self.get_data()
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
    """ Updates the default Redis connection, both on disk and in RAM.
    """
    class SimpleIO:
        input_optional = AsIs('id'), 'name', Bool('use_redis_sentinels')
        input_required = 'host', 'port', 'db', 'redis_sentinels', 'redis_sentinels_master'
        output_required = 'id', 'name'
        response_elem = None

    def handle(self) -> 'None':

        # Local alias
        input = self.request.input

        # If provided, turn sentinels configuration into a format expected by the underlying KVDB object
        redis_sentinels:'any_' = input.redis_sentinels or ''
        if redis_sentinels:
            redis_sentinels = redis_sentinels.splitlines()
            redis_sentinels = [str(elem).strip() for elem in redis_sentinels]

        # First, update the persistent configuration on disk ..
        config = get_config_object(self.server.repo_location, 'server.conf')
        set_kvdb_config(config, input, redis_sentinels)

        server_conf_path = os.path.join(self.server.repo_location, 'server.conf')
        with open(server_conf_path, 'wb') as f:
            _ = config.write(f) # type: ignore

        # .. assign new in-RAM server-wide configuration ..
        set_kvdb_config(self.server.fs_server_config, input, redis_sentinels)

        # .. and rebuild the Redis connection object.
        self.server.kvdb.reconfigure(self.server.fs_server_config.kvdb)

        # Our callers expect these two
        self.response.payload.id = ModuleCtx.StaticID
        self.response.payload.name = self.request.input.name
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of a Redis connection
    """
    # Redis connections may legitimately have no password at all
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        pass

    def handle(self):

        # Local alias
        input = self.request.input

        # Encryption requires bytes
        password = (input.password1 or '').encode('utf8')

        # Now, encrypt the input password
        password = self.crypto.encrypt(password, needs_str=True)

        # Find our secrets config
        config = get_config_object(self.server.repo_location, 'secrets.conf')

        # Set the new secret
        config['zato']['server_conf.kvdb.password'] = password # type: ignore

        # Update the on-disk configuration
        update_config_file(config, self.server.repo_location, 'secrets.conf') # type: ignore

        # Change in-RAM password
        self.server.kvdb.set_password(password) # type: ignore

        self.response.payload.id = self.request.input.id
# ################################################################################################################################
# ################################################################################################################################
| 7,192
|
Python
|
.py
| 125
| 50.312
| 130
| 0.456438
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,264
|
sftp.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/sftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
# Zato
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import GenericConn as ModelGenericConn
from zato.common.util.sql import get_instance_by_id
from zato.server.service import Int
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
# ################################################################################################################################
class Execute(AdminService):
    """ Executes SFTP command(s) using a relevant connector.
    """
    class SimpleIO(AdminSIO):
        input_required = 'id', 'data', Int('log_level')
        output_optional = 'response_time', 'stdout', 'stderr', 'command_no'
        response_elem = None

    def handle(self):
        # NOTE(review): `msg` is built here but never published or otherwise used
        # below — possibly a leftover from an earlier broker-based implementation;
        # confirm before removing.
        msg = self.request.input.deepcopy()
        msg['action'] = OUTGOING.SFTP_EXECUTE.value
        msg['cid'] = self.cid

        with closing(self.odb.session()) as session:
            # Look up the generic connection definition to resolve its name ..
            instance = get_instance_by_id(session, ModelGenericConn, self.request.input.id)

            # .. then execute the commands through the matching outgoing SFTP connector.
            conn = self.out.sftp[instance.name].conn
            response = conn.execute(self.request.input.data, self.request.input.log_level)
            self.response.payload = response.to_dict()
# ################################################################################################################################
# ################################################################################################################################
| 1,863
|
Python
|
.py
| 34
| 49.970588
| 130
| 0.506322
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,265
|
sap.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/sap.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from time import time
from uuid import uuid4
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import OutgoingSAP
from zato.common.odb.query import out_sap_list
from zato.common.util.api import ping_sap
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Configuration consumed by the service metaclasses below
# (GetListMeta, CreateEditMeta, DeleteMeta).
elem = 'email_imap' # NOTE(review): looks like a copy/paste leftover from the IMAP module — confirm whether this should be an SAP-specific element name
model = OutgoingSAP
label = 'a SAP RFC connection'
get_list_docs = 'SAP RFC connections'
broker_message = OUTGOING
broker_message_prefix = 'SAP_'
list_func = out_sap_list
# Passwords are managed via ChangePassword, not via create/edit input
skip_input_params = ['password']
# ################################################################################################################################
def instance_hook(service, input, instance, attrs):
    """ Invoked by the meta-service machinery for each ODB instance — assigns
    a random initial password when the connection is being created.
    """
    service_name = service.get_name().lower()
    is_create = 'create' in service_name

    if is_create:
        instance.password = uuid4().hex
# ################################################################################################################################
def broker_message_hook(service, input, instance, attrs, service_type):
    """ Invoked before a broker message is published — for create/edit events,
    copies the instance's password into the outgoing message.
    """
    if service_type != 'create_edit':
        return
    input.password = instance.password
# ################################################################################################################################
# Implementation generated by GetListMeta from the module-level config above
@add_metaclass(GetListMeta)
class GetList(AdminService):
    # Column(s) the server-side search filter may match against
    _filter_by = OutgoingSAP.name,
# ################################################################################################################################
# Implementation generated by CreateEditMeta from the module-level config above
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    pass
# ################################################################################################################################
# Implementation generated by CreateEditMeta from the module-level config above
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    pass
# ################################################################################################################################
# Implementation generated by DeleteMeta from the module-level config above
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    pass
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an SAP connection
    """
    # An SAP connection may be defined without a password
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_outgoing_sap_change_password_request'
        response_elem = 'zato_outgoing_sap_change_password_response'

    def handle(self):
        def _auth(instance, password):
            # Assign the new password to the ODB instance in place
            instance.password = password
        # Publish the listed attributes so connectors can rebuild the connection
        return self._handle(OutgoingSAP, _auth, OUTGOING.SAP_CHANGE_PASSWORD.value,
            publish_instance_attrs=['host', 'sysnr', 'client', 'sysid', 'user', 'password', 'router', 'pool_size'])
# ################################################################################################################################
class Ping(AdminService):
    """ Pings a SAP connection to check its configuration.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_sap_ping_request'
        response_elem = 'zato_outgoing_sap_ping_response'
        input_required = ('id',)
        output_required = ('info',)

    def handle(self):
        with closing(self.odb.session()) as session:
            # Resolve the connection's name from its ODB ID ..
            item = session.query(OutgoingSAP).filter_by(id=self.request.input.id).one()

            # .. and time a round-trip ping through the live client.
            with self.outgoing.sap[item.name].conn.client() as client:
                start_time = time()
                ping_sap(client)
                response_time = time() - start_time

                self.response.payload.info = 'Ping OK, took:`{0:03.4f} s`'.format(response_time)
# ################################################################################################################################
| 4,232
|
Python
|
.py
| 83
| 46.698795
| 130
| 0.49344
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,266
|
sql.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/sql.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from operator import itemgetter
from traceback import format_exc
from uuid import uuid4
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# Zato
from zato.common.api import ZATO_ODB_POOL_NAME
from zato.common.exception import ZatoException
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import Cluster, SQLConnectionPool
from zato.common.odb.query import out_sql_list
from zato.common.util.api import get_sql_engine_display_name
from zato.server.service import AsIs, Integer
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
class _SQLService:
    """ A common class for various SQL-related services.
    """
    def notify_worker_threads(self, params, action=OUTGOING.SQL_CREATE_EDIT.value):
        """ Notify worker threads of new or updated parameters.
        """
        params['action'] = action
        self.broker_client.publish(params)

    def validate_extra(self, cid, extra):
        """ Raises ZatoException unless `extra` is empty or contains at least
        one key=value pair.
        """
        # Idiomatic membership test (was: `not '=' in extra`)
        if extra and '=' not in extra:
            raise ZatoException(cid,
                'extra should be a list of key=value parameters, possibly one-element long, instead of `{}`'.format(
                    extra))
class GetList(AdminService):
    """ Returns a list of outgoing SQL connections.
    """
    _filter_by = SQLConnectionPool.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_outgoing_sql_get_list_request'
        response_elem = 'zato_outgoing_sql_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'cluster_id', 'engine', 'host', Integer('port'), 'db_name', 'username',
            Integer('pool_size'))
        output_optional = ('extra', 'engine_display_name')

    def get_data(self, session):
        return self._search(out_sql_list, session, self.request.input.cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            for item in data:
                # `extra` is stored as bytes in the ODB — decode for the response
                item.extra = item.extra.decode('utf8') if isinstance(item.extra, bytes) else item.extra
                # Human-friendly engine name resolved from sql.conf
                item.engine_display_name = get_sql_engine_display_name(item.engine, self.server.fs_sql_config)
            self.response.payload[:] = data
class Create(AdminService, _SQLService):
    """ Creates a new outgoing SQL connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_sql_create_request'
        response_elem = 'zato_outgoing_sql_create_response'
        input_required = ('name', 'is_active', 'cluster_id', 'engine', 'host', Integer('port'), 'db_name', 'username',
            Integer('pool_size'))
        input_optional = ('extra',)
        output_required = ('id', 'name', 'display_name')

    def handle(self):
        input = self.request.input

        # New connections receive a random password; it can be changed later via ChangePassword
        input.password = uuid4().hex

        # `extra` is stored as bytes; validate its decoded form
        input.extra = input.extra.encode('utf-8') if input.extra else b''
        self.validate_extra(self.cid, input.extra.decode('utf-8'))

        with closing(self.odb.session()) as session:
            # Names must be unique within a cluster
            existing_one = session.query(SQLConnectionPool.id).\
                filter(SQLConnectionPool.cluster_id==input.cluster_id).\
                filter(SQLConnectionPool.name==input.name).\
                first()

            if existing_one:
                raise Exception('An outgoing SQL connection [{0}] already exists on this cluster'.format(input.name))

            try:
                cluster = session.query(Cluster).filter_by(id=input.cluster_id).one()

                item = SQLConnectionPool(cluster=cluster)
                item.name = input.name
                item.is_active = input.is_active
                item.engine = input.engine
                item.host = input.host
                item.port = input.port
                item.db_name = input.db_name
                item.username = input.username
                item.password = input.password
                item.pool_size = input.pool_size
                item.extra = input.extra

                session.add(item)
                session.commit()

                # Make sure not to use bytes when notifying other threads
                input.extra = input.extra.decode('utf8') if isinstance(input.extra, bytes) else input.extra

                self.notify_worker_threads(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name
                self.response.payload.display_name = get_sql_engine_display_name(input.engine, self.server.fs_sql_config)

            except Exception:
                self.logger.error('SQL connection could not be created, e:`{}`', format_exc())
                session.rollback()
                raise
class Edit(AdminService, _SQLService):
    """ Updates an outgoing SQL connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_sql_edit_request'
        response_elem = 'zato_outgoing_sql_edit_response'
        input_required = ('id', 'name', 'is_active', 'cluster_id', 'engine', 'host', Integer('port'), 'db_name', 'username',
            Integer('pool_size'))
        input_optional = ('extra',)
        output_required = ('id', 'name', 'display_name')

    def handle(self):
        input = self.request.input

        # `extra` is stored as bytes; validate its decoded form
        input.extra = input.extra.encode('utf-8') if input.extra else b''
        self.validate_extra(self.cid, input.extra.decode('utf-8'))

        with closing(self.odb.session()) as session:
            # Reject a rename that would collide with another connection in this cluster
            existing_one = session.query(SQLConnectionPool.id).\
                filter(SQLConnectionPool.cluster_id==input.cluster_id).\
                filter(SQLConnectionPool.name==input.name).\
                filter(SQLConnectionPool.id!=input.id).\
                first()

            if existing_one:
                raise Exception('An outgoing SQL connection [{0}] already exists on this cluster'.format(input.name))

            try:
                item = session.query(SQLConnectionPool).filter_by(id=input.id).one()
                old_name = item.name
                item.name = input.name
                item.is_active = input.is_active
                item.cluster_id = input.cluster_id
                item.engine = input.engine
                item.host = input.host
                item.port = input.port
                item.db_name = input.db_name
                item.username = input.username
                item.pool_size = input.pool_size
                # `extra` was already encoded above, so this is a no-op safeguard
                item.extra = input.extra.encode('utf8') if isinstance(input.extra, unicode) else input.extra

                session.add(item)
                session.commit()

                # Worker threads need the (unchanged) password and the previous name
                input.password = item.password
                input.old_name = old_name

                # Make sure not to use bytes when notifying other threads
                input.extra = input.extra.decode('utf8') if isinstance(input.extra, bytes) else input.extra

                self.notify_worker_threads(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name
                self.response.payload.display_name = get_sql_engine_display_name(input.engine, self.server.fs_sql_config)

            except Exception:
                self.logger.error('SQL connection could not be updated, e:`{}`', format_exc())
                session.rollback()
                raise
class Delete(AdminService, _SQLService):
    """ Deletes an outgoing SQL connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_sql_delete_request'
        response_elem = 'zato_outgoing_sql_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(SQLConnectionPool).\
                    filter(SQLConnectionPool.id==self.request.input.id).\
                    one()
                # Remember the name before deletion so workers can drop the pool by name
                old_name = item.name
                session.delete(item)
                session.commit()

                self.notify_worker_threads({'name':old_name}, OUTGOING.SQL_DELETE.value)

            except Exception:
                session.rollback()
                self.logger.error('SQL connection could not be deleted, e:`{}`', format_exc())
                raise
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an outgoing SQL connection.
    """
    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_outgoing_sql_change_password_request'
        response_elem = 'zato_outgoing_sql_change_password_response'

    def handle(self):
        def _auth(instance, password):
            # Assign the new password to the ODB instance in place
            instance.password = password
        self._handle(SQLConnectionPool, _auth, OUTGOING.SQL_CHANGE_PASSWORD.value)
class Ping(AdminService):
    """ Pings an SQL database
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_sql_ping_request'
        response_elem = 'zato_outgoing_sql_ping_response'
        input_required = 'id', 'should_raise_on_error'
        output_optional = 'id', 'response_time'

    def handle(self):
        with closing(self.odb.session()) as session:

            # Initialized upfront so the except branch below cannot raise
            # NameError when the ODB query itself is what failed.
            item = None

            try:
                item = session.query(SQLConnectionPool).\
                    filter(SQLConnectionPool.id==self.request.input.id).\
                    one()
                ping = self.outgoing.sql.get(item.name, False).pool.ping
                self.response.payload.id = self.request.input.id
                response_time = ping(self.server.fs_sql_config)
                if response_time:
                    self.response.payload.response_time = str(response_time)
            except Exception as e:

                # Always roll back ..
                session.rollback()

                # If the lookup itself failed there is no name to report - fall back to the input ID
                conn_name = item.name if item is not None else 'id=`{}`'.format(self.request.input.id)

                # .. and log or raise, depending on what we are instructed to do.
                log_msg = 'SQL connection `{}` could not be pinged, e:`{}`'

                if self.request.input.should_raise_on_error:
                    self.logger.warning(log_msg.format(conn_name, format_exc()))
                    raise e
                else:
                    # Guard against exceptions raised without arguments
                    self.logger.warning(log_msg.format(conn_name, e.args[0] if e.args else e))
class AutoPing(AdminService):
    """ Invoked periodically from the scheduler - pings all the existing SQL connections.
    """
    def handle(self):
        # First, ping the server's own ODB pool - a failure here is logged but does not stop the run
        try:
            self.server.sql_pool_store[ZATO_ODB_POOL_NAME].pool.ping(self.server.fs_sql_config)
        except Exception:
            self.logger.warning('Could not ping ODB, e:`%s`', format_exc())

        # Collect all outgoing SQL connections defined for this cluster
        response = self.invoke(GetList.get_name(), {'cluster_id':self.server.cluster_id})
        response = response['zato_outgoing_sql_get_list_response']

        for item in response:

            # Skip inactive connections
            if not item.get('is_active'):
                continue

            # Ping each connection individually; one failure must not abort the others
            try:
                self.invoke(Ping.get_name(), {
                    'id': item['id'],
                    'should_raise_on_error': False,
                })
            except Exception:
                self.logger.warning('Could not auto-ping SQL pool `%s`, config:`%s`, e:`%s`', item['name'], item, format_exc())
class GetEngineList(AdminService):
    """ Returns a list of all engines defined in sql.conf.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_sql_get_engine_list_request'
        response_elem = 'zato_outgoing_sql_get_engine_list_response'
        output_required = (AsIs('id'), 'name')
        output_repeated = True

    def get_data(self):
        """ Builds a list of {'id', 'name'} dicts out of sql.conf, sorted by display name.
        """
        out = []

        # Renamed from `id` so as not to shadow the builtin of the same name
        for engine_id, value in self.server.fs_sql_config.items():
            out.append({
                'id': engine_id,
                'name': value['display_name']
            })

        return sorted(out, key=itemgetter('name'))

    def handle(self):
        self.response.payload[:] = self.get_data()
| 12,036
|
Python
|
.py
| 248
| 37.157258
| 127
| 0.60689
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,267
|
odoo.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/odoo.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from six import add_metaclass
# stdlib
from contextlib import closing
from time import time
from uuid import uuid4
# Zato
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import OutgoingOdoo
from zato.common.odb.query import out_odoo_list
from zato.common.util.api import ping_odoo
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Configuration consumed by the service metaclasses below
# (GetListMeta, CreateEditMeta, DeleteMeta).
elem = 'email_imap' # NOTE(review): looks like a copy/paste leftover from the IMAP module — confirm whether this should be an Odoo-specific element name
model = OutgoingOdoo
label = 'an Odoo connection'
get_list_docs = 'Odoo connections'
broker_message = OUTGOING
broker_message_prefix = 'ODOO_'
list_func = out_odoo_list
# Passwords are managed via ChangePassword; client_type is not user-editable
skip_input_params = ['password', 'client_type']
# ################################################################################################################################
def instance_hook(service, input, instance, attrs):
    """ Invoked by the meta-service machinery for each ODB instance — assigns
    a random initial password when the connection is being created.
    """
    if 'create' not in service.get_name().lower():
        return
    instance.password = uuid4().hex
# ################################################################################################################################
def broker_message_hook(service, input, instance, attrs, service_type):
    """ Invoked before a broker message is published — for create/edit events,
    copies the instance's password into the outgoing message.
    """
    is_create_edit = service_type == 'create_edit'
    if is_create_edit:
        input.password = instance.password
# ################################################################################################################################
# Implementation generated by GetListMeta from the module-level config above
@add_metaclass(GetListMeta)
class GetList(AdminService):
    # Column(s) the server-side search filter may match against
    _filter_by = OutgoingOdoo.name,
# ################################################################################################################################
# Implementation generated by CreateEditMeta from the module-level config above
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    pass
# ################################################################################################################################
# Implementation generated by CreateEditMeta from the module-level config above
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    pass
# ################################################################################################################################
# Implementation generated by DeleteMeta from the module-level config above
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    pass
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an Odoo connection
    """
    # An Odoo connection may be defined without a password
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_outgoing_odoo_change_password_request'
        response_elem = 'zato_outgoing_odoo_change_password_response'

    def handle(self):
        def _auth(instance, password):
            # Assign the new password to the ODB instance in place
            instance.password = password
        # Publish the listed attributes so connectors can rebuild the connection
        return self._handle(OutgoingOdoo, _auth, OUTGOING.ODOO_CHANGE_PASSWORD.value,
            publish_instance_attrs=['host', 'protocol', 'port', 'database', 'user', 'password', 'pool_size', 'is_active'])
# ################################################################################################################################
class Ping(AdminService):
    """ Pings an Odoo connection to check its configuration.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_odoo_ping_request'
        response_elem = 'zato_outgoing_odoo_ping_response'
        input_required = ('id',)
        output_required = ('info',)

    def handle(self):
        with closing(self.odb.session()) as session:
            # Resolve the connection's name from its ODB ID ..
            item = session.query(OutgoingOdoo).filter_by(id=self.request.input.id).one()

            # .. and time a round-trip ping through the live client.
            with self.outgoing.odoo[item.name].conn.client() as client:
                start_time = time()
                ping_odoo(client)
                response_time = time() - start_time

                self.response.payload.info = 'Ping OK, took:`{0:03.4f} s`'.format(response_time)
# ################################################################################################################################
| 4,268
|
Python
|
.py
| 83
| 47.13253
| 130
| 0.496869
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,268
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,269
|
ftp.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/ftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
# Zato
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import OutgoingFTP
from zato.common.odb.query import out_ftp, out_ftp_list
from zato.common.util.sql import elems_with_opaque, set_instance_opaque_attrs
from zato.server.service import Boolean
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
# Output elements shared by GetByID and GetList below
_get_output_required = 'id', 'name', 'is_active', 'host', 'port'
_get_output_optional = 'user', 'acct', 'timeout', Boolean('dircache'), 'default_directory'
# ################################################################################################################################
# ################################################################################################################################
class _FTPService(AdminService):
    """ A common class for various FTP-related services.
    """
    def notify_worker_threads(self, params, action=OUTGOING.FTP_CREATE_EDIT.value):
        """ Notify worker threads of new or updated parameters.
        """
        params['action'] = action
        self.broker_client.publish(params)
# ################################################################################################################################
# ################################################################################################################################
class GetByID(AdminService):
    """ Returns an FTP connection by its ID.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_ftp_get_by_id_request'
        response_elem = None
        input_required = 'cluster_id', 'id'
        output_required = _get_output_required
        output_optional = _get_output_optional
        output_repeated = False

    def get_data(self, session):
        # Note: the lookup is scoped to this server's cluster, not the input cluster_id
        return out_ftp(session, self.server.cluster_id, self.request.input.id)

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload = self.get_data(session)
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of outgoing FTP connections.
    """
    _filter_by = OutgoingFTP.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_outgoing_ftp_get_list_request'
        response_elem = 'zato_outgoing_ftp_get_list_response'
        input_required = 'cluster_id'
        output_required = _get_output_required
        output_optional = _get_output_optional
        output_repeated = True

    def get_data(self, session):
        # elems_with_opaque merges opaque (JSON-stored) attributes into each result row
        return elems_with_opaque(self._search(out_ftp_list, session, self.request.input.cluster_id, False))

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
# ################################################################################################################################
# ################################################################################################################################
class Create(_FTPService):
    """ Creates a new outgoing FTP connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_ftp_create_request'
        response_elem = 'zato_outgoing_ftp_create_response'
        input_required = 'cluster_id', 'name', 'is_active', 'host', 'port', Boolean('dircache')
        input_optional = 'user', 'acct', 'timeout', 'default_directory'
        output_required = 'id', 'name'

    def handle(self):
        input = self.request.input

        with closing(self.odb.session()) as session:
            # Names must be unique within a cluster
            existing_one = session.query(OutgoingFTP.id).\
                filter(OutgoingFTP.cluster_id==input.cluster_id).\
                filter(OutgoingFTP.name==input.name).\
                first()

            if existing_one:
                raise Exception('Outgoing FTP connection `{}` already exists'.format(input.name))

            try:
                item = OutgoingFTP()
                item.name = input.name
                item.is_active = input.is_active
                item.cluster_id = input.cluster_id
                item.dircache = input.dircache
                item.host = input.host
                item.port = input.port
                item.user = input.user
                item.acct = input.acct
                # Empty / zero timeout is normalized to None
                item.timeout = input.timeout or None

                # Opaque attributes
                set_instance_opaque_attrs(item, input)

                session.add(item)
                session.commit()

                self.notify_worker_threads(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Outgoing FTP connection could not be created, e:`{}`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
# ################################################################################################################################
class Edit(_FTPService):
    """ Updates an outgoing FTP connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_ftp_edit_request'
        response_elem = 'zato_outgoing_ftp_edit_response'
        input_required = 'id', 'cluster_id', 'name', 'is_active', 'host', 'port', Boolean('dircache')
        input_optional = 'user', 'acct', 'timeout', 'default_directory'
        output_required = 'id', 'name'

    def handle(self):
        input = self.request.input

        with closing(self.odb.session()) as session:
            # Reject a rename that would collide with another connection in this cluster
            existing_one = session.query(OutgoingFTP.id).\
                filter(OutgoingFTP.cluster_id==input.cluster_id).\
                filter(OutgoingFTP.name==input.name).\
                filter(OutgoingFTP.id!=input.id).\
                first()

            if existing_one:
                raise Exception('Outgoing FTP connection `{}` already exists'.format(input.name))

            try:
                item = session.query(OutgoingFTP).filter_by(id=input.id).one()
                old_name = item.name
                item.name = input.name
                item.is_active = input.is_active
                item.cluster_id = input.cluster_id
                item.dircache = input.dircache
                item.host = input.host
                item.port = input.port
                item.user = input.user
                item.acct = input.acct
                # Empty / zero timeout is normalized to None
                item.timeout = input.timeout or None

                # Worker threads need the (unchanged) password and the previous name
                input.password = item.password
                input.old_name = old_name

                # Opaque attributes
                set_instance_opaque_attrs(item, input)

                session.add(item)
                session.commit()

                self.notify_worker_threads(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not update the outgoing FTP connection, e:`{}`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
# ################################################################################################################################
class Delete(_FTPService):
    """ Deletes an outgoing FTP connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_ftp_delete_request'
        response_elem = 'zato_outgoing_ftp_delete_response'
        input_required = 'id'

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(OutgoingFTP).\
                    filter(OutgoingFTP.id==self.request.input.id).\
                    one()
                # Remember the name before deletion so workers can drop the connection by name
                old_name = item.name
                session.delete(item)
                session.commit()

                self.notify_worker_threads({'name':old_name}, OUTGOING.FTP_DELETE.value)

            except Exception:
                session.rollback()
                self.logger.error('Could not delete the outgoing FTP connection, e:`{}`', format_exc())
                raise
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an outgoing FTP connection.
    """
    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_outgoing_ftp_change_password_request'
        response_elem = 'zato_outgoing_ftp_change_password_response'

    def handle(self):
        def _auth(instance, password):
            # Assign the new password to the ODB instance in place
            instance.password = password
        self._handle(OutgoingFTP, _auth, OUTGOING.FTP_CHANGE_PASSWORD.value)
# ################################################################################################################################
# ################################################################################################################################
| 10,170
|
Python
|
.py
| 190
| 43.742105
| 130
| 0.469314
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,270
|
jms_wmq.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/jms_wmq.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
# Zato
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import ConnDefWMQ, OutgoingWMQ
from zato.common.odb.query import out_wmq, out_wmq_list
from zato.server.service import AsIs, Integer
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
# SIO elements shared by the Get/GetList services below
_base_required = ('id', Integer('delivery_mode'), Integer('priority'))
_get_required = _base_required + ('name', 'is_active', 'def_id', 'def_name')
_optional = (Integer('expiration'),)
# ################################################################################################################################
class _GetSIO(AdminSIO):
    """ Base SimpleIO definition shared by Get and GetList below.
    """
    output_required = _get_required
    output_optional = _optional + ('def_name_full_text',)
# ################################################################################################################################
class Get(AdminService):
    """ Returns details of a single outgoing IBM MQ connection.
    """
    class SimpleIO(_GetSIO):
        request_elem = 'zato_outgoing_jms_wmq_get_request'
        response_elem = 'zato_outgoing_jms_wmq_get_response'
        input_required = ('cluster_id', 'id')
        output_optional = _GetSIO.output_optional

    def handle(self):
        with closing(self.odb.session()) as session:
            item = out_wmq(session, self.request.input.cluster_id, self.request.input.id)
            self.response.payload = item
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of outgoing IBM MQ connections.
    """
    _filter_by = OutgoingWMQ.name,

    class SimpleIO(_GetSIO, GetListAdminSIO):
        request_elem = 'zato_outgoing_jms_wmq_get_list_request'
        response_elem = 'zato_outgoing_jms_wmq_get_list_response'
        input_required = ('cluster_id',)

    def get_data(self, session):
        # Paging and name filtering are handled by the shared search helper.
        return self._search(out_wmq_list, session, self.request.input.cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as db_session:
            data = self.get_data(db_session)
            self.response.payload[:] = data
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new outgoing IBM MQ connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_jms_wmq_create_request'
        response_elem = 'zato_outgoing_jms_wmq_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'def_id', Integer('delivery_mode'), Integer('priority'))
        input_optional = ('expiration',)
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # Reject priorities outside the 0-9 range before touching the database.
            if not 0 <= input.priority <= 9:
                raise ValueError('Priority should be between 0 and 9, not [{0}]'.format(repr(input.priority)))

            # The name must be unique among connections in this cluster.
            existing_one = session.query(OutgoingWMQ.id).\
                filter(ConnDefWMQ.cluster_id==input.cluster_id).\
                filter(OutgoingWMQ.def_id==ConnDefWMQ.id).\
                filter(OutgoingWMQ.name==input.name).\
                first()

            if existing_one:
                raise Exception('An outgoing IBM MQ connection `{}` already exists on this cluster'.format(input.name))

            try:
                # Build the new ORM object from the matching input elements.
                item = OutgoingWMQ()
                for attr in ('name', 'is_active', 'def_id', 'delivery_mode', 'priority', 'expiration'):
                    setattr(item, attr, getattr(input, attr))

                # Commit to DB
                session.add(item)
                session.commit()

                # Notify other servers
                input.id = item.id
                input.action = OUTGOING.WMQ_CREATE.value
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not create an outgoing IBM MQ connection, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an outgoing IBM MQ connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_jms_wmq_edit_request'
        response_elem = 'zato_outgoing_jms_wmq_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'is_active', 'def_id', Integer('delivery_mode'), Integer('priority'))
        input_optional = ('expiration',)
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # Reject priorities outside the 0-9 range up front.
            if not 0 <= input.priority <= 9:
                raise ValueError('Priority should be between 0 and 9, not [{0}]'.format(repr(input.priority)))

            # The new name must not clash with any other connection in the cluster.
            existing_one = session.query(OutgoingWMQ.id).\
                filter(ConnDefWMQ.cluster_id==input.cluster_id).\
                filter(OutgoingWMQ.def_id==ConnDefWMQ.id).\
                filter(OutgoingWMQ.name==input.name).\
                filter(OutgoingWMQ.id!=input.id).\
                first()

            if existing_one:
                raise Exception('An outgoing IBM MQ connection `{}` already exists on this cluster'.format(input.name))

            try:
                item = session.query(OutgoingWMQ).filter_by(id=input.id).one()
                old_name = item.name
                for attr in ('name', 'is_active', 'def_id', 'delivery_mode', 'priority', 'expiration'):
                    setattr(item, attr, getattr(input, attr))

                # Commit to DB
                session.add(item)
                session.commit()

                # Notify other servers, including the previous name so they can re-key.
                input.action = OUTGOING.WMQ_EDIT.value
                input.old_name = old_name
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not update IBM MQ definition, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an outgoing IBM MQ connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_jms_wmq_delete_request'
        response_elem = 'zato_outgoing_jms_wmq_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(OutgoingWMQ).\
                    filter(OutgoingWMQ.id==self.request.input.id).\
                    one()

                # Capture these before the commit - afterwards the instance is
                # deleted and expired, so attribute access could fail
                # (the AMQP Delete service already follows this pattern).
                item_id = item.id
                item_name = item.name

                # Commit to DB
                session.delete(item)
                session.commit()

                # Notify other servers
                self.broker_client.publish({
                    'action': OUTGOING.WMQ_DELETE.value,
                    'name': item_name,
                    'old_name': item_name,
                    'id': item_id
                })
            except Exception:
                session.rollback()
                # Use %s - self.logger interpolates %-style lazy arguments,
                # so the previous `{}` placeholder never received the traceback.
                self.logger.error('Could not delete outgoing IBM MQ connection, e:`%s`', format_exc())
                raise
# ################################################################################################################################
class SendMessage(AdminService):
    """ Sends a message to an IBM MQ queue.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_jms_wmq_send_message_request'
        response_elem = 'zato_outgoing_jms_wmq_message_response'
        # Delivery metadata plus the target queue and payload are all required.
        input_required = _base_required + ('cluster_id', 'queue_name', 'data')
        # AsIs prevents SIO from transforming correlation/message IDs.
        input_optional = _optional + ('reply_to', AsIs('correl_id'), AsIs('msg_id'))

    def handle(self):
        # Hand the whole request over to the IBM MQ connector; no response payload.
        self.server.connector_ibm_mq.send_wmq_message(self.request.input)
# ################################################################################################################################
| 9,163
|
Python
|
.py
| 180
| 40.177778
| 130
| 0.520824
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,271
|
amqp_.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/amqp_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
# Zato
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import ConnDefAMQP, OutgoingAMQP
from zato.common.odb.query import out_amqp_list
from zato.server.service import AsIs
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of outgoing AMQP connections.
    """
    name = 'zato.outgoing.amqp.get-list'
    _filter_by = OutgoingAMQP.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_outgoing_amqp_get_list_request'
        response_elem = 'zato_outgoing_amqp_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'def_id', 'delivery_mode', 'priority', 'def_name', 'pool_size')
        output_optional = ('content_type', 'content_encoding', 'expiration', AsIs('user_id'), AsIs('app_id'))

    def get_data(self, session):
        # Paging and name filtering are handled by the shared search helper.
        return self._search(out_amqp_list, session, self.request.input.cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as db_session:
            data = self.get_data(db_session)
            self.response.payload[:] = data
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new outgoing AMQP connection.
    """
    name = 'zato.outgoing.amqp.create'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_amqp_create_request'
        response_elem = 'zato_outgoing_amqp_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'def_id', 'delivery_mode', 'priority', 'pool_size')
        input_optional = ('content_type', 'content_encoding', 'expiration', AsIs('user_id'), AsIs('app_id'))
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input

        # Normalize numeric elements - an empty expiration becomes None.
        input.delivery_mode = int(input.delivery_mode)
        input.priority = int(input.priority)
        input.expiration = int(input.expiration) if input.expiration else None

        if not 0 <= input.priority <= 9:
            raise ValueError('Priority should be between 0 and 9, not [{0}]'.format(repr(input.priority)))

        with closing(self.odb.session()) as session:

            # Let's see if we already have a definition of that name before committing
            # any stuff into the database.
            existing_one = session.query(OutgoingAMQP.id).\
                filter(ConnDefAMQP.cluster_id==input.cluster_id).\
                filter(OutgoingAMQP.def_id==ConnDefAMQP.id).\
                filter(OutgoingAMQP.name==input.name).\
                first()

            if existing_one:
                raise Exception('An outgoing AMQP connection `{}` already exists on this cluster'.format(input.name))

            try:
                # Build the new ORM object from the matching input elements.
                item = OutgoingAMQP()
                for attr in ('name', 'is_active', 'def_id', 'delivery_mode', 'priority', 'content_type',
                        'content_encoding', 'expiration', 'pool_size', 'user_id', 'app_id'):
                    setattr(item, attr, getattr(input, attr))

                session.add(item)
                session.commit()

                # Notify other servers, including the definition's name.
                input.action = OUTGOING.AMQP_CREATE.value
                input.def_name = item.def_.name
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not create an outgoing AMQP connection, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an outgoing AMQP connection.
    """
    name = 'zato.outgoing.amqp.edit'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_amqp_edit_request'
        response_elem = 'zato_outgoing_amqp_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'is_active', 'def_id', 'delivery_mode', 'priority', 'pool_size')
        input_optional = ('content_type', 'content_encoding', 'expiration', AsIs('user_id'), AsIs('app_id'))
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input

        # Normalize numeric elements - an empty expiration becomes None.
        input.delivery_mode = int(input.delivery_mode)
        input.priority = int(input.priority)
        input.expiration = int(input.expiration) if input.expiration else None

        if not 0 <= input.priority <= 9:
            raise ValueError('Priority should be between 0 and 9, not [{0}]'.format(repr(input.priority)))

        with closing(self.odb.session()) as session:

            # Let's see if we already have a definition of that name before committing
            # any stuff into the database.
            existing_one = session.query(OutgoingAMQP.id).\
                filter(ConnDefAMQP.cluster_id==input.cluster_id).\
                filter(OutgoingAMQP.def_id==ConnDefAMQP.id).\
                filter(OutgoingAMQP.name==input.name).\
                filter(OutgoingAMQP.id!=input.id).\
                first()

            if existing_one:
                raise Exception('An outgoing AMQP connection `{}` already exists on this cluster'.format(input.name))

            try:
                item = session.query(OutgoingAMQP).filter_by(id=input.id).one()
                old_name = item.name
                for attr in ('name', 'is_active', 'def_id', 'delivery_mode', 'priority', 'content_type',
                        'content_encoding', 'expiration', 'pool_size', 'user_id', 'app_id'):
                    setattr(item, attr, getattr(input, attr))

                session.add(item)
                session.commit()

                # Notify other servers, including the previous name so they can re-key.
                input.action = OUTGOING.AMQP_EDIT.value
                input.def_name = item.def_.name
                input.old_name = old_name
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not update the outgoing AMQP connection, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an outgoing AMQP connection.
    """
    name = 'zato.outgoing.amqp.delete'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_amqp_delete_request'
        response_elem = 'zato_outgoing_amqp_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(OutgoingAMQP).\
                    filter(OutgoingAMQP.id==self.request.input.id).\
                    one()

                # Capture everything needed for the broker message before the commit -
                # afterwards the instance is deleted and expired, so `item.name`
                # could no longer be read (id and def_name were already captured,
                # name was not, which this fixes).
                item_id = item.id
                item_name = item.name
                def_name = item.def_.name

                session.delete(item)
                session.commit()

                # Notify other servers
                self.broker_client.publish({
                    'action': OUTGOING.AMQP_DELETE.value,
                    'name': item_name,
                    'id':item_id,
                    'def_name':def_name,
                })
            except Exception:
                session.rollback()
                self.logger.error('Could not delete the outgoing AMQP connection, e:`%s`', format_exc())
                raise
# ################################################################################################################################
class Publish(AdminService):
    """ Publishes a message to an AMQP broker.
    """
    name = 'zato.outgoing.amqp.publish'

    class SimpleIO:
        input_required = 'request_data', 'conn_name', 'exchange', 'routing_key'
        output_optional = 'response_data'
        response_elem = None

    def handle(self):
        # Send through the named outgoing AMQP connection and confirm to the caller.
        req = self.request.input
        self.out.amqp.send(req.request_data, req.conn_name, req.exchange, req.routing_key)
        self.response.payload.response_data = '{"result": "OK"}'
# ################################################################################################################################
# ################################################################################################################################
| 9,570
|
Python
|
.py
| 185
| 40.497297
| 130
| 0.543399
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,272
|
zmq.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/outgoing/zmq.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
# Zato
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import OutgoingZMQ
from zato.common.odb.query import out_zmq_list
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of outgoing ZeroMQ connections.
    """
    _filter_by = OutgoingZMQ.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_outgoing_zmq_get_list_request'
        response_elem = 'zato_outgoing_zmq_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'address', 'socket_type', 'socket_method')

    def get_data(self, session):
        # Paging and name filtering are handled by the shared search helper.
        return self._search(out_zmq_list, session, self.request.input.cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as db_session:
            data = self.get_data(db_session)
            self.response.payload[:] = data
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new outgoing ZeroMQ connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_zmq_create_request'
        response_elem = 'zato_outgoing_zmq_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'address', 'socket_type', 'socket_method')
        input_optional = ('msg_source',)
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # The name must be unique among connections in this cluster.
            existing_one = session.query(OutgoingZMQ.id).\
                filter(OutgoingZMQ.cluster_id==input.cluster_id).\
                filter(OutgoingZMQ.name==input.name).\
                first()

            if existing_one:
                raise Exception('An outgoing ZeroMQ connection `{}` already exists on this cluster'.format(input.name))

            try:
                # Build the new ORM object from the matching input elements.
                item = self._new_zato_instance_with_cluster(OutgoingZMQ)
                for attr in ('name', 'is_active', 'address', 'socket_type', 'socket_method', 'cluster_id'):
                    setattr(item, attr, getattr(input, attr))

                session.add(item)
                session.commit()

                # Notify other servers
                input.action = OUTGOING.ZMQ_CREATE.value
                input.id = item.id
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not create an outgoing ZeroMQ connection, e:`{}`'.format(format_exc()))
                session.rollback()
                raise
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an outgoing ZeroMQ connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_zmq_edit_request'
        response_elem = 'zato_outgoing_zmq_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'is_active', 'address', 'socket_type', 'socket_method')
        input_optional = ('msg_source',)
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # The new name must not clash with another connection in this cluster.
            existing_one = session.query(OutgoingZMQ.id).\
                filter(OutgoingZMQ.cluster_id==input.cluster_id).\
                filter(OutgoingZMQ.name==input.name).\
                filter(OutgoingZMQ.id!=input.id).\
                first()

            if existing_one:
                raise Exception('An outgoing ZeroMQ connection `{}` already exists on this cluster'.format(input.name))

            try:
                item = session.query(OutgoingZMQ).filter_by(id=input.id).one()
                old_name = item.name
                for attr in ('name', 'is_active', 'address', 'socket_type', 'socket_method'):
                    setattr(item, attr, getattr(input, attr))

                session.add(item)
                session.commit()

                # Notify other servers, including the previous name so they can re-key.
                input.action = OUTGOING.ZMQ_EDIT.value
                input.id = item.id
                input.old_name = old_name
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not update the outgoing ZeroMQ connection, e:`{}`'.format(format_exc()))
                session.rollback()
                raise
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an outgoing ZeroMQ connection.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_outgoing_zmq_delete_request'
        response_elem = 'zato_outgoing_zmq_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(OutgoingZMQ).\
                    filter(OutgoingZMQ.id==self.request.input.id).\
                    one()

                # Capture these before the commit - afterwards the instance is
                # deleted and expired, so attribute access could fail.
                item_id = item.id
                item_name = item.name

                session.delete(item)
                session.commit()

                # Notify other servers
                msg = {'action': OUTGOING.ZMQ_DELETE.value, 'name': item_name, 'id': item_id}
                self.broker_client.publish(msg)
            except Exception:
                session.rollback()
                msg = 'Could not delete the outgoing ZeroMQ connection, e:`{}`'.format(format_exc())
                self.logger.error(msg)
                raise
# ################################################################################################################################
| 6,553
|
Python
|
.py
| 133
| 38.082707
| 130
| 0.535966
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,273
|
session_attr.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/session_attr.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.service.internal.sso.attr import _Attr, _AttrExists, _AttrNames
# ################################################################################################################################
class SessionAttr(_Attr):
    # CRUD on attributes is applied to the 'session' entity.
    _api_entity = 'session'
# ################################################################################################################################
class SessionAttrExists(_AttrExists):
    # Existence checks are applied to the 'session' entity.
    _api_entity = 'session'
# ################################################################################################################################
class SessionAttrNames(_AttrNames):
    # Attribute-name listings are applied to the 'session' entity.
    _api_entity = 'session'
# ################################################################################################################################
| 958
|
Python
|
.py
| 17
| 54.058824
| 130
| 0.322234
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,274
|
user.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/user.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from traceback import format_exc
from uuid import uuid4
# dateutil
from dateutil.parser import parser as DateTimeParser
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# Zato
from zato.common import NotGiven
from zato.common.broker_message import SSO as BROKER_MSG_SSO
from zato.common.simpleio_ import drop_sio_elems
from zato.common.util import asbool
from zato.server.service import AsIs, Bool, Int, List, Opaque, SIOElem
from zato.server.service.internal.sso import BaseService, BaseRESTService, BaseSIO
from zato.sso import status_code, SearchCtx, SignupCtx, ValidationError
from zato.sso.user import super_user_attrs, update
# ################################################################################################################################
if 0:
from zato.sso import User as UserEntity
UserEntity = UserEntity
# ################################################################################################################################
# Attributes that may be provided when creating a new user via REST.
_create_user_attrs = sorted(('username', 'password', Bool('password_must_change'), 'display_name', 'first_name', 'middle_name',
    'last_name', 'email', 'is_locked', 'sign_up_status', 'is_rate_limit_active', 'rate_limit_def', 'is_totp_enabled',
    'totp_label', 'totp_key'))

# Attributes serialized to ISO-8601 strings before being returned in responses.
_date_time_attrs = sorted(('approv_rej_time', 'locked_time', 'password_expiry', 'password_last_set', 'sign_up_time',
    'approval_status_mod_time'))

# ################################################################################################################################

# A marker that indicates a value that will never exist
_invalid = '_invalid.{}'.format(uuid4().hex)

# ################################################################################################################################

# A single reusable parser instance for incoming datetime strings.
dt_parser = DateTimeParser()
# ################################################################################################################################
# ################################################################################################################################
class Login(BaseService):
    """ Logs an SSO user in.
    """
    class SimpleIO(BaseSIO):
        input_required = ('username', 'password', 'current_app')
        input_optional = ('totp_code', 'new_password', 'remote_addr', 'user_agent')
        output_required = ('status',)
        output_optional = tuple(drop_sio_elems(BaseSIO.output_optional, 'status')) + ('ust',)

# ################################################################################################################################

    def _handle_sso(self, ctx):

        # ctx.input carries the SimpleIO elements; enrich it with the correlation ID.
        input = ctx.input
        input.cid = self.cid

        # Did the caller provide the remote address and user agent explicitly?
        has_remote_addr = bool(input.get('remote_addr'))
        user_provided_user_agent = input.get('user_agent')
        has_user_agent = bool(user_provided_user_agent)

        # Prefer a caller-supplied user agent, falling back to the request context's one.
        user_agent = user_provided_user_agent if has_user_agent else ctx.user_agent

        input.has_remote_addr = has_remote_addr
        input.has_user_agent = has_user_agent

        # No explicit remote address on input - take it from the WSGI environment,
        # decoding it to text if it arrived as bytes.
        if not has_remote_addr:
            wsgi_remote_addr = self.wsgi_environ['zato.http.remote_addr']
            wsgi_remote_addr = wsgi_remote_addr.decode('utf8') if not isinstance(wsgi_remote_addr, unicode) else wsgi_remote_addr
            input.remote_addr = wsgi_remote_addr

        out = self.sso.user.login(input.cid, input.username, input.password, input.current_app, input.remote_addr,
            user_agent, has_remote_addr, has_user_agent, input.new_password, input.totp_code)

        # A successful login yields a session token (UST) ..
        if out:
            self.response.payload.ust = out.ust

            # .. and possibly a warning that the password is about to expire.
            if out.has_w_about_to_exp:
                self.environ['status_changed'] = True
                self.response.payload.status = status_code.warning
                self.response.payload.sub_status = [status_code.password.w_about_to_exp]
# ################################################################################################################################
# ################################################################################################################################
class Logout(BaseService):
    """ Logs a user out of SSO.
    """
    class SimpleIO(BaseSIO):
        input_required = ('ust', 'current_app')
        output_required = ('status',)
        output_optional = tuple(drop_sio_elems(BaseSIO.output_optional, 'status'))

    def _handle_sso(self, ctx):

        # Note that "ok" is always returned no matter the outcome - this is to thwart any attempts
        # to learn which USTs are/were valid or not.
        try:
            self.sso.user.logout(self.cid, ctx.input.ust, ctx.input.current_app, ctx.remote_addr)
        except Exception:
            # Log for operators only; nothing about the failure leaks to the caller.
            self.logger.warning('CID: `%s`, e:`%s`', self.cid, format_exc())
        finally:
            self.response.payload.status = status_code.ok
# ################################################################################################################################
# ################################################################################################################################
class User(BaseRESTService):
    """ User manipulation through REST.
    """
    class SimpleIO(BaseSIO):
        input_required = ('ust', 'current_app')
        input_optional = (AsIs('user_id'), 'username', 'password', Bool('password_must_change'), 'password_expiry',
            'display_name', 'first_name', 'middle_name', 'last_name', 'email', 'is_locked', 'sign_up_status',
            'approval_status', 'is_rate_limit_active', 'rate_limit_def', 'is_totp_enabled', 'totp_key', 'totp_label',
            Bool('auto_approve'))
        output_optional = BaseSIO.output_optional + (AsIs('user_id'), 'username', 'email', 'display_name', 'first_name',
            'middle_name', 'last_name', 'is_active', 'is_internal', 'is_super_user', 'is_approval_needed',
            'approval_status', 'approval_status_mod_time', 'approval_status_mod_by', 'is_locked', 'locked_time',
            'creation_ctx', 'locked_by', 'approv_rej_time', 'approv_rej_by', 'password_expiry', Bool('password_is_set'),
            Bool('password_must_change'), 'password_last_set', 'sign_up_status','sign_up_time', 'is_totp_enabled',
            'totp_label')
        # Elements absent from input default to the _invalid marker so "not given"
        # can be told apart from any real value, including None.
        default_value = _invalid

# ################################################################################################################################

    def _handle_sso_GET(self, ctx):
        """ Returns details of a particular user by UST or ID.
        """
        user_id = ctx.input.get('user_id')
        attrs = []

        # A user_id was given on input - look that user up by ID;
        # otherwise return the user owning the current session.
        if user_id != self.SimpleIO.default_value:
            func = self.sso.user.get_user_by_id
            attrs.append(user_id)
        else:
            func = self.sso.user.get_current_user

        # These will be always needed, no matter which function is used
        attrs += [ctx.input.ust, ctx.input.current_app, ctx.remote_addr]

        # Get the dict describing this user
        user_entity = func(self.cid, *attrs) # type: UserEntity
        out = user_entity.to_dict()

        # Make sure regular users do not receive super-user specific details
        if not user_entity.is_current_super_user:
            for name in super_user_attrs:
                out.pop(name, None)

        # Func will return a dictionary describing the required user, already taking permissions into account
        self.response.payload = out

# ################################################################################################################################

    def _handle_sso_POST(self, ctx, _create_user_attrs=_create_user_attrs, _date_time_attrs=_date_time_attrs):
        """ Creates a new regular user (will not create super-users).
        """
        # Create input data explicitly, field-by-field, to make sure only well known parameters can be used
        # to create a new user.
        data = {}
        for name in _create_user_attrs:
            # SIOElem entries (e.g. Bool) carry their string name in .name
            name = name.name if isinstance(name, SIOElem) else name
            value = ctx.input.get(name)
            if value != self.SimpleIO.default_value:
                # Passwords arrive encrypted - decrypt before handing them on
                if name == 'password':
                    value = self.server.decrypt(value)
                data[name] = value

        auto_approve = self.request.input.auto_approve
        if auto_approve == self.SimpleIO.default_value:
            auto_approve = False

        # This will update 'data' in place ..
        user_id = self.sso.user.create_user(
            self.cid, data, ctx.input.ust, ctx.input.current_app, ctx.remote_addr, auto_approve=auto_approve)

        # .. and we can now assign it to response..

        # .. first making sure that password is not returned
        data.pop('password', None)

        # .. then serializing all datetime objects to string ..
        for name in _date_time_attrs:
            value = data.get(name)
            if value:
                data[name] = value.isoformat()

        # .. if rate-limiting is active, let all servers know about it ..
        if ctx.input.is_rate_limit_active:
            self.broker_client.publish({
                'action': BROKER_MSG_SSO.USER_CREATE.value,
                'user_id': user_id,
                'is_rate_limit_active': True,
                'rate_limit_def': ctx.input.rate_limit_def if ctx.input.rate_limit_def != _invalid else None
            })

        # .. and finally we can create the response.
        self.response.payload = data

# ################################################################################################################################

    def _handle_sso_DELETE(self, ctx):
        """ Deletes an existing user.
        """
        # Will take care of permissions / access rights and if everything is successful,
        # the user pointed to by user_id will be deleted.
        self.sso.user.delete_user_by_id(self.cid, ctx.input.user_id, ctx.input.ust, ctx.input.current_app, ctx.remote_addr)

# ################################################################################################################################

    def _handle_sso_PATCH(self, ctx, _not_given=NotGiven):
        """ Updates an existing user.
        """
        current_ust = ctx.input.pop('ust')
        current_app = ctx.input.pop('current_app')

        # Explicitly provide only what we know is allowed
        data = {}
        for name in sorted(update.all_attrs):

            # No such key on input, we can ignore it
            if name not in self.request.payload:
                continue

            value = ctx.input.get(name, _not_given)

            # Just to be doubly sure, check the value too
            if value is _not_given:
                continue

            if value != _invalid:

                # Boolean values will never be None on input (SIO will convert them to a default value of an empty string)..
                if name in update.boolean_attrs:
                    if value is None:
                        continue
                    else:
                        value = asbool(value)

                # .. same goes for datetime ones.
                elif name in update.datetime_attrs:
                    if value is None:
                        continue
                    value = dt_parser.parse(value)

                data[name] = value

        # Update either the user given by ID or the one owning the current session.
        user_id = data.pop('user_id', None)
        if user_id:
            self.sso.user.update_user_by_id(self.cid, user_id, data, current_ust, current_app, ctx.remote_addr)
            user = self.sso.user.get_user_by_id(self.cid, user_id, current_ust, current_app, ctx.remote_addr)
        else:
            self.sso.user.update_current_user(self.cid, data, current_ust, current_app, ctx.remote_addr)
            user = self.sso.user.get_current_user(self.cid, current_ust, current_app, ctx.remote_addr)
            user_id = user.user_id

        # Always notify all servers about this event in case we need to disable rate limiting
        self.broker_client.publish({
            'action': BROKER_MSG_SSO.USER_EDIT.value,
            'user_id': user_id,
            'is_rate_limit_active': ctx.input.is_rate_limit_active,
            'rate_limit_def': ctx.input.rate_limit_def if ctx.input.rate_limit_def != _invalid else None,
        })
# ################################################################################################################################
# ################################################################################################################################
class TOTP(BaseRESTService):
    """ TOTP key management.
    """
    name = 'zato.server.service.internal.sso.user.totp'

    class SimpleIO(BaseSIO):
        input_required = ('ust', 'current_app', 'totp_key', 'totp_label')
        # NOTE(review): presumably omitting user_id targets the current session's
        # user - confirm against reset_totp_key's implementation.
        input_optional = AsIs('user_id'),
        output_optional = BaseSIO.output_optional + ('totp_key',)

    def _handle_sso_PATCH(self, ctx):
        """ Resets a user's TOTP key.
        """
        # The new key is returned so the caller can present it to the user.
        self.response.payload.totp_key = self.sso.user.reset_totp_key(
            self.cid, ctx.input.ust, ctx.input.user_id,
            ctx.input.totp_key,
            ctx.input.totp_label,
            ctx.input.current_app,
            ctx.remote_addr)
# ################################################################################################################################
# ################################################################################################################################
class Lock(BaseRESTService):
    """ Locks or unlocks a user account.
    """
    name = 'zato.server.service.internal.sso.user.lock'

    class SimpleIO(BaseSIO):
        input_required = 'ust', 'current_app', AsIs('user_id')

# ################################################################################################################################

    def _handle_sso_POST(self, ctx):
        """ Locks a user account.
        """
        self.sso.user.lock_user(self.cid, ctx.input.user_id, ctx.input.ust, ctx.input.current_app, ctx.remote_addr)

# ################################################################################################################################

    def _handle_sso_DELETE(self, ctx):
        """ Unlocks a user account.
        """
        self.sso.user.unlock_user(self.cid, ctx.input.user_id, ctx.input.ust, ctx.input.current_app, ctx.remote_addr)
# ################################################################################################################################
# ################################################################################################################################
class Password(BaseRESTService):
    """ User password management.
    """
    class SimpleIO(BaseSIO):
        input_required = ('ust', 'current_app', 'new_password')
        input_optional = (AsIs('user_id'), 'old_password', Int('password_expiry'), Bool('must_change'))

# ################################################################################################################################

    def _log_invalid_password_expiry(self, value):
        """ Warns that an out-of-range password_expiry was given and that the system-wide default will be used instead.
        """
        self.logger.warning('CID: `%s`, invalid password_expiry `%r`, forcing default of `%s`',
            self.cid, value, self.sso.password_expiry)

# ################################################################################################################################

    def _handle_sso_PATCH(self, ctx):
        """ Changes a user's password.
        """
        data = {
            'old_password': ctx.input.get('old_password') or uuid4().hex, # So it will never match anything
            'new_password': ctx.input['new_password'],
        }

        # If no user_id was given, the underlying API presumably changes
        # the password of the user owning the UST - TODO confirm
        user_id = ctx.input.get('user_id')
        if user_id:
            data['user_id'] = user_id

        # NOTE(review): this assumes a missing optional Int arrives as '' -
        # confirm SIO never returns None here, otherwise the `< 0` check would raise
        password_expiry = ctx.input.get('password_expiry')
        if password_expiry != '':
            if password_expiry < 0:
                # Negative values are invalid - fall back to the configured default
                self._log_invalid_password_expiry(password_expiry)
                password_expiry = self.sso.password_expiry
            data['password_expiry'] = password_expiry

        # Only honour must_change if the key was actually sent in the request,
        # so that its absence is not mistaken for an explicit False
        if 'must_change' in self.request.payload:
            must_change = ctx.input.get('must_change')
            must_change = asbool(must_change)
            data['must_change'] = must_change

        self.sso.user.change_password(self.cid, data, ctx.input.ust, ctx.input.current_app, ctx.remote_addr)
# ################################################################################################################################
# ################################################################################################################################
class _ChangeApprovalStatus(BaseRESTService):
    """ Base class for services changing a user's approval_status.
    """
    # Name of the self.sso.user method to invoke, e.g. 'approve_user' - set by subclasses
    func_name = None

    class SimpleIO(BaseSIO):
        input_required = ('ust', 'current_app', AsIs('user_id'))

    def _handle_sso_POST(self, ctx):
        """ Looks up the configured API method by name and invokes it for the input user.
        """
        func = getattr(self.sso.user, self.func_name)
        func(self.cid, ctx.input.user_id, ctx.input.ust, ctx.input.current_app, ctx.remote_addr)
# ################################################################################################################################
# ################################################################################################################################
class Approve(_ChangeApprovalStatus):
    """ Approves a user - changes his or her approval_status to 'approved'.
    """
    func_name = 'approve_user'
# ################################################################################################################################
# ################################################################################################################################
class Reject(_ChangeApprovalStatus):
    """ Rejects a user - changes his or her approval_status to 'rejected'.
    """
    func_name = 'reject_user'
# ################################################################################################################################
# ################################################################################################################################
class _CtxInputUsing(BaseService):
    """ Base class for services that create context objects based on their self.request.input
    which may possibly use _invalid default values.
    """
    class SimpleIO(BaseSIO):
        # Missing optional elements arrive as the _invalid sentinel
        # so they can be told apart from genuinely empty values
        default_value = _invalid

    def _get_ctx_from_input(self, CtxClass, skip=None, _invalid=_invalid):
        """ Instantiates CtxClass and copies onto it every input element that was
        actually provided (i.e. is not the _invalid sentinel) and is not in skip.
        """
        ctx = CtxClass()
        skip = skip or []
        for key, value in sorted(self.request.input.items()):
            if key not in skip:
                if value != _invalid:
                    setattr(ctx, key, value)
        return ctx
# ################################################################################################################################
# ################################################################################################################################
class Search(_CtxInputUsing):
    """ Looks up SSO users by input criteria.
    """
    class SimpleIO(_CtxInputUsing.SimpleIO):
        input_required = ('ust', 'current_app')
        input_optional = (AsIs('user_id'), 'username', 'email', 'display_name', 'first_name', 'middle_name', 'last_name',
            'sign_up_status', 'approval_status', Bool('paginate'), Int('cur_page'), Int('page_size'), 'name_op',
            'is_name_exact')
        output_required = ('status',)
        # 'status' is moved from optional to required above, hence it is dropped here;
        # pagination metadata accompanies the actual result list
        output_optional = tuple(drop_sio_elems(BaseSIO.output_optional, 'status')) + (Int('total'), Int('num_pages'),
            Int('page_size'), Int('cur_page'), 'has_next_page', 'has_prev_page', Int('next_page'), Int('prev_page'),
            List('result'))
        default_value = _invalid

# ################################################################################################################################

    def _handle_sso(self, ctx, _skip_ctx=('ust', 'current_app')):
        """ Runs the search and assigns the matching users to the response payload.
        """
        # Search data built from all input parameters that were given on input;
        # ust/current_app are credentials, not search criteria, hence skipped
        search_ctx = self._get_ctx_from_input(SearchCtx, _skip_ctx)

        # Assign to response all the matching elements
        self.response.payload = self.sso.user.search(self.cid,
            search_ctx, ctx.input.ust, ctx.input.current_app, ctx.remote_addr, serialize_dt=True)

        # All went fine, return status code OK
        self.response.payload.status = status_code.ok
# ################################################################################################################################
# ################################################################################################################################
class Signup(BaseRESTService, _CtxInputUsing):
    """ Lets users sign up with the system and confirm it afterwards.
    """
    class SimpleIO(_CtxInputUsing.SimpleIO):
        input_optional = ('confirm_token', 'email', 'username', 'password', 'current_app', List('app_list'))
        output_optional = _CtxInputUsing.SimpleIO.output_optional + ('confirm_token',)

# ################################################################################################################################

    def _handle_sso_POST(self, ctx):
        """ Signs a user up, returning the confirmation token in the response payload.
        """
        self.response.payload.confirm_token = self.sso.user.signup(self.cid,
            self._get_ctx_from_input(SignupCtx), ctx.input.current_app, ctx.remote_addr)
        self.response.payload.status = status_code.ok

# ################################################################################################################################

    def _handle_sso_PATCH(self, ctx):
        """ Confirms a previous signup using the confirmation token given on input.
        """
        try:
            self.sso.user.confirm_signup(self.cid, ctx.input.confirm_token, ctx.input.current_app, ctx.remote_addr)
        except ValidationError as e:
            self.logger.info(format_exc())
            self.response.payload.status = status_code.error
            # Only reveal the specific sub_status if the error allows it,
            # otherwise return a generic not-allowed code
            if e.return_status:
                self.response.payload.sub_status = e.sub_status
            else:
                self.response.payload.sub_status = status_code.auth.not_allowed
        else:
            self.response.payload.status = status_code.ok
# ################################################################################################################################
# ################################################################################################################################
class LinkedAuth(BaseRESTService):
    """ Management of links between SSO users and non-SSO security definitions.
    """
    class SimpleIO(BaseSIO):
        input_required = 'current_app', 'ust'
        input_optional = Opaque('user_id'), 'auth_type', 'auth_username', 'is_active',
        output_optional = BaseSIO.output_optional + ('result',)
        default_value = _invalid
        skip_empty_keys = True

# ################################################################################################################################

    def _handle_sso_GET(self, ctx):
        """ Returns the list of auth definitions linked to a user,
        with internal and reserved details stripped from each item.
        """
        # Normalize the _invalid sentinel to None - the API treats None
        # as "the user owning the UST" (TODO confirm)
        user_id = ctx.input.user_id
        user_id = user_id if user_id != _invalid else None

        out = []
        result = self.sso.user.get_linked_auth_list(self.cid, ctx.input.ust, ctx.input.current_app, ctx.remote_addr, user_id)

        for item in result:
            item['creation_time'] = item['creation_time'].isoformat()
            # Reserved placeholder values are never returned to clients
            for name in 'auth_principal', 'auth_source':
                if item[name] == 'reserved':
                    del item[name]
            # Internal bookkeeping keys are removed as well; pop with a default
            # because some may have been deleted just above
            item.pop('is_internal', None)
            item.pop('auth_id', None)
            item.pop('user_id', None)
            item.pop('auth_principal', None)
            item.pop('has_ext_principal', None)
            out.append(item)

        self.response.payload.result = out

# ################################################################################################################################

    def _handle_sso_POST(self, ctx):
        """ Creates a link between a user and an auth definition,
        then notifies all servers so they can pick up the new link.
        """
        user_id, auth_id = self.sso.user.create_linked_auth(self.cid, ctx.input.ust, ctx.input.user_id, ctx.input.auth_type,
            ctx.input.auth_username, ctx.input.is_active, ctx.input.current_app, ctx.remote_addr)

        # With data saved to SQL, we can now notify all the servers about the new link
        msg = {}
        msg['action'] = BROKER_MSG_SSO.LINK_AUTH_CREATE.value
        msg['auth_type'] = ctx.input.auth_type
        msg['user_id'] = user_id
        msg['auth_id'] = auth_id

        self.broker_client.publish(msg)

# ################################################################################################################################

    def _handle_sso_DELETE(self, ctx):
        """ Deletes a link between a user and an auth definition,
        then notifies all servers so they can drop the link.
        """
        auth_id = self.sso.user.delete_linked_auth(self.cid, ctx.input.ust, ctx.input.user_id, ctx.input.auth_type,
            ctx.input.auth_username, ctx.input.current_app, ctx.remote_addr)

        # With data saved to SQL, we can now notify all the servers about the deleted link
        msg = {}
        msg['action'] = BROKER_MSG_SSO.LINK_AUTH_DELETE.value
        msg['auth_type'] = ctx.input.auth_type
        msg['auth_username'] = ctx.input.auth_username
        msg['user_id'] = ctx.input.user_id
        msg['auth_id'] = auth_id

        self.broker_client.publish(msg)
# ################################################################################################################################
# ################################################################################################################################
| 25,602
|
Python
|
.py
| 427
| 51.358314
| 130
| 0.473524
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,275
|
user_attr.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/user_attr.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.service.internal.sso.attr import _Attr, _AttrExists, _AttrNames
# ################################################################################################################################
class UserAttr(_Attr):
    """ REST access to user attributes - binds the shared _Attr logic to the 'user' entity.
    """
    _api_entity = 'user'
# ################################################################################################################################
class UserAttrExists(_AttrExists):
    """ Existence checks for user attributes - binds _AttrExists to the 'user' entity.
    """
    _api_entity = 'user'
# ################################################################################################################################
class UserAttrNames(_AttrNames):
    """ Listing of user attribute names - binds _AttrNames to the 'user' entity.
    """
    _api_entity = 'user'
# ################################################################################################################################
| 940
|
Python
|
.py
| 17
| 53
| 130
| 0.308872
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,276
|
attr.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/attr.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.server.service import AsIs, Bool, Int, Opaque
from zato.server.service.internal.sso import BaseRESTService, BaseSIO
from zato.sso.api import status_code, ValidationError
# ################################################################################################################################
_invalid = 'invalid.{}'.format(uuid4().hex)
# ################################################################################################################################
class _DataElem:
def __init__(self, func, elem_name, elem_value):
self.func = func
self.elem_name = elem_name
self.elem_value = elem_value
# ################################################################################################################################
class _AttrBase:
    """ Utility base class for attribute-related services.
    """
    # Set by subclasses to 'user' or 'session' - selects which SSO entity's
    # attributes the API calls operate on
    _api_entity = None

    class SimpleIO(BaseSIO):
        input_required = 'current_app',
        input_optional = 'ust', 'current_ust', AsIs('user_id'), 'name', 'value', Opaque('data'), Bool('decrypt'), \
            Bool('serialize_dt'), Int('expiration'), Bool('encrypt'), 'target_ust'
        output_optional = BaseSIO.output_optional + (Bool('found'), 'result', 'name', 'value', 'creation_time',
            'last_modified', 'expiration_time', 'is_encrypted')
        # Missing optional elements arrive as the _invalid sentinel
        default_value = _invalid

# ################################################################################################################################

    def get_api_call_data(self, cid, ctx, api_name, logger, needs_input):
        """ Resolves which attribute API function to call and what data element
        to call it with, returning both wrapped in a _DataElem.

        A single 'name' on input selects the plain api_name call; a bulk 'data'
        object selects the '<api_name>_many' variant. Raises ValidationError
        when needs_input is True but neither was given.
        """
        if needs_input:
            # A single attribute name takes precedence over bulk data
            if ctx.input.name != _invalid:
                func_name = api_name
                data_elem_name = 'name'
                data_elem_value = ctx.input.name
            elif ctx.input.data != _invalid:
                func_name = '{}_many'.format(api_name)
                data_elem_name = 'data'
                data_elem_value = ctx.input.data
            else:
                logger.info('Could not find input in `name` nor `data`')
                raise ValidationError(status_code.common.invalid_input)
        else:
            func_name = api_name
            data_elem_name = None
            data_elem_value = None

        # Look up the entity (user or session) whose attribute API is accessed
        if self._api_entity == 'user':
            entity = self.sso.user.get_user_by_id(cid, ctx.input.user_id, ctx.input.ust, ctx.input.current_app,
                ctx.remote_addr)
        elif self._api_entity == 'session':
            entity = self.sso.user.session.get(self.cid, ctx.input.target_ust, ctx.input.current_ust,
                ctx.input.current_app, ctx.remote_addr, user_agent=None)
        else:
            logger.warning('Could not establish API entity to use out of `%s`', self._api_entity)
            raise ValidationError(status_code.common.internal_error)

        func = getattr(entity.attr, func_name)

        return _DataElem(func, data_elem_name, data_elem_value)

# ################################################################################################################################

    def _access_sso_attr(self, ctx, api_name, needs_encrypt=True, needs_expiration=True, needs_result=False, needs_input=True,
        force_elem_name_data=False):
        """ A common function for most calls accessing attributes.

        Builds the keyword arguments expected by the resolved attribute API
        function and invokes it, optionally assigning its result to the response.
        """
        call_data = self.get_api_call_data(self.cid, ctx, api_name, self.logger, needs_input)

        if force_elem_name_data:
            elem_name = 'data'
        else:
            elem_name = call_data.elem_name

        # NOTE(review): with needs_input=False, elem_name may be None here and
        # kwargs gets a None key - harmless because kwargs is then not passed on below
        kwargs = {
            elem_name: call_data.elem_value,
        }

        if needs_expiration:
            kwargs['expiration'] = ctx.input.expiration if ctx.input.expiration != _invalid else None

        if needs_encrypt:
            kwargs['encrypt'] = ctx.input.encrypt if ctx.input.encrypt != _invalid else False

        # A single-name call also carries the attribute's value
        if elem_name == 'name':
            kwargs['value'] = ctx.input.value

        try:
            result = call_data.func(**(kwargs if needs_input else {}))
        except Exception as e:
            self.logger.warning(format_exc())
            # Preserve validation errors as-is, map everything else to invalid input
            if isinstance(e, ValidationError):
                raise
            else:
                raise ValidationError(status_code.common.invalid_input)
        else:
            if needs_result:
                self.response.payload.result = result
# ################################################################################################################################
class _Attr(_AttrBase, BaseRESTService):
    """ Handles access to most of attribute-related REST APIs.
    """

# ################################################################################################################################

    def _handle_sso_POST(self, ctx):
        """ Creates a new attribute.
        """
        self._access_sso_attr(ctx, 'create')

# ################################################################################################################################

    def _handle_sso_PATCH(self, ctx):
        """ Updates an existing attribute.
        """
        self._access_sso_attr(ctx, 'update')

# ################################################################################################################################

    def _handle_sso_PUT(self, ctx):
        """ Creates a new or updates an existing attribute.
        """
        self._access_sso_attr(ctx, 'set')

# ################################################################################################################################

    def _handle_sso_DELETE(self, ctx):
        """ Deletes an existing attribute.
        """
        # No encryption or expiration input applies to deletion
        self._access_sso_attr(ctx, 'delete', False, False, force_elem_name_data=True)

# ################################################################################################################################

    def _handle_sso_GET(self, ctx):
        """ Returns data of and metadata about an attribute.
        """
        call_data = self.get_api_call_data(self.cid, ctx, 'get', self.logger, True)

        # Decrypt by default unless the caller explicitly said otherwise
        decrypt = ctx.input.decrypt
        decrypt = True if (decrypt == _invalid or decrypt == '') else ctx.input.decrypt

        kwargs = {
            'decrypt': decrypt,
            'serialize_dt':True,
            'data': call_data.elem_value,
        }

        try:
            result = call_data.func(**kwargs)
        except Exception:
            self.logger.warning(format_exc())
            raise ValidationError(status_code.common.invalid_input)
        else:
            if result:
                # A list means a bulk lookup - return it as-is;
                # otherwise unpack the single attribute's metadata
                if isinstance(result, list):
                    self.response.payload.result = result
                else:
                    result = result.to_dict()
                    self.response.payload.found = True
                    self.response.payload.name = result['name']
                    self.response.payload.value = result['value']
                    self.response.payload.creation_time = result['creation_time']
                    self.response.payload.last_modified = result['last_modified']
                    self.response.payload.expiration_time = result['expiration_time']
                    self.response.payload.is_encrypted = result['is_encrypted']
            else:
                self.response.payload.found = False
# ################################################################################################################################
class _AttrExists(_AttrBase, BaseRESTService):
    """ Checks if an attribute or attributes given on input actually exist(s).
    """
    def _handle_sso_GET(self, ctx):
        # No encryption/expiration input; the boolean result goes to the response
        self._access_sso_attr(ctx, 'exists', False, False, True, force_elem_name_data=True)
# ################################################################################################################################
class _AttrNames(_AttrBase, BaseRESTService):
    """ Returns names of all attributes defined.
    """
    def _handle_sso_GET(self, ctx):
        # needs_input=False - listing names requires no name/data element on input
        self._access_sso_attr(ctx, 'names', False, False, True, False, force_elem_name_data=True)
# ################################################################################################################################
| 8,440
|
Python
|
.py
| 160
| 43.33125
| 130
| 0.467767
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,277
|
password_reset.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/password_reset.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from datetime import datetime, timedelta
from traceback import format_exc
# Zato
from zato.common.util.api import spawn_greenlet
from zato.common.odb.model import SSOUser as UserModel
from zato.server.service import List, Service
from zato.server.service.internal.sso import BaseRESTService
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.sso.common import SSOCtx
SSOCtx = SSOCtx
# ################################################################################################################################
# ################################################################################################################################
UserModelTable = UserModel.__table__
UserModelTableSelect = UserModelTable.select
# ################################################################################################################################
# ################################################################################################################################
class PasswordReset(BaseRESTService):
    """ Password-reset token (PRT) manipulation through REST.
    """
    class SimpleIO:
        # These elements are actually needed but we make them optional here to ensure
        # that SimpleIO does not raise any exceptions when they are not sent.
        input_optional = 'current_app', 'credential', 'token', 'reset_key', 'password'
        output_required = 'status', 'cid'
        output_optional = ('reset_key', List('sub_status'))

        # Do not wrap elements in a top-level root element
        response_elem = None

        # Do not return keys that we have no values for
        skip_empty_keys = True

# ################################################################################################################################

    def _handle_sso_POST(self, ctx:'SSOCtx') -> 'None':
        """ Creates a new PRT, assuming the incoming credential points to a valid user.
        """
        # Run asynchronously in a separate greenlet
        try:
            _ = spawn_greenlet(
                self.sso.password_reset.create_token,
                self.cid,
                ctx.input.credential,
                ctx.input.current_app,
                ctx.remote_addr,
                ctx.user_agent
            )
        except Exception:
            # Log the exception but do not return it
            # (fixed: the message used to reference a non-existent class name `FlowPRT`)
            self.logger.warning('Exception in PasswordReset._handle_sso_POST `%s`', format_exc())

# ################################################################################################################################

    def _handle_sso_PATCH(self, ctx:'SSOCtx') -> 'None':
        """ Accesses a PRT, returning its access key on output.
        """
        # The token arrives encrypted (by SIO) - decrypt it before use
        ctx.input.token = self.server.decrypt(ctx.input.token)

        # Try to get a reset key for the input PRT ..
        access_token_ctx = self.sso.password_reset.access_token(
            self.cid, ctx.input.token, ctx.input.current_app, ctx.remote_addr, ctx.user_agent)

        # .. if we are here, it means that the PRT was accepted
        # and we can return the reset key to the client.
        self.response.payload.reset_key = access_token_ctx.reset_key

# ################################################################################################################################

    def _handle_sso_DELETE(self, ctx:'SSOCtx') -> 'None':
        """ Updates a password based on a PRT and reset key.
        """
        # The token and password arrive encrypted (by SIO) - decrypt them before use
        ctx.input.token = self.server.decrypt(ctx.input.token)
        ctx.input.password = self.server.decrypt(ctx.input.password)

        # Try to get a reset key for the input PRT and reset key ..
        self.sso.password_reset.change_password(
            self.cid, ctx.input.password, ctx.input.token, ctx.input.reset_key,
            ctx.input.current_app, ctx.remote_addr, ctx.user_agent)

        # .. if we are here, it means that the PRT and reset key
        # were accepted, there is nothing else for us to do, we can return,
        # so let's be explicit about it.
        return
# ################################################################################################################################
# ################################################################################################################################
class PasswordExpiryHandler(Service):
    """ Finds users whose password has expired or is about to expire and hands
    them over to a processor service given on input.
    """
    name = 'pub.zato.sso.password-expiry.handler'

    def handle(self) -> 'None':

        # We need to be invoked with a service that will process any users that find.
        # Without this service, we cannot continue.
        raw = self.request.raw_request

        # A one-key dict containing only 'skip_response_elem' means no real input was given
        is_dict_no_input = isinstance(raw, dict) and len(raw) == 1 and 'skip_response_elem' in raw

        if (not raw) or is_dict_no_input:
            self.logger.info('Service `%s` needs to be invoked with a processor service on input', self.name)
            return
        else:
            processor_service = self.request.raw_request
            self.logger.info('Running `%s` with a processor service on input -> `%s`', self.name, processor_service)

        # Local aliases
        now = datetime.utcnow()
        sso_conf = self.sso.sso_conf
        to_process = []

        # We look up users in the SSO database
        select = UserModelTableSelect()

        # Get all the users - processing them in Python is the most cross-platform way
        query = select.\
            with_only_columns((
                UserModelTable.c.username,
                UserModelTable.c.email,
                UserModelTable.c.display_name,
                UserModelTable.c.password_expiry,
            )).\
            where(UserModelTable.c.is_locked.is_(False)).\
            order_by(UserModelTable.c.username)

        # Obtain a new SQL session ..
        with closing(self.odb.session()) as session:

            # .. run the query ..
            result = session.execute(query)

            # .. go through each of the users ..
            for item in result:

                # .. build a threshold time specific to each user ..
                threshold_time = item.password_expiry - timedelta(days=sso_conf.password.about_to_expire_threshold)

                # .. if it has been reached ..
                if now > threshold_time:

                    # .. transform the item to a Python dict ..
                    item_as_dict = dict(item.items())

                    # .. add flags indicating whether the password has already expired or it is about to ..
                    # NOTE(review): if password_expiry == now exactly, both flags are False - confirm intended
                    item_as_dict['is_password_expired'] = item_as_dict['password_expiry'] < now
                    item_as_dict['is_password_about_to_expire'] = item_as_dict['password_expiry'] > now

                    # .. append the user to the list of users to process ..
                    to_process.append(item_as_dict)

        if to_process:
            self.logger.info('Sending %s result(s) to service `%s`', len(to_process), processor_service)
            self.invoke(processor_service, to_process)
        else:
            self.logger.info('No results to send to service `%s`', processor_service)
# ################################################################################################################################
# ################################################################################################################################
| 7,850
|
Python
|
.py
| 137
| 48.087591
| 130
| 0.486835
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,278
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from copy import deepcopy
from http.client import FORBIDDEN
from traceback import format_exc
# Zato
from zato.common.api import NO_REMOTE_ADDRESS
from zato.server.service import List, Service
from zato.sso import status_code, ValidationError
from zato.sso.common import SSOCtx
from zato.sso.model import RequestCtx
# ################################################################################################################################
if 0:
from zato.common.typing_ import type_
from zato.common.crypto.totp_ import TOTPManager
from zato.common.test.config import TestConfig
TestConfig = TestConfig
TOTPManager = TOTPManager
# ################################################################################################################################
class BaseSIO:
    """ A set of attributes common to all SSO services.
    """
    # SSO code manages secret encryption itself - SIO must not touch secrets
    encrypt_secrets = False

    # Do not wrap responses in a top-level root element
    response_elem = None

    # Do not return keys that we have no values for
    skip_empty_keys = True

    # Every SSO response may carry a correlation ID, a status code and a list of sub-statuses
    output_optional = ('cid', 'status', List('sub_status'))
# ################################################################################################################################
class BaseService(Service):
""" Base class for SSO sevices.
"""
class SimpleIO(BaseSIO):
pass
# ################################################################################################################################
def _set_response_ok(self):
self.response.payload.status = status_code.ok
# ################################################################################################################################
def _set_response_error(self, sub_status=status_code.auth.not_allowed, status=status_code.error):
self.response.status_code = FORBIDDEN
self.response.payload.status = status
# BaseRESTService._handle_sso may have set it already so we need an if check
if not self.response.payload.sub_status:
if isinstance(sub_status, list):
self.response.payload.sub_status.extend(sub_status)
else:
self.response.payload.sub_status.append(sub_status)
# ################################################################################################################################
def before_handle(self):
# Assume that all calls always fail unless explicitly set to status_code.ok
self.response.payload.status = status_code.error
self.response.payload.sub_status = []
# Will be set to True if the default value of status_code.ok should not be returned
self.environ['status_changed'] = False
# ################################################################################################################################
def after_handle(self):
# If status is an error set in before_handle or _handle_sso and there is no sub_status
# set yet, return generic information that user is not allowed to access this resource.
status = self.response.payload.status
sub_status = self.response.payload.sub_status
if status != status_code.ok and (not sub_status):
self.response.payload.sub_status = status_code.auth.not_allowed
# Always returned if any response is produced at all
self.response.payload.cid = self.cid
# ################################################################################################################################
def handle(self):
sso_conf = self.server.sso_config
# Basic checks, applicable to all requests
if self.request.input.current_app not in sso_conf.apps.all:
self.response.payload.status = status_code.error
if sso_conf.apps.inform_if_app_invalid:
self.response.payload.sub_status.append(status_code.app_list.invalid)
return
# Parse remote_addr into a series of IP address objects, possibly more than one,
# depending on how many were sent in the request.
remote_addr = self.wsgi_environ['zato.http.remote_addr']
if remote_addr == NO_REMOTE_ADDRESS:
remote_addr = None
# OK, we can proceed to the actual call now
sso_ctx = SSOCtx(
self.cid,
remote_addr,
self.wsgi_environ.get('HTTP_USER_AGENT'),
self.request.input,
sso_conf
)
_ = self._call_sso_api(self._handle_sso, 'Could not call service', ctx=sso_ctx)
# ################################################################################################################################
def _call_sso_api(self, func, log_prefix, **kwargs):
try:
# Call the business functionality
out = func(**kwargs)
except ValidationError as e:
# Log only if needed (likely WARN will be always enabled but still, it's better to check it first)
if self.server.is_enabled_for_warn:
kwargs['cid'] = self.cid
log_msg = log_prefix.format(**kwargs)
log_msg = log_msg + ', cid:`{}`, e:`{}`'.format(self.cid, format_exc())
self.logger.warning(log_msg)
# Make sure we don't return specific status codes if we are not allowed to
if e.return_status:
self._set_response_error(e.sub_status, e.status)
else:
self._set_response_error()
# All went fine, we can set status OK and return business data
else:
if not self.environ.get('status_changed'):
self._set_response_ok()
return out
# ################################################################################################################################
def _handle_sso(self, ctx):
    """ The actual SSO business logic - subclasses implement it per endpoint. """
    raise NotImplementedError('Must be implemented in subclasses')
# ################################################################################################################################
class BaseRESTService(BaseService):
    """ Base class for services reacting to specific HTTP verbs.
    """
    def _handle_sso(self, ctx):

        # The method to dispatch to is named after the incoming verb, e.g. _handle_sso_GET
        verb = self.wsgi_environ['REQUEST_METHOD']

        try:
            getattr(self, '_handle_sso_{}'.format(verb))(ctx)
        except Exception as e:
            self.response.payload.status = status_code.error
            if isinstance(e, ValidationError):
                self.response.payload.sub_status = e.sub_status
            else:
                self.response.payload.sub_status = [status_code.auth.not_allowed]
            raise
        else:
            self.response.payload.status = status_code.ok
        finally:
            # The CID is always returned, no matter the outcome
            self.response.payload.cid = self.cid
# ################################################################################################################################
class SSOTestService(Service):
    """ Runs a basic SSO test suite - login, user attributes and TOTP code validation. """

    def handle(self):

        # Zato
        from zato.common.crypto.totp_ import TOTPManager
        from zato.common.test.config import TestConfig

        # Run the test suite
        self._test_login(TestConfig, TOTPManager)
        self._test_get_user_attrs(TestConfig, TOTPManager)
        self._test_validate_totp_code(TestConfig, TOTPManager)

# ################################################################################################################################

    def _test_login(self, config, totp_manager):
        # type: (TestConfig, type_[TOTPManager]) -> None
        """ Confirms that a super-user can log in with both str and bytes passwords. """

        # We want to ensure that both str and bytes passwords can be used
        password1 = config.super_user_password
        password2 = config.super_user_password.encode('utf8')

        self.logger.info('SSO login with password1 (str)')

        # Check the str password
        _ = self.sso.user.login(
            self.cid, config.super_user_name, password1, config.current_app,
            '127.0.0.1', 'Zato', totp_code=totp_manager.get_current_totp_code(config.super_user_totp_key))

        self.logger.info('SSO login with password2 (bytes)')

        # Check the bytes password
        _ = self.sso.user.login(
            self.cid, config.super_user_name, password2, config.current_app,
            '127.0.0.1', 'Zato', totp_code=totp_manager.get_current_totp_code(config.super_user_totp_key))

# ################################################################################################################################

    def _test_get_user_attrs(self, config, totp_manager):
        # type: (TestConfig, type_[TOTPManager]) -> None
        """ Confirms that extended user attributes are returned only when return_all_attrs=True. """

        # Zato
        from zato.common.test import rand_string
        from zato.sso.user import super_user_attrs

        # totp_key is added explicitly because it is not part of super_user_attrs
        all_attrs = deepcopy(super_user_attrs)
        all_attrs['totp_key'] = None

        remote_addr = '127.0.0.1'
        user_agent = 'My User Agent'

        username = 'test.attrs.{}'.format(rand_string())
        password = rand_string()
        display_name = 'My Display Name'
        totp_label = 'My TOTP Label'

        # Attributes that can be None for this particular newly created user
        # even if return_all_attrs=True
        none_allowed = {'email', 'first_name', 'last_name', 'locked_by',
            'locked_time', 'middle_name', 'rate_limit_def', 'status'}

        data = {
            'username': username,
            'password': password,
            'display_name': display_name,
            'totp_label': totp_label
        }

        # Log in the super user first ..
        super_user_session = self.sso.user.login(
            self.cid, config.super_user_name, config.super_user_password, config.current_app,
            remote_addr, 'Zato', totp_code=totp_manager.get_current_totp_code(config.super_user_totp_key))

        # .. create the new user ..
        _ = self.sso.user.create_user(self.cid, data, ust=super_user_session.ust,
            current_app=config.current_app, auto_approve=True)

        # .. log the account in ..
        user_session = self.sso.user.login(self.cid, username, password, config.current_app, remote_addr, user_agent)

        # .. get the user back, but without requiring for all the attributes to be returned ..
        sso_user_regular_attrs = self.sso.user.get_current_user(self.cid, user_session.ust,
            config.current_app, remote_addr, return_all_attrs=False)

        # .. make sure that none of the super-user-only attributes were returned ..
        for name in all_attrs:
            value = getattr(sso_user_regular_attrs, name)
            if value:
                raise Exception('Value of {} should not be given'.format(name))

        # .. however, regular attrs should be still available
        if sso_user_regular_attrs.is_current_super_user is not False:
            raise Exception('Value of sso_user_regular_attrs.is_current_super_user should be False')

        if sso_user_regular_attrs.username != username:
            raise Exception('Value of sso_user_regular_attrs.username should be equal to `{}` instead of `{}`'.format(
                username, sso_user_regular_attrs.username))

        if sso_user_regular_attrs.display_name != display_name:
            raise Exception('Value of sso_user_regular_attrs.display_name should be equal to `{}` instead of `{}`'.format(
                display_name, sso_user_regular_attrs.display_name))

        if sso_user_regular_attrs.totp_label != totp_label:
            raise Exception('Value of sso_user_regular_attrs.totp_label should be equal to `{}` instead of `{}`'.format(
                totp_label, sso_user_regular_attrs.totp_label))

        # .. now, get the user back but this time requiring for all the attributes to be returned ..
        sso_user_all_attrs = self.sso.user.get_current_user(self.cid, user_session.ust,
            config.current_app, remote_addr, return_all_attrs=True)

        # .. now, all of the super-user-only attributes should have been returned ..
        for name in all_attrs:
            value = getattr(sso_user_all_attrs, name)
            if value is None:
                if name not in none_allowed:
                    raise Exception('Value of {} should not be None'.format(name))

# ################################################################################################################################

    def _test_validate_totp_code(self, config, totp_manager):
        # type: (TestConfig, type_[TOTPManager]) -> None
        """ Confirms that a current TOTP code validates both via UST and via username. """

        # Local aliases
        password = config.super_user_password

        req_ctx = RequestCtx()
        req_ctx.cid = self.cid
        req_ctx.current_app = config.current_app
        req_ctx.remote_addr = '127.0.0.1'

        self.logger.info('SSO is_totp_token_valid')

        # Logging the user in should work
        info = self.sso.user.login(
            self.cid, config.super_user_name, password, config.current_app,
            '127.0.0.1', 'Zato', totp_code=totp_manager.get_current_totp_code(config.super_user_totp_key))

        # Let's get the latest code
        code = totp_manager.get_current_totp_code(config.super_user_totp_key)

        # Validate the code via UST - it will raise an exception if the code is invalid
        self.sso.totp.validate_code(req_ctx, code=code, ust=info.ust)

        # Validate the code via username - it will also raise an exception if the code is invalid
        self.sso.totp.validate_code(req_ctx, code=code, username=config.super_user_name)
# ################################################################################################################################
| 13,658
|
Python
|
.py
| 240
| 47.845833
| 130
| 0.545107
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,279
|
cleanup.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/cleanup.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from datetime import datetime
from traceback import format_exc
# gevent
from gevent import sleep
# Zato
from zato.common.odb.model import SSOAttr, SSOPasswordReset, SSOSession
from zato.server.service import Service
# ################################################################################################################################
class Cleanup(Service):
    """ Cleans up expired SSO objects, such as sessions or attributes.
    """
    def handle(self):

        # How long to wait between successive runs, in seconds
        interval = int(self.request.raw_request)

        if not self.server.is_sso_enabled:
            self.logger.info('SSO not enabled, cleanup task skipped')
            return

        while True:
            try:
                sleep(interval)

                with closing(self.odb.session()) as session:

                    # All deletions compare against the same point in time
                    utc_now = datetime.utcnow()

                    # Remove expired sessions, attributes and password-reset tokens ..
                    self._cleanup_sessions(session, utc_now)
                    self._cleanup_attrs(session, utc_now)
                    self._cleanup_flow_prt(session, utc_now)

                    # .. and commit all the deletes in one go.
                    session.commit()

            except Exception:
                self.logger.warning('Error in SSO cleanup: `%s`', format_exc())
                sleep(interval)
            else:
                self.logger.debug('SSO cleanup completed successfully')

# ################################################################################################################################

    def _cleanup_sessions(self, session, now):
        """ Deletes sessions whose expiration time has already passed. """
        query = session.query(SSOSession).filter(SSOSession.expiration_time <= now)
        return query.delete()

# ################################################################################################################################

    def _cleanup_attrs(self, session, now):
        """ Deletes attributes whose expiration time has already passed. """
        query = session.query(SSOAttr).filter(SSOAttr.expiration_time <= now)
        return query.delete()

# ################################################################################################################################

    def _cleanup_flow_prt(self, session, now):
        """ Deletes password-reset tokens (PRT) whose expiration time has already passed. """
        query = session.query(SSOPasswordReset).filter(SSOPasswordReset.expiration_time <= now)
        return query.delete()
# ################################################################################################################################
| 2,758
|
Python
|
.py
| 58
| 37.810345
| 130
| 0.453866
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,280
|
session.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/session.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from uuid import uuid4
# Zato
from zato.common.api import GENERIC
from zato.common.json_internal import loads
from zato.server.service import DateTime, ListOfDicts
from zato.server.service.internal.sso import BaseRESTService, BaseSIO
from zato.sso import status_code, ValidationError
# ################################################################################################################################
# A marker that indicates a value that will never exist
_invalid = '_invalid.{}'.format(uuid4().hex)
# ################################################################################################################################
class BaseGetSIO(BaseSIO):
    """ SimpleIO definition shared by session-related REST services. """
    input_required = 'current_app',
    input_optional = 'target_ust', 'current_ust', 'ust'
    output_optional = BaseSIO.output_optional + (DateTime('creation_time'), DateTime('expiration_time'), 'remote_addr',
        'user_agent', 'is_valid', ListOfDicts('session_state_change_list'), 'result')
    # Any optional input element not given defaults to a marker value that can never match real data
    default_value = _invalid
    skip_empty_keys = True
# ################################################################################################################################
class SessionList(BaseRESTService):
    """ Returns a list of sessions for current user or another one.
    """
    SimpleIO = BaseGetSIO

    def _handle_sso_GET(self, ctx):

        # Input elements that were not given are equal to this marker value
        not_given = self.SimpleIO.default_value

        has_target = ctx.input.target_ust != not_given
        has_current = ctx.input.current_ust != not_given

        # We either have a single UST on input or both target and current ones, but not both kinds
        if ctx.input.ust:
            if has_current or has_target:
                raise ValidationError(status_code.common.invalid_input)
        elif not (has_current and has_target):
            # Without ctx.input.ust we must require both of the other elements
            raise ValidationError(status_code.common.invalid_input)

        result = self.sso.user.session.get_list(
            self.cid, ctx.input.ust, ctx.input.target_ust, ctx.input.current_ust, ctx.input.current_app, ctx.remote_addr)

        # Convert datetime objects to ISO-8601 strings and extract the list of
        # session state changes from the opaque generic attribute, if present.
        for item in result: # type: dict
            item['creation_time'] = item['creation_time'].isoformat()
            item['expiration_time'] = item['expiration_time'].isoformat()
            opaque = item.pop(GENERIC.ATTR_NAME, None)
            if opaque:
                opaque = loads(opaque) # type: dict
                item['session_state_change_list'] = opaque.get('session_state_change_list', [])

        self.response.payload.result = result
# ################################################################################################################################
class Session(BaseRESTService):
    """ Session manipulation through REST.
    """
    SimpleIO = BaseGetSIO

# ################################################################################################################################

    def _require_target_ust(self, ctx):
        """ Raises an error unless a target UST actually was given on input. """
        if ctx.input.target_ust == _invalid:
            raise ValidationError(status_code.session.no_such_session)

# ################################################################################################################################

    def _handle_sso_GET(self, ctx):
        """ Returns details of a particular session.
        """
        self._require_target_ust(ctx)

        # Look the session up ..
        session_info = self.sso.user.session.get(self.cid, ctx.input.target_ust, ctx.input.current_ust,
            ctx.input.current_app, ctx.remote_addr, self.wsgi_environ.get('HTTP_USER_AGENT'))

        # .. and return its details on output.
        self.response.payload = session_info.to_dict()

# ################################################################################################################################

    def _handle_sso_POST(self, ctx):
        """ Verifies whether an input session exists or not.
        """
        self._require_target_ust(ctx)
        self.response.payload.is_valid = self.sso.user.session.verify(self.cid, ctx.input.target_ust, ctx.input.current_ust,
            ctx.input.current_app, ctx.remote_addr, self.wsgi_environ.get('HTTP_USER_AGENT'))

# ################################################################################################################################

    def _handle_sso_PATCH(self, ctx):
        """ Renews a session given on input.
        """
        expiration_time = self.sso.user.session.renew(self.cid, ctx.input.ust,
            ctx.input.current_app, ctx.remote_addr, self.wsgi_environ.get('HTTP_USER_AGENT'))
        self.response.payload.expiration_time = expiration_time.isoformat()
# ################################################################################################################################
| 5,006
|
Python
|
.py
| 84
| 52.5
| 130
| 0.533538
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,281
|
signup.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/sso/signup.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from uuid import uuid4
# regexp
import regex as re
# Zato
from zato.server.service import List, Service
from zato.sso import status_code, ValidationError
from zato.sso.odb.query import user_exists
# ################################################################################################################################
class Validate(Service):
    """ Validates creation of user data in accordance with configuration from sso.conf.
    """
    class SimpleIO:
        input_required = ('username', 'password', List('app_list'))
        input_optional = ('email',)
        output_required = ('is_valid',)
        output_optional = (List('sub_status'), 'return_status')
        encrypt_secrets = False
        response_elem = None

# ################################################################################################################################

    def _has_whitespace(self, data, _regexp=re.compile(r'\s', re.MULTILINE | re.UNICODE)):
        """ Returns True if data contains ASCII whitespace of any sort.
        """
        return _regexp.search(data)

# ################################################################################################################################

    def _validate_username_email(self, session, sso_conf, username, email, check_email):
        """ Validation common to usernames and emails - raises ValidationError
        if either is already taken, with sub-status codes describing which one.
        """
        # Check if user exists either by username or email
        user = user_exists(session, username, email, check_email)
        if user:

            if check_email:
                if user.username == username and user.email == email:
                    sub_status = [status_code.username.exists, status_code.email.exists]
                    return_status = sso_conf.signup.inform_if_user_exists and sso_conf.signup.inform_if_email_exists
                elif user.username == username:
                    sub_status = status_code.username.exists
                    return_status = sso_conf.signup.inform_if_user_exists
                elif user.email == email:
                    sub_status = status_code.email.exists
                    return_status = sso_conf.signup.inform_if_email_exists
                else:
                    # Defensive fallback - a match was found but on neither field exactly;
                    # previously this path would leave sub_status unassigned (NameError).
                    sub_status = status_code.username.exists
                    return_status = sso_conf.signup.inform_if_user_exists
            else:
                # E-mails were not checked, so the match could only have been made on username.
                # Previously, sub_status was never assigned on this path, which resulted
                # in a NameError in the raise statement below instead of a validation error.
                sub_status = status_code.username.exists
                return_status = sso_conf.signup.inform_if_user_exists

            raise ValidationError(sub_status, return_status)

# ################################################################################################################################

    def _validate_username(self, session, sso_conf, username):
        """ Raises ValidationError if username is invalid, e.g. is not too long.
        """
        # Username must not be too long
        if len(username) > sso_conf.signup.max_length_username:
            raise ValidationError(status_code.username.too_long, sso_conf.signup.inform_if_user_invalid)

        # Username must not contain whitespace
        if self._has_whitespace(username):
            raise ValidationError(status_code.username.has_whitespace, sso_conf.signup.inform_if_user_invalid)

        # Username must not contain restricted keywords
        for elem in sso_conf.user_validation.reject_username:
            if elem in username:
                raise ValidationError(status_code.username.invalid, sso_conf.signup.inform_if_user_invalid)

# ################################################################################################################################

    def _validate_email(self, session, sso_conf, email):
        """ Raises ValidationError if email is invalid, e.g. already exists.
        """
        # E-mail may be required
        if sso_conf.signup.is_email_required and not email:
            raise ValidationError(status_code.email.missing, sso_conf.signup.inform_if_email_invalid)

        # E-mail must not be too long
        if len(email) > sso_conf.signup.max_length_email:
            raise ValidationError(status_code.email.too_long, sso_conf.signup.inform_if_email_invalid)

        # E-mail must not contain whitespace
        if self._has_whitespace(email):
            raise ValidationError(status_code.email.has_whitespace, sso_conf.signup.inform_if_email_invalid)

        # E-mail must not contain restricted keywords
        for elem in sso_conf.user_validation.reject_email:
            if elem in email:
                raise ValidationError(status_code.email.invalid, sso_conf.signup.inform_if_email_invalid)

# ################################################################################################################################

    def _validate_password(self, session, sso_conf, password):
        """ Raises ValidationError if password is invalid, e.g. it is too simple.
        """
        # This may be encrypted while we need to validate its clear-text form
        password = self.server.decrypt(password)

        # Password may not be too short
        if len(password) < sso_conf.password.min_length:
            raise ValidationError(status_code.password.too_short, sso_conf.password.inform_if_invalid)

        # Password may not be too long
        if len(password) > sso_conf.password.max_length:
            raise ValidationError(status_code.password.too_long, sso_conf.password.inform_if_invalid)

        # Password's default complexity is checked case-insensitively
        password = password.lower()

        # Password may not contain most commonly used ones
        for elem in sso_conf.password.reject_list:
            if elem in password:
                raise ValidationError(status_code.password.invalid, sso_conf.password.inform_if_invalid)

# ################################################################################################################################

    def _validate_app_list(self, session, sso_conf, current_app, app_list):
        """ Raises ValidationError if input app_list is invalid, e.g. includes an unknown one.
        """
        # All of input apps must have been already defined in configuration
        for app in app_list:
            if app not in sso_conf.apps.all:
                raise ValidationError(status_code.app_list.invalid, sso_conf.signup.inform_if_app_invalid)

        # Current app, the one the user is signed up through, must allow user signup
        if current_app not in sso_conf.apps.signup_allowed:
            raise ValidationError(status_code.app_list.no_signup, sso_conf.signup.inform_if_app_invalid)

# ################################################################################################################################

    def handle(self, _invalid=uuid4().hex):
        """ Runs all the validation steps, converting any ValidationError
        into is_valid=False plus optional sub-status codes on output.
        """
        # Local aliases
        sso_conf = self.server.sso_config
        input = self.request.input
        email = input.get('email') or _invalid # To make sure it never matches anything if not given on input

        # If e-mails are encrypted, we cannot look them up without decrypting them all,
        # which is not currently implemented.
        check_email = not sso_conf.main.encrypt_email

        with closing(self.odb.session()) as session:

            # Each of these calls may raise ValidationError, which we catch and return its subcode to our caller.
            try:

                # This one checks if username or email are not already taken using one SQL query
                self._validate_username_email(session, sso_conf, input.username, email, check_email)

                # These check individual elements
                self._validate_username(session, sso_conf, input.username)
                self._validate_password(session, sso_conf, input.password)
                if check_email:
                    self._validate_email(session, sso_conf, email)

            except ValidationError as e:
                self.logger.warning('Could not validate user `%s`, sub_status `%s`', input.username, e.sub_status)
                self.response.payload.is_valid = False
                if e.return_status:
                    self.response.payload.sub_status = e.sub_status
            else:
                self.response.payload.is_valid = True
# ################################################################################################################################
| 8,359
|
Python
|
.py
| 136
| 51.838235
| 130
| 0.569455
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,282
|
sync.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/common/sync.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from dataclasses import dataclass
# Zato
from zato.common.broker_message import Common as BrokerMessageCommon
from zato.server.service import Model, Service
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class SyncObjectsRequest(Model):
    """ Flags indicating which kinds of in-RAM objects to synchronize with the ODB. """
    # Whether to synchronize security definitions
    security: 'bool' = True
    # Whether to synchronize pub/sub objects
    pubsub: 'bool' = True
# ################################################################################################################################
# ################################################################################################################################
class SyncObjectsImpl(Service):
    """ Syncs in-RAM objects with what is in the ODB.
    """
    name = 'pub.zato.common.sync-objects-impl'
    input = SyncObjectsRequest

    def handle(self):

        # Local aliases
        request:'SyncObjectsRequest' = self.request.input

        # Optionally, synchronize in-RAM state of security definitions
        if request.security:
            self.logger.info('Synchronizing security definitions')
            self.server.worker_store.sync_security()

        # Optionally, synchronize in-RAM state of pub/sub
        if request.pubsub:
            self.logger.info('Synchronizing pub/sub objects')
            self.server.worker_store.sync_pubsub()
# ################################################################################################################################
# ################################################################################################################################
class SyncObjects(Service):
    """ Syncs in-RAM objects with what is in the ODB.
    """
    name = 'pub.zato.common.sync-objects'
    input = SyncObjectsRequest

    def handle(self):

        # Turn our input into a dict-based message ..
        msg = self.request.input.to_dict()

        # .. enrich it with the broker action code ..
        msg['action'] = BrokerMessageCommon.Sync_Objects.value

        # .. and publish the request for servers to handle.
        self.broker_client.publish(msg)
# ################################################################################################################################
# ################################################################################################################################
| 2,722
|
Python
|
.py
| 52
| 46.961538
| 130
| 0.416226
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,283
|
delete.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/common/delete.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from copy import deepcopy
from dataclasses import dataclass
# gevent
from gevent import sleep
# SQLAlchemy
from sqlalchemy import delete
# Zato
from zato.common.api import CommonObject
from zato.common.odb.model.base import Base as BaseTable
from zato.common.odb.query.common import get_object_list_by_id_list, get_object_list_by_name_list, \
get_object_list_by_name_contains
from zato.common.test.config import TestConfig
from zato.common.typing_ import any_, anylist, callable_, intlistnone, intnone, strdict, strlistnone, strnone, type_
from zato.server.connection.http_soap import BadRequest
from zato.server.service import Model, Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from sqlalchemy.orm import Session as SASession
from zato.common.typing_ import strlist
Bunch = Bunch
strlist = strlist
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class BaseDeleteObjectsRequest(Model):
    """ Deletion criteria - the first one found, in the order
    id_list, id, name, name_list, pattern, is used by the implementation.
    """
    id: intnone             # A single object ID
    id_list: intlistnone    # A list of object IDs
    name: strnone           # A single, full object name
    name_list: strlistnone  # A list of full object names
    pattern: strnone        # A substring matched against object names
@dataclass(init=False)
class BaseDeleteObjectsResponse(Model):
    """ Carries details of the objects that were actually deleted. """
    objects: anylist
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class DeleteObjectsImplRequest(BaseDeleteObjectsRequest):
    """ Input to DeleteObjectsImpl - deletion criteria plus the table and service to use. """
    table: BaseTable              # SQLAlchemy table the objects live in
    delete_class: type_[Service]  # Service class that deletes a single object by ID
@dataclass(init=False)
class DeleteObjectsImplResponse(BaseDeleteObjectsResponse):
    """ Response from DeleteObjectsImpl - same shape as the base response. """
    pass
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class DeleteObjectsRequest(BaseDeleteObjectsRequest):
    """ Public request - deletion criteria plus the string type of objects to delete. """
    # A CommonObject value, e.g. CommonObject.PubSub_Topic
    object_type: str
@dataclass(init=False)
class DeleteObjectsResponse(BaseDeleteObjectsResponse):
    """ Public response - same shape as the base response. """
    pass
# ################################################################################################################################
# ################################################################################################################################
class DeleteObjectsImpl(Service):
    """ Deletes objects from a given ODB table, first resolving input criteria
    (IDs, names or name patterns) into a concrete list of object IDs.
    """
    class SimpleIO:
        input = DeleteObjectsImplRequest
        output = DeleteObjectsImplResponse

    def _get_object_data(self, query:'any_', table:'BaseTable', where:'any_') -> 'anylist':
        """ Runs a query against the given table and returns matching rows as a list of dicts. """
        with closing(self.odb.session()) as session:
            object_data = query(session, table, where)
            object_data = [dict(elem) for elem in object_data]
        return object_data

# ################################################################################################################################

    def _delete_object_list(self, table:'BaseTable', object_id_list:'anylist') -> 'anylist':
        """ Deletes objects one by one, by ID, returning details of those actually deleted.
        A failure to delete one object does not stop the deletion of the remaining ones.
        """
        # Make sure we have a list of integers on input
        object_id_list = [int(elem) for elem in object_id_list]

        # We want to return a list of their IDs along with names so that the API users can easily understand what was deleted
        # which means that we need to construct the list upfront as otherwise, once we delete an object,
        # such information will be no longer available.
        object_data = self._get_object_data(get_object_list_by_id_list, table, object_id_list)

        # Our response to produce
        out:'anylist' = []

        # A list of object IDs that we were able to delete
        objects = []

        # Go through each of the input object IDs ..
        for object_id in object_id_list:

            # .. invoke the service that will delete the object ..
            try:
                self.invoke(self.request.input.delete_class.get_name(), {
                    'id': object_id
                })
            except Exception as e:
                # Note - logger.warning, not the deprecated logger.warn alias
                self.logger.warning('Exception while deleting object `%s` -> `%s`', object_id, e)
            else:
                # If we are here, it means that the object was deleted
                # in which case we add its ID for later use ..
                objects.append(object_id)

            # .. sleep for a while in case to make sure there is no sudden surge of deletions ..
            sleep(0.01)

        # Go through each of the IDs given on input and return it on output too
        # as long as we actually did delete such an object.
        for elem in object_data:
            if elem['id'] in objects:
                out.append(elem)

        # Return the response to our caller
        return out

# ################################################################################################################################

    def _get_object_id_list(self, query:'any_', table:'BaseTable', where:'any_') -> 'anylist':
        """ Runs a query and returns only the IDs of the matching objects. """
        object_data = self._get_object_data(query, table, where)
        out = [elem['id'] for elem in object_data]
        return out

# ################################################################################################################################

    def handle(self) -> 'None':
        """ Resolves input criteria into a list of object IDs, deletes them
        and returns the details of the objects actually deleted.
        """
        # Type checks
        object_id_list:'anylist'

        # Local aliases
        input = self.request.input # type: DeleteObjectsImplRequest

        # We can be given several types of input elements in the incoming request
        # and we always need to build a list of IDs out of them, unless we already
        # have a list of IDs on input.

        # This is a list - use it as-is
        if input.id_list:
            object_id_list = input.id_list

        # It is an individual object ID - we can turn it into a list as-is
        elif input.id:
            object_id_list = [input.id]

        # It is an individual object name - turn it into a list look it up in the database
        elif input.name:
            query:'callable_' = get_object_list_by_name_list
            where = [input.name]
            object_id_list = self._get_object_id_list(query, input.table, where)

        # It is a list of names - look up objects matching them now
        elif input.name_list:
            query:'callable_' = get_object_list_by_name_list
            where = input.name_list if isinstance(input.name_list, list) else [input.name_list] # type: ignore
            object_id_list = self._get_object_id_list(query, input.table, where)

        # This is a list of patterns but not necessarily full object names as above
        elif input.pattern:
            query:'callable_' = get_object_list_by_name_contains
            where = input.pattern
            object_id_list = self._get_object_id_list(query, input.table, where)

        else:
            raise BadRequest(self.cid, 'No deletion criteria were given on input')

        # No matter how we arrived at this result, we have a list of object IDs
        # and we can delete each of them now ..
        objects = self._delete_object_list(input.table, object_id_list)

        # .. now, we can produce a response for our caller ..
        response = DeleteObjectsImplResponse()
        response.objects = objects

        # .. and return it on output
        self.response.payload = response
# ################################################################################################################################
# ################################################################################################################################
class DeleteObjects(Service):
    """ Deletes objects of a given type, delegating the actual work to DeleteObjectsImpl. """

    name = 'zato.common.delete-objects'

    class SimpleIO:
        input = DeleteObjectsRequest
        output = DeleteObjectsResponse

    def handle(self) -> 'None':

        # Zato
        from zato.common.odb.model import PubSubTopic
        from zato.server.service.internal.pubsub.topic import Delete as DeleteTopic

        # Add type hints
        request_input:'DeleteObjectsRequest' = self.request.input

        # Maps incoming string names of objects to their actual ODB classes
        table_by_type:'strdict' = {
            CommonObject.PubSub_Topic: PubSubTopic.__table__,
        }

        # Maps incoming string names of objects to services that actually delete them
        deleter_by_type = {
            CommonObject.PubSub_Topic: DeleteTopic,
        }

        # Resolve the input object type into a table and a deleting service ..
        table = table_by_type[request_input.object_type]
        delete_class = deleter_by_type[request_input.object_type]

        # .. clone our input to form the basis of the request that the implementation will receive,
        # .. dropping elements that the implementation does not need ..
        impl_request_dict:'strdict' = deepcopy(request_input.to_dict())
        _ = impl_request_dict.pop('object_type', None)

        # .. prepare extra parameters that the implementation expects ..
        extra = {
            'table': table,
            'delete_class': delete_class,
        }

        # .. build a request for the implementation service and invoke it ..
        impl_request:'DeleteObjectsImplRequest' = DeleteObjectsImplRequest.from_dict(impl_request_dict, extra)
        result = self.invoke(DeleteObjectsImpl, impl_request)

        # .. and return the result to our caller.
        self.response.payload = result
# ################################################################################################################################
# ################################################################################################################################
class DeleteMany(Service):
    """ Deletes rows across many tables whose name-like columns contain any of the input patterns.

    If no patterns are given on input, a default list of test-related patterns is used,
    which makes this service suitable for cleaning up after tests.
    """
    name = 'pub.zato.common.delete-many'
    input = '-name'

# ################################################################################################################################

    def _delete(self, session:'SASession', tables:'any_', pattern:'strlist') -> 'None':
        """ For each table and each of its columns given, deletes all the rows
        whose column contains any of the input patterns.

        Raises an exception if the pattern list is empty - otherwise we would delete nothing silently.
        """
        if not pattern:
            raise Exception('No patterns were given on input')

        for table, columns in tables.items():
            for column in columns: # type: ignore
                for elem in pattern:
                    # Renamed from "filter" to avoid shadowing the builtin of the same name
                    criterion = column.contains(elem) # type: ignore
                    delete_query = delete(table).where(criterion)
                    session.execute(delete_query)

        session.commit()

# ################################################################################################################################

    def _delete_rest(self, session:'SASession', pattern:'strlist') -> 'None':
        """ Deletes REST channels and outgoing connections matching the patterns.
        """
        # Zato
        from zato.common.odb.model import HTTPSOAP

        tables:'any_' = {
            HTTPSOAP.__table__: [HTTPSOAP.name],
        }
        self._delete(session, tables, pattern)

# ################################################################################################################################

    def _delete_security(self, session:'SASession', pattern:'strlist') -> 'None':
        """ Deletes security definitions matching the patterns.
        """
        # Zato
        from zato.common.odb.model import SecurityBase

        tables:'any_' = {
            SecurityBase.__table__: [SecurityBase.name],
        }
        self._delete(session, tables, pattern)

# ################################################################################################################################

    def _delete_pubsub(self, session:'SASession', pattern:'strlist') -> 'None':
        """ Deletes pub/sub endpoints and topics matching the patterns.
        """
        # Zato
        from zato.common.odb.model import PubSubEndpoint, PubSubTopic

        tables:'any_' = {
            PubSubEndpoint.__table__: [PubSubEndpoint.name],
            PubSubTopic.__table__: [PubSubTopic.name],
        }
        self._delete(session, tables, pattern)

# ################################################################################################################################

    def _delete_sql(self, session:'SASession', pattern:'strlist') -> 'None':
        """ Deletes SQL connection pools matching the patterns.
        """
        # Zato
        from zato.common.odb.model import SQLConnectionPool

        tables:'any_' = {
            SQLConnectionPool.__table__: [SQLConnectionPool.name],
        }
        self._delete(session, tables, pattern)

# ################################################################################################################################

    def _delete_wsx(self, session:'SASession', pattern:'strlist') -> 'None':
        """ Deletes WebSocket channels matching the patterns.
        """
        # Zato
        from zato.common.odb.model import ChannelWebSocket

        tables:'any_' = {
            ChannelWebSocket.__table__: [ChannelWebSocket.name],
        }
        self._delete(session, tables, pattern)

# ################################################################################################################################

    def _delete_scheduler(self, session:'SASession', pattern:'strlist') -> 'None':
        """ Deletes scheduler jobs matching the patterns.
        """
        # Zato
        from zato.common.odb.model import Job

        tables:'any_' = {
            Job.__table__: [Job.name],
        }
        self._delete(session, tables, pattern)

# ################################################################################################################################

    def _delete_generic(self, session:'SASession', pattern:'strlist') -> 'None':
        """ Deletes generic connections, their definitions and generic objects matching the patterns.
        """
        # Zato
        from zato.common.odb.model import GenericConn, GenericConnDef, GenericObject

        tables:'any_' = {
            GenericConn.__table__: [GenericConn.name],
            GenericConnDef.__table__: [GenericConnDef.name],
            GenericObject.__table__: [GenericObject.name],
        }
        self._delete(session, tables, pattern)

# ################################################################################################################################

    def _delete_misc(self, session:'SASession', pattern:'strlist') -> 'None':
        """ Deletes assorted other object types - caches, AMQP/WMQ definitions and channels,
        e-mail connections, RBAC objects, TLS CA certs and services - matching the patterns.
        """
        # Zato
        from zato.common.odb.model import Cache, ChannelAMQP, ChannelWMQ, ConnDefAMQP, ConnDefWMQ, IMAP, OutgoingAMQP, \
            OutgoingFTP, OutgoingOdoo, OutgoingSAP, OutgoingWMQ, RBACClientRole, RBACPermission, RBACRole, TLSCACert, \
            Service, SMTP

        tables:'any_' = {
            Cache.__table__: [Cache.name],
            ChannelAMQP.__table__: [ChannelAMQP.name],
            ChannelWMQ.__table__: [ChannelWMQ.name],
            ConnDefAMQP.__table__: [ConnDefAMQP.name],
            ConnDefWMQ.__table__: [ConnDefWMQ.name],
            IMAP.__table__: [IMAP.name],
            OutgoingAMQP.__table__: [OutgoingAMQP.name],
            OutgoingFTP.__table__: [OutgoingFTP.name],
            OutgoingOdoo.__table__: [OutgoingOdoo.name],
            OutgoingSAP.__table__: [OutgoingSAP.name],
            OutgoingWMQ.__table__: [OutgoingWMQ.name],
            RBACClientRole.__table__: [RBACClientRole.name],
            RBACPermission.__table__: [RBACPermission.name],
            RBACRole.__table__: [RBACRole.name],
            TLSCACert.__table__: [TLSCACert.name],
            Service.__table__: [Service.name],
            SMTP.__table__: [SMTP.name],
        }
        self._delete(session, tables, pattern)

# ################################################################################################################################

    def handle(self) -> 'None':

        # Local variables - whitespace-separated patterns on input, if any
        name = self.request.input.get('name') or '' # type: ignore
        name = [elem.strip() for elem in name.split()] # type: ignore

        # No input patterns -> fall back to the default list of test-related ones.
        # Note that the previously duplicated 'Enmasse' entry was removed - deleting
        # by the same pattern twice was redundant.
        if not name:
            name:'strlist' = [
                '.abc-123-',
                '.complex-',
                'Demo.',
                'Enmasse',
                'enmasse',
                'enmasse1',
                '/test/api/complex',
                '/test/complex',
                'Test Basic Auth',
                'Test.Complex',
                '-test-cli-',
                'test.wsx',
                'zato-test',
                TestConfig.pubsub_topic_name_perf_auto_create,
                TestConfig.pubsub_topic_name_unique_auto_create,
            ]

        with closing(self.odb.session()) as session:
            self._delete_rest(session, name)
            self._delete_security(session, name)
            self._delete_pubsub(session, name)
            self._delete_sql(session, name)
            self._delete_wsx(session, name)
            self._delete_scheduler(session, name)
            self._delete_generic(session, name)
            self._delete_misc(session, name)
            session.commit()
# ################################################################################################################################
# ################################################################################################################################
| 17,419
|
Python
|
.py
| 326
| 44.51227
| 130
| 0.486182
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,284
|
create.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/common/create.py
|
# -*- coding: utf-8 -*-
# stdlib
from dataclasses import dataclass
# Zato
from zato.common.api import CommonObject, PUBSUB
from zato.common.exception import BadRequest
from zato.common.typing_ import any_, anylist, anylistnone, intlistnone, intnone, strdict, strlistnone, strnone
from zato.server.service import Model, Service
# ################################################################################################################################
# ################################################################################################################################
_ps_default = PUBSUB.DEFAULT
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class DataItem(Model):
    """ Describes one object to be created - its name, its type and any initial data to populate it with.
    """
    name: str
    object_type: str
    initial_data: any_
@dataclass(init=False)
class CreateObjectsRequest(Model):
    """ Input to the object-creation services - objects may be pointed to by ID, name,
    name pattern or given directly as a list of already-built objects.
    """
    object_type: str
    id: intnone
    id_list: intlistnone
    name: strnone
    name_list: strlistnone
    object_list: anylistnone
    pattern: strnone
    initial_data: any_
@dataclass(init=False)
class CreateObjectsResponse(Model):
    """ Carries the list of objects that were created.
    """
    objects: anylist
# ################################################################################################################################
# ################################################################################################################################
class CreateObjects(Service):
    """ Creates objects of the type given on input, e.g. pub/sub topics, endpoints,
    subscriptions or Basic Auth security definitions. Objects that already exist are ignored.
    """
    name = 'zato.common.create-objects'

    input = CreateObjectsRequest
    output = CreateObjectsResponse

# ################################################################################################################################

    def _get_basic_pubsub_endpoint(self, name:'str', initial_data:'strdict') -> 'strdict':
        """ Returns default values for a new pub/sub endpoint.

        Note that name and initial_data belong to the shared signature of all
        the _get_basic_* methods but are not used by this particular one.
        """
        request = {
            'role': PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id,
            'is_active': True,
            'is_internal': False,
            'endpoint_type': PUBSUB.ENDPOINT_TYPE.REST.id
        }
        return request

# ################################################################################################################################

    def _get_basic_pubsub_publish(self, name:'str', initial_data:'strdict') -> 'strdict':
        """ Returns default values for a publish request - there are none, everything comes from input.
        """
        request:'strdict' = {}
        return request

# ################################################################################################################################

    def _get_basic_pubsub_subscription(self, name:'str', initial_data:'strdict') -> 'strdict':
        """ Returns default values for a new pub/sub subscription.
        """
        request = {
            'is_active': True,
            'is_internal': False,
            'endpoint_type': PUBSUB.ENDPOINT_TYPE.REST.id,
        }
        return request

# ################################################################################################################################

    def _get_basic_pubsub_topic(self, name:'str', initial_data:'strdict') -> 'strdict':
        """ Returns default values for a new pub/sub topic, based on system-wide pub/sub defaults.
        """
        request = {
            'has_gd': True,
            'is_active': True,
            'is_api_sub_allowed': True,
            'cluster_id': 1,
            'task_sync_interval': _ps_default.TASK_SYNC_INTERVAL,
            'task_delivery_interval': _ps_default.TASK_DELIVERY_INTERVAL,
            'depth_check_freq': _ps_default.DEPTH_CHECK_FREQ,
            'max_depth_gd': _ps_default.TOPIC_MAX_DEPTH_GD,
            'max_depth_non_gd': _ps_default.TOPIC_MAX_DEPTH_NON_GD,
            'pub_buffer_size_gd': _ps_default.PUB_BUFFER_SIZE_GD,
        }
        return request

# ################################################################################################################################

    def _get_basic_security_basic_auth(self, name:'str', initial_data:'strdict') -> 'strdict':
        """ Returns default values for a new Basic Auth security definition.
        """
        request = {
            'is_active': True,
            'username': 'zato-test-' + name,
            'realm': 'Zato.Test',
        }
        return request

# ################################################################################################################################

    def _extract_response_items(self, response:'strdict') -> 'strdict':
        """ Unwraps a service response from its single 'zato...'-prefixed wrapper element, if there is one.
        """
        # Our response to produce
        out:'strdict' = {}

        # A single-key response whose key starts with 'zato' is a wrapper - unwrap it
        if len(response) == 1:
            keys = list(response)
            response_wrapper = keys[0]
            if response_wrapper.startswith('zato'):
                response = response[response_wrapper]

        out.update(response)
        return out

# ################################################################################################################################

    def _turn_names_into_objects_list(self, input:'CreateObjectsRequest') -> 'CreateObjectsRequest':
        """ Makes sure input.object_list is populated, building DataItem objects out of input names if needed.
        """
        # Requests of these types will not have any names on input ..
        no_name_requests = {
            CommonObject.PubSub_Publish,
            CommonObject.PubSub_Subscription,
        }

        # .. populate empty names per the above ..
        if input.object_type in no_name_requests:
            input.name_list = ['']

        # .. or build a list of names out of what we have on input ..
        else:
            input.name_list = input.name_list or []

        # .. at this point, we know that we have a list of names ..
        # .. so we can turn them into objects, unless we already have objects on input ..
        if not input.object_list:

            # .. a list for us to populate ..
            object_list = []

            # .. go through each input name ..
            for name in input.name_list:

                # .. turn it into an object ..
                data = DataItem()

                # .. populate its fields ..
                data.name = name
                data.object_type = input.object_type
                data.initial_data = input.initial_data

                # .. append it for later use ..
                object_list.append(data)

            # .. assign the object list to what we are to return ..
            input.object_list = object_list

        # .. finally, we can return everything to our caller.
        return input

# ################################################################################################################################

    def handle(self):
        """ Creates each object given on input, logging and skipping the ones that already exist.
        """
        # Zato
        from zato.server.service.internal.pubsub.endpoint import Create as CreateEndpoint
        from zato.server.service.internal.pubsub.publish import Publish as PublishMessage
        from zato.server.service.internal.pubsub.subscription import Create as CreateSubscription
        from zato.server.service.internal.pubsub.topic import Create as CreateTopic
        from zato.server.service.internal.security.basic_auth import Create as SecBasicAuthCreate

        # Local variables
        input:'CreateObjectsRequest' = self.request.input

        # Our response to produce:
        out = CreateObjectsResponse()
        out.objects = []

        # Maps incoming string names of objects to services that actually create them
        # (the previous comment here incorrectly said "delete")
        service_map = {
            CommonObject.PubSub_Endpoint: CreateEndpoint,
            CommonObject.PubSub_Publish: PublishMessage,
            CommonObject.PubSub_Subscription: CreateSubscription,
            CommonObject.PubSub_Topic: CreateTopic,
            CommonObject.Security_Basic_Auth: SecBasicAuthCreate,
        }

        # Maps incoming string names of objects to functions that prepare basic create requests
        request_func_map = {
            CommonObject.PubSub_Endpoint: self._get_basic_pubsub_endpoint,
            CommonObject.PubSub_Publish: self._get_basic_pubsub_publish,
            CommonObject.PubSub_Subscription: self._get_basic_pubsub_subscription,
            CommonObject.PubSub_Topic: self._get_basic_pubsub_topic,
            CommonObject.Security_Basic_Auth: self._get_basic_security_basic_auth,
        }

        # Get the service that will create the object
        service = service_map[input.object_type]

        # Turn names into objects
        input = self._turn_names_into_objects_list(input)

        # At this point, we know we have a list of objects, even if empty.
        if not input.object_list:
            return

        # Log what we are about to do
        self.logger.info('Creating objects -> len=%s', len(input.object_list))

        # .. go through each name we are given on input ..
        for data in input.object_list:

            # .. get a request with basic details ..
            request_func = request_func_map[data.object_type]
            request = request_func(data.name, data.initial_data)

            # .. add the name from input ..
            request['name'] = data.name

            # .. populate the request with initial data ..
            if data.initial_data:
                for key, value in data.initial_data.items():
                    request[key] = value

            # .. create an object now ..
            try:
                response = self.invoke(service.get_name(), request)
                response = self._extract_response_items(response)
                out.objects.append(response)
            except BadRequest as e:
                # .. ignore objects that already exist ..
                self.logger.info('Ignoring -> %s', e)
            else:
                # .. finally, store information in logs that we are done.
                self.logger.info('Object created -> %s -> %s', data.name, response)

        # Produce the response for our caller
        self.response.payload = out
# ################################################################################################################################
# ################################################################################################################################
| 9,923
|
Python
|
.py
| 189
| 43.026455
| 130
| 0.485203
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,285
|
import_.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/common/import_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from copy import deepcopy
from dataclasses import dataclass
from json import dumps
# SQLAlchemy
from sqlalchemy import insert
# Zato
from zato.common.api import GENERIC, PUBSUB, Sec_Def_Type, Zato_No_Security
from zato.common.odb.model import HTTPBasicAuth, PubSubEndpoint, PubSubSubscription, PubSubTopic, SecurityBase
from zato.common.odb.query.common import get_object_list, get_object_list_by_columns, get_object_list_by_name_list
from zato.common.pubsub import new_sub_key
from zato.common.crypto.api import CryptoManager
from zato.common.typing_ import cast_
from zato.server.service import Model, Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, dictlist, strdict
# ################################################################################################################################
# ################################################################################################################################
# Convenience aliases for the SQLAlchemy Table objects underlying the ODB models used in this module
HTTPBasicAuthTable:'any_' = HTTPBasicAuth.__table__
SecurityBaseTable:'any_' = SecurityBase.__table__
PubSubEndpointTable:'any_' = PubSubEndpoint.__table__
PubSubSubscriptionTable:'any_' = PubSubSubscription.__table__
PubSubTopicTable:'any_' = PubSubTopic.__table__

# Bound insert-statement constructor for the Basic-Auth-specific table
HTTPBasicAuthInsert = HTTPBasicAuthTable.insert

# ################################################################################################################################
# ################################################################################################################################

# Shortcuts - pub/sub default values and the name of the opaque-attributes column
Default = PUBSUB.DEFAULT
Generic_Attr_Name = GENERIC.ATTR_NAME
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class ObjectContainer(Model):
    """ Carries lists of pub/sub and security objects, grouped by their type.
    """
    pubsub_topic: 'dictlist | None' = None
    pubsub_endpoint: 'dictlist | None' = None
    pubsub_subscription: 'dictlist | None' = None
    basic_auth: 'dictlist | None' = None

# ################################################################################################################################

    def get_topic_id_by_name(self, name:'str') -> 'any_':
        """ Returns the ID of the topic of the given name, raising an exception if there is no such topic.
        """
        found = next((item for item in self.pubsub_topic if item['name'] == name), None) # type: ignore
        if found is None:
            raise Exception(f'Topic not found -> {name}')
        return found['id']

# ################################################################################################################################

    def get_endpoint_by_name(self, name:'str') -> 'any_':
        """ Returns the endpoint of the given name as a dict, raising an exception if there is no such endpoint.
        """
        found = next((item for item in self.pubsub_endpoint if item['name'] == name), None) # type: ignore
        if found is None:
            raise Exception(f'Endpoint not found -> {name}')
        return found
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class ItemsInfo(Model):
    """ Partitions objects into ones that need to be inserted and ones that need to be updated.
    """
    to_add: 'dictlist'    # Objects that do not exist yet
    to_update: 'dictlist' # Objects that already exist
# ################################################################################################################################
# ################################################################################################################################
class ImportObjects(Service):
""" Imports multiple pub/sub objects en masse.
"""
name = 'zato.common.import-objects'
    def handle(self):
        """ Imports Basic Auth definitions first, then pub/sub topics and endpoints,
        and finally pub/sub subscriptions - each phase is committed before the next
        one so that later phases can look up what earlier ones created.
        """
        # data = test_data
        data = self.request.raw_request

        # Data that we received on input
        input:'ObjectContainer' = ObjectContainer.from_dict(data)
        has_input:'any_' = input.basic_auth or input.pubsub_topic or input.pubsub_endpoint or input.pubsub_subscription

        # Nothing to do if no object list was given at all
        if not has_input:
            return

        self.logger.info('*' * 60)

        # Data that already exists
        with closing(self.odb.session()) as session:

            # All security definitions that currently exist
            sec_list = self._get_sec_list(session)

            # If we have security definitions on input,
            # import them first, as they may be needed in subsequent steps.
            if input.basic_auth:
                sec_info = self._handle_basic_auth_input(input.basic_auth, sec_list)
                if sec_info.to_add:
                    self.create_basic_auth(session, sec_info.to_add, input.basic_auth)
                if sec_info.to_update:
                    self.update_objects(session, SecurityBase, sec_info.to_update)
                if sec_info.to_add:
                    self.logger.info('Basic Auth created: %s', len(sec_info.to_add))
                if sec_info.to_update:
                    self.logger.info('Basic Auth updated: %s', len(sec_info.to_update))

            session.commit()

            # Rebuild it now because we may have added some above
            sec_list = self._get_sec_list(session)

            # All pub/sub objects that currently exist
            existing = self._get_existing_data(session, needs_subs=True)

            # Make sure we always have lists of dicts
            input_topics = input.pubsub_topic or []
            existing_topics = existing.pubsub_topic or []

            input_endpoints = input.pubsub_endpoint or []
            existing_endpoints = existing.pubsub_endpoint or []

            # Split topics and endpoints into new ones vs. updates of existing ones
            topics_info = self._find_items(input_topics, existing_topics)
            endpoints_info = self._find_items(input_endpoints, existing_endpoints)

            # Resolve names into IDs and pack opaque attributes before hitting the database
            self._enrich_topics(topics_info.to_add)
            self._enrich_topics(topics_info.to_update)

            self._enrich_endpoints(endpoints_info.to_add, sec_list)
            self._enrich_endpoints(endpoints_info.to_update, sec_list)

            if topics_info.to_add:
                topics_insert = self.create_objects(PubSubTopicTable, topics_info.to_add)
                session.execute(topics_insert)

            if topics_info.to_update:
                self.update_objects(session, PubSubTopic, topics_info.to_update)

            if endpoints_info.to_add:
                endpoints_insert = self.create_objects(PubSubEndpointTable, endpoints_info.to_add)
                session.execute(endpoints_insert)

            if endpoints_info.to_update:
                self.update_objects(session, PubSubEndpoint, endpoints_info.to_update)

            # Commit topics and endpoints so that we can find them when we handle subscriptions below
            session.commit()

            # Load it again, now that we added topics and endpoints
            existing = self._get_existing_data(session, needs_subs=True)

            input_subscriptions = input.pubsub_subscription or []
            existing_subscriptions = existing.pubsub_subscription or []

            # Expand each incoming subscription into one row per (endpoint, topic) pair
            input_subscriptions = self._resolve_input_subscriptions(input_subscriptions, existing)
            subscriptions_info = self._find_subscriptions(input_subscriptions, existing_subscriptions)

            if subscriptions_info.to_add:
                subscriptions_insert = self.create_objects(PubSubSubscriptionTable, subscriptions_info.to_add)
                session.execute(subscriptions_insert)

            if subscriptions_info.to_update:
                self.update_objects(session, PubSubSubscription, subscriptions_info.to_update)

            # Commit once more, this time around, it will include subscriptions
            session.commit()

            # Summarize in the logs what was done in each phase
            if topics_info.to_add:
                self.logger.info('Topics created: %s', len(topics_info.to_add))
            if topics_info.to_update:
                self.logger.info('Topics updated: %s', len(topics_info.to_update))

            if endpoints_info.to_add:
                self.logger.info('Endpoints created: %s', len(endpoints_info.to_add))
            if endpoints_info.to_update:
                self.logger.info('Endpoints updated: %s', len(endpoints_info.to_update))

            if subscriptions_info.to_add:
                self.logger.info('Subscriptions created: %s', len(subscriptions_info.to_add))
            if subscriptions_info.to_update:
                self.logger.info('Subscriptions updated: %s', len(subscriptions_info.to_update))
# ################################################################################################################################
def _get_rest_conn_id_by_name(self, name:'str') -> 'int':
if conn := self.server.worker_store.get_outconn_rest(name):
conn_config = conn['config']
conn_id = conn_config['id']
return conn_id
else:
raise Exception(f'Outgoing REST connection not found -> {name}')
# ################################################################################################################################
def _get_rest_conn_id_by_item(self, item:'strdict') -> 'int | None':
if rest_connection := item.get('rest_connection'): # type: ignore
rest_connection_id = self._get_rest_conn_id_by_name(rest_connection)
return rest_connection_id
# ################################################################################################################################
    def _resolve_input_subscriptions(self, input_subscriptions:'dictlist', existing:'ObjectContainer') -> 'dictlist':
        """ Expands each incoming subscription dict into one row-shaped dict per topic it subscribes to,
        resolving endpoint and topic names into their database IDs.
        """
        out:'dictlist' = []

        for item in deepcopy(input_subscriptions):

            # The name element is not part of the database row
            _ = item.pop('name', None)

            # Resolve the endpoint's name into its ID and type
            endpoint_name = item.pop('endpoint_name')
            endpoint = existing.get_endpoint_by_name(endpoint_name)
            endpoint_id = endpoint['id']
            endpoint_type = endpoint['endpoint_type']

            # Optional flags - fall back to system-wide pub/sub defaults
            has_gd = item.get('has_gd', Default.Has_GD)
            wrap_one_msg_in_list = item.get('wrap_one_msg_in_list', Default.Wrap_One_Msg_In_List)
            delivery_err_should_block = item.get('delivery_err_should_block', Default.Delivery_Err_Should_Block)

            # Resolve a potential outgoing REST connection
            out_http_soap_id = self._get_rest_conn_id_by_item(item)

            # A new item needs to be created for each topic this endpoint is subscribed to ..
            for topic_name in item.pop('topic_list_json'):

                # .. turn a topic's name into its ID ..
                topic_id = existing.get_topic_id_by_name(topic_name)

                # .. build basic information about the subscription ..
                new_item:'strdict' = {
                    'topic_id': topic_id,
                    'endpoint_id': endpoint_id,
                    'delivery_method': item['delivery_method'],
                    'creation_time': self.time.utcnow_as_float(),
                    'sub_key': new_sub_key(endpoint_type),
                    'sub_pattern_matched': 'auto-import',
                    'has_gd': has_gd,
                    'wrap_one_msg_in_list': wrap_one_msg_in_list,
                    'delivery_err_should_block': delivery_err_should_block,
                    'out_http_soap_id': out_http_soap_id,
                }

                # .. append the item for later use ..
                out.append(new_item)

        # .. now, we can return everything to our caller.
        return out
# ################################################################################################################################
def _find_subscriptions(self, incoming:'dictlist', existing:'dictlist') -> 'ItemsInfo':
# Our response to produce
out = ItemsInfo()
out.to_add = []
out.to_update = []
for new_item in deepcopy(incoming):
for existing_item in existing:
subscription_id, topic_id, endpoint_id = existing_item
if new_item['topic_id'] == topic_id and new_item['endpoint_id'] == endpoint_id:
new_item['id'] = subscription_id
_ = new_item.pop('sub_key', None)
_ = new_item.pop('creation_time', None)
_ = new_item.pop('sub_pattern_matched', None)
out.to_update.append(new_item)
break
else:
new_item['cluster_id'] = self.server.cluster_id
new_item[Generic_Attr_Name] = None
out.to_add.append(new_item)
# .. now, we can return the response to our caller.
return out
# ################################################################################################################################
def _handle_basic_auth_input(self, incoming:'dictlist', existing:'dictlist') -> 'ItemsInfo':
# Our response to produce
out = ItemsInfo()
out.to_add = []
out.to_update = []
# Go through each item that we potentially need to create and see if there is a match
for new_item in deepcopy(incoming):
for existing_item in existing:
if existing_item['sec_type'] == Sec_Def_Type.BASIC_AUTH:
if new_item['name'] == existing_item['name']:
new_item['id'] = existing_item['id']
new_item['sec_type'] = existing_item['sec_type']
new_item['cluster_id'] = self.server.cluster_id
_ = new_item.pop('realm', None)
out.to_update.append(new_item)
break
# .. if we are here, it means that there was no match, which means that this item truly is new ..
else:
# .. passwords are optional on input ..
if not 'password' in new_item:
new_item['password'] = self.name + ' ' + cast_('str', CryptoManager.generate_secret(as_str=True))
new_item['sec_type'] = Sec_Def_Type.BASIC_AUTH
new_item['cluster_id'] = self.server.cluster_id
out.to_add.append(new_item)
# .. now, we can return the response to our caller.
return out
# ################################################################################################################################
def _get_basic_auth_realm_by_sec_name(self, incoming:'dictlist', name:'str') -> 'str':
for item in incoming:
if item['name'] == name:
return item['realm']
else:
raise Exception(f'Security definition not found (realm) -> {name}')
# ################################################################################################################################
def create_basic_auth(self, session:'SASession', values:'dictlist', incoming:'dictlist') -> 'None':
# We need to create a new list with only these values
# that the base table can support.
sec_base_values = []
for item in values:
sec_base_item = deepcopy(item)
_ = sec_base_item.pop('realm', None)
sec_base_values.append(sec_base_item)
# First, insert rows in the base table ..
sec_base_insert = insert(SecurityBase).values(sec_base_values)
session.execute(sec_base_insert)
session.commit()
# .. now, get all of their IDs ..
name_list:'any_' = [item['name'] for item in values]
newly_added = get_object_list_by_name_list(session, SecurityBaseTable, name_list)
to_add_basic_auth = []
for item in values:
for newly_added_item in newly_added:
if item['name'] == newly_added_item['name']:
to_add_item = {
'id': newly_added_item['id'],
'realm': item['realm'],
}
to_add_basic_auth.append(to_add_item)
break
session.execute(HTTPBasicAuthInsert().values(to_add_basic_auth))
session.commit()
# ################################################################################################################################
def _enrich_endpoints(self, endpoints:'dictlist', sec_list:'dictlist') -> 'None':
for item in endpoints:
service = item.pop('service', None)
service_name = item.pop('service_name', None)
service_name = service or service_name
_ = item.pop('ws_channel_name', None)
_ = item.pop('sec_def', None)
if service_name:
service_id = self.server.service_store.get_service_id_by_name(service_name)
item['service_id'] = service_id
if sec_name := item.pop('sec_name', None):
for sec_item in sec_list:
if sec_name == sec_item['name']:
security_id = sec_item['id']
item['security_id'] = security_id
break
else:
if sec_name != Zato_No_Security:
raise Exception(f'Security definition not found -> {sec_name}')
# ################################################################################################################################
def _enrich_topics(self, topics:'dictlist') -> 'None':
for item in topics:
if not Generic_Attr_Name in item:
item[Generic_Attr_Name] = {}
opaque1 = item[Generic_Attr_Name]
opaque1['on_no_subs_pub'] = item.pop('on_no_subs_pub', None)
opaque1['hook_service_name'] = item.pop('hook_service_name', None)
item[Generic_Attr_Name] = dumps(opaque1)
# ################################################################################################################################
def create_objects(self, table:'any_', values:'dictlist') -> 'any_':
result = insert(table).values(values)
return result
# ################################################################################################################################
    def update_objects(self, session:'SASession', table:'any_', values:'dictlist') -> 'any_':
        """ Issues bulk UPDATE statements for the given mappings - each dict must carry the row's primary key ('id').
        """
        session.bulk_update_mappings(table, values)
# ################################################################################################################################
def _find_items(self, incoming:'dictlist', existing:'dictlist') -> 'ItemsInfo':
    """ Splits incoming items into ones that already exist (to be updated) and genuinely new ones (to be added).
    Items are matched by name.
    """
    # Our response to produce
    result = ItemsInfo()
    result.to_add = []
    result.to_update = []

    for candidate in incoming:

        # Turn an optional WSX channel name into its ID
        channel_name = candidate.get('ws_channel_name')
        if channel_name:
            candidate['ws_channel_id'] = self.server.worker_store.get_web_socket_channel_id_by_name(channel_name)

        # Look for an already-existing item of the same name ..
        match = next((elem for elem in existing if elem['name'] == candidate['name']), None)

        # .. a match means an update, no match means the item is truly new.
        if match:
            candidate['id'] = match['id']
            candidate['cluster_id'] = self.server.cluster_id
            result.to_update.append(candidate)
        else:
            candidate['cluster_id'] = self.server.cluster_id
            result.to_add.append(candidate)

    return result
# ################################################################################################################################
def _get_sec_list(self, session:'SASession') -> 'dictlist':
    """ Returns id, name and sec_type of all security definitions.
    """
    wanted = [SecurityBaseTable.c.id, SecurityBaseTable.c.name, SecurityBaseTable.c.sec_type]
    return get_object_list_by_columns(session, wanted)
# ################################################################################################################################
def _get_existing_topics(self, session:'SASession') -> 'dictlist':
    """ Returns all the pub/sub topics that already exist in the database.
    """
    return get_object_list(session, PubSubTopicTable)
# ################################################################################################################################
def _get_existing_endpoints(self, session:'SASession') -> 'dictlist':
    """ Returns id, name and type of all the pub/sub endpoints that already exist.
    """
    wanted = [
        PubSubEndpointTable.c.id,
        PubSubEndpointTable.c.name,
        PubSubEndpointTable.c.endpoint_type,
    ]
    return get_object_list_by_columns(session, wanted)
# ################################################################################################################################
def _get_existing_subscriptions(self, session:'SASession') -> 'dictlist':
    """ Returns id, topic and endpoint of all the pub/sub subscriptions that already exist.
    """
    wanted = [
        PubSubSubscriptionTable.c.id,
        PubSubSubscriptionTable.c.topic_id,
        PubSubSubscriptionTable.c.endpoint_id,
    ]
    return get_object_list_by_columns(session, wanted)
# ################################################################################################################################
def _get_existing_data(self, session:'SASession', *, needs_subs:'bool') -> 'ObjectContainer':
    """ Collects the existing pub/sub topics, endpoints and, optionally, subscriptions.
    """
    container = ObjectContainer()

    container.pubsub_topic = list(self._get_existing_topics(session))
    container.pubsub_endpoint = list(self._get_existing_endpoints(session))
    container.pubsub_subscription = []

    # Subscriptions are loaded only when the caller asks for them
    if needs_subs:
        container.pubsub_subscription.extend(self._get_existing_subscriptions(session))

    return container
# ################################################################################################################################
# ################################################################################################################################
| 22,773
|
Python
|
.py
| 385
| 47.950649
| 130
| 0.486442
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,286
|
cassandra.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/definition/cassandra.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import DEFINITION
from zato.common.odb.model import CassandraConn
from zato.common.odb.query import cassandra_conn_list
from zato.server.service.internal import AdminService, ChangePasswordBase
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# Configuration consumed by the service metaclasses below (GetListMeta, CreateEditMeta, DeleteMeta)
elem = 'definition_cassandra'            # SimpleIO element prefix
model = CassandraConn                    # ODB model the services operate on
label = 'a Cassandra connection'         # Human-readable singular label used in messages
get_list_docs = 'Cassandra connections'  # Plural label used in GetList documentation
broker_message = DEFINITION              # Broker message class to publish notifications with
broker_message_prefix = 'CASSANDRA_'     # Prefix of actions within that message class
list_func = cassandra_conn_list          # ODB query backing GetList
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of Cassandra connection definitions. Implementation is generated by GetListMeta
    from the module-level configuration above.
    """
    # Attribute(s) the returned list can be filtered by
    _filter_by = CassandraConn.name,
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a new Cassandra connection definition. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an existing Cassandra connection definition. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes a Cassandra connection definition. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Updates the password stored for a Cassandra connection definition.
    """
    # An empty password is acceptable for this definition type
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_definition_cassandra_change_password_request'
        response_elem = 'zato_definition_cassandra_change_password_response'

    def handle(self):

        def _set_password(instance, password):
            instance.password = password

        return self._handle(CassandraConn, _set_password, DEFINITION.CASSANDRA_CHANGE_PASSWORD.value)
# ################################################################################################################################
| 2,466
|
Python
|
.py
| 50
| 46.32
| 130
| 0.527963
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,287
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/definition/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,288
|
jms_wmq.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/definition/jms_wmq.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from datetime import datetime
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.common.broker_message import DEFINITION
from zato.common.odb.model import Cluster, ConnDefWMQ
from zato.common.odb.query import definition_wmq, definition_wmq_list
from zato.server.service import Boolean, Int
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of IBM MQ definitions available.
    """
    # Attribute(s) the returned list can be filtered by
    _filter_by = ConnDefWMQ.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_definition_jms_wmq_get_list_request'
        response_elem = 'zato_definition_jms_wmq_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'host', 'port', 'channel', Boolean('cache_open_send_queues'),
            Boolean('cache_open_receive_queues'), Boolean('use_shared_connections'), Boolean('ssl'), 'needs_mcd',
            Int('max_chars_printed'), Boolean('use_jms'))
        output_optional = ('ssl_cipher_spec', 'ssl_key_repository', 'queue_manager', 'username')

    def get_data(self, session):
        # NOTE(review): the trailing False is passed straight through to definition_wmq_list -
        # presumably a needs_columns-style flag; confirm against the query's signature.
        return self._search(definition_wmq_list, session, self.request.input.cluster_id, False)

    def handle(self):
        # A dedicated SQL session, closed as soon as the list is produced
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
# ################################################################################################################################
class GetByID(AdminService):
    """ Returns a particular IBM MQ definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_definition_jms_wmq_get_by_id_request'
        response_elem = 'zato_definition_jms_wmq_get_by_id_response'
        input_required = ('id', 'cluster_id',)
        output_required = ('id', 'name', 'host', 'port', 'channel', Boolean('cache_open_send_queues'),
            Boolean('cache_open_receive_queues'), Boolean('use_shared_connections'), Boolean('ssl'), 'needs_mcd',
            Int('max_chars_printed'))
        output_optional = ('ssl_cipher_spec', 'ssl_key_repository', 'queue_manager', 'username', Boolean('use_jms'))

    def get_data(self, session):
        # Looks the definition up by cluster and ID
        return definition_wmq(session, self.request.input.cluster_id, self.request.input.id)

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload = self.get_data(session)
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new IBM MQ definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_definition_jms_wmq_create_request'
        response_elem = 'zato_definition_jms_wmq_create_response'
        input_required = ('cluster_id', 'name', 'host', 'port', 'channel', Boolean('cache_open_send_queues'),
            Boolean('cache_open_receive_queues'), Boolean('use_shared_connections'), Boolean('ssl'), 'needs_mcd',
            Int('max_chars_printed'), Boolean('use_jms'))
        input_optional = ('ssl_cipher_spec', 'ssl_key_repository', 'queue_manager', 'username')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            # Let's see if we already have an object of that name before committing
            # any stuff into the database.
            # NOTE(review): the filter joins ConnDefWMQ.cluster_id to Cluster.id without pinning
            # Cluster.id to input.cluster_id - this may match definitions from any cluster;
            # confirm whether a `.filter(Cluster.id==input.cluster_id)` is missing.
            existing_one = session.query(ConnDefWMQ).\
                filter(ConnDefWMQ.cluster_id==Cluster.id).\
                filter(ConnDefWMQ.name==input.name).\
                first()

            if existing_one:
                raise Exception('IBM MQ definition `{}` already exists on this cluster'.format(input.name))

            try:
                # A random initial password - it can be changed later via ChangePassword
                input.password = uuid4().hex
                input.use_jms = input.use_jms or False

                definition = self._new_zato_instance_with_cluster(ConnDefWMQ)
                definition.name = input.name
                definition.host = input.host
                definition.port = input.port
                definition.queue_manager = input.queue_manager
                definition.channel = input.channel
                definition.cache_open_send_queues = input.cache_open_send_queues
                definition.cache_open_receive_queues = input.cache_open_receive_queues
                definition.use_shared_connections = input.use_shared_connections
                definition.ssl = input.ssl
                definition.ssl_cipher_spec = input.ssl_cipher_spec
                definition.ssl_key_repository = input.ssl_key_repository
                definition.needs_mcd = input.needs_mcd
                definition.max_chars_printed = input.max_chars_printed
                definition.cluster_id = input.cluster_id
                definition.username = input.username
                definition.password = input.password
                definition.use_jms = input.use_jms
                session.add(definition)
                session.commit()

                # Notify other servers about the new definition
                input.id = definition.id
                input.action = DEFINITION.WMQ_CREATE.value
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name
            except Exception:
                self.logger.error('Could not create an IBM MQ definition, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an IBM MQ definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_definition_jms_wmq_edit_request'
        response_elem = 'zato_definition_jms_wmq_edit_response'
        input_required = (Int('id'), 'cluster_id', 'name', 'host', 'port', 'channel',
            Boolean('cache_open_send_queues'), Boolean('cache_open_receive_queues'), Boolean('use_shared_connections'),
            Boolean('ssl'), 'needs_mcd', Int('max_chars_printed'), Boolean('use_jms'))
        # NOTE(review): ssl_cipher_spec and ssl_key_repository are read below via input.get
        # but not declared here, unlike in Create - confirm whether they should be listed.
        input_optional = ('queue_manager', 'username')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            # Let's see if we already have an object of that name before committing any stuff into the database.
            # NOTE(review): as in Create, the filter joins ConnDefWMQ.cluster_id to Cluster.id
            # without pinning Cluster.id to input.cluster_id - confirm this is intended.
            existing_one = session.query(ConnDefWMQ).\
                filter(ConnDefWMQ.cluster_id==Cluster.id).\
                filter(ConnDefWMQ.id!=input.id).\
                filter(ConnDefWMQ.name==input.name).\
                first()

            if existing_one:
                raise Exception('IBM MQ definition `{}` already exists on this cluster'.format(input.name))

            try:
                def_ = session.query(ConnDefWMQ).filter_by(id=input.id).one()
                # Remember the previous name - it is published to the broker below
                old_name = def_.name
                def_.name = input.name
                def_.host = input.host
                def_.port = input.port
                def_.queue_manager = input.queue_manager
                def_.channel = input.channel
                def_.cache_open_send_queues = input.cache_open_send_queues
                def_.cache_open_receive_queues = input.cache_open_receive_queues
                def_.use_shared_connections = input.use_shared_connections
                def_.ssl = input.ssl
                def_.ssl_cipher_spec = input.get('ssl_cipher_spec')
                def_.ssl_key_repository = input.get('ssl_key_repository')
                def_.needs_mcd = input.needs_mcd
                def_.max_chars_printed = input.max_chars_printed
                def_.username = input.username
                def_.use_jms = input.use_jms or False
                session.add(def_)
                session.commit()

                # Notify other servers, including the definition's previous name
                input.id = def_.id
                input.action = DEFINITION.WMQ_EDIT.value
                input.old_name = old_name
                self.broker_client.publish(input)

                self.response.payload.id = def_.id
                self.response.payload.name = def_.name
            except Exception:
                self.logger.error('Could not update IBM MQ definition, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an IBM MQ definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_definition_jms_wmq_delete_request'
        response_elem = 'zato_definition_jms_wmq_delete_response'
        input_required = (Int('id'),)

    def handle(self):
        def_id = self.request.input.id
        with closing(self.odb.session()) as session:
            try:
                # Look the definition up by ID - .one() raises if it does not exist
                item = session.query(ConnDefWMQ).\
                    filter(ConnDefWMQ.id==def_id).\
                    one()
                session.delete(item)
                session.commit()

                # Let other servers know the definition is gone
                self.broker_client.publish({'action': DEFINITION.WMQ_DELETE.value, 'id': def_id})
            except Exception:
                session.rollback()
                self.logger.error('Could not delete IBM MQ definition, e:`%s`', format_exc())
                raise
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Updates the password stored for an IBM MQ connection definition.
    """
    # An empty password is acceptable for this definition type
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_definition_jms_wmq_change_password_request'
        response_elem = 'zato_definition_jms_wmq_change_password_response'

    def handle(self):

        def _set_password(instance, password):
            instance.password = password

        return self._handle(ConnDefWMQ, _set_password, DEFINITION.WMQ_CHANGE_PASSWORD.value)
# ################################################################################################################################
class Ping(AdminService):
    """ Pings the remote queue manager that a given connection definition ID points to.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_definition_jms_wmq_ping_request'
        response_elem = 'zato_definition_jms_wmq_ping_response'
        input_required = (Int('id'),)
        output_optional = ('info',)

    def handle(self):
        # Measure how long the round-trip takes
        before = datetime.utcnow()
        self.server.connector_ibm_mq.ping_wmq(self.request.input.id)
        elapsed = (datetime.utcnow() - before).total_seconds()
        self.response.payload.info = 'Ping OK, took:`{}` s'.format(elapsed)
# ################################################################################################################################
| 11,452
|
Python
|
.py
| 209
| 43.91866
| 130
| 0.563064
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,289
|
amqp_.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/definition/amqp_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from uuid import uuid4
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import DEFINITION
from zato.common.odb.model import ConnDefAMQP
from zato.common.odb.query import definition_amqp, definition_amqp_list
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Configuration consumed by the service metaclasses below (GetListMeta, CreateEditMeta, DeleteMeta)
elem = 'definition_amqp'            # SimpleIO element prefix
model = ConnDefAMQP                 # ODB model the services operate on
label = 'an AMQP definition'        # Human-readable singular label used in messages
get_list_docs = 'AMQP definitions'  # Plural label used in GetList documentation
broker_message = DEFINITION         # Broker message class to publish notifications with
broker_message_prefix = 'AMQP_'     # Prefix of actions within that message class
list_func = definition_amqp_list    # ODB query backing GetList
# The password is assigned by the hooks below, never taken from user input
skip_input_params = ('password',)
# ################################################################################################################################
def broker_message_hook(self, input, instance, attrs, service_type):
    """ Enriches the outgoing broker message with the source server's name and,
    on create/edit, with the password already stored in the database.
    """
    input.source_server = self.server.get_full_name()
    input.config_cid = 'definition.amqp.{}.{}.{}'.format(service_type, input.source_server, self.cid)

    if service_type == 'create_edit':
        with closing(self.odb.session()) as session:
            stored = definition_amqp(session, instance.cluster_id, instance.id)
            input.password = stored.password
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Assigns a random initial password when a new definition is being created.
    """
    is_create = 'create' in self.get_name().lower()
    if is_create:
        instance.password = uuid4().hex
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of AMQP definitions. Implementation is generated by GetListMeta
    from the module-level configuration above.
    """
    name = 'zato.definition.amqp.get-list'
    # Attribute(s) the returned list can be filtered by
    _filter_by = ConnDefAMQP.name,
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a new AMQP definition. Implementation is generated by CreateEditMeta.
    """
    name = 'zato.definition.amqp.create'
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an existing AMQP definition. Implementation is generated by CreateEditMeta.
    """
    name = 'zato.definition.amqp.edit'
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes an AMQP definition. Implementation is generated by DeleteMeta.
    """
    name = 'zato.definition.amqp.delete'
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Updates the password stored for an AMQP connection definition.
    """
    name = 'zato.definition.amqp.change-password'

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_definition_amqp_change_password_request'
        response_elem = 'zato_definition_amqp_change_password_response'

    def handle(self):

        def _set_password(instance, password):
            instance.password = password

        return self._handle(ConnDefAMQP, _set_password, DEFINITION.AMQP_CHANGE_PASSWORD.value)
# ################################################################################################################################
class GetByID(AdminService):
    """ Returns a particular AMQP definition by its ID.
    """
    name = 'zato.definition.amqp.get-by-id'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_definition_amqp_get_by_id_request'
        response_elem = 'zato_definition_amqp_get_by_id_response'
        input_required = ('id', 'cluster_id')
        output_required = ('id', 'name', 'host', 'port', 'vhost', 'username', 'frame_max', 'heartbeat')

    def get_data(self, session):
        req = self.request.input
        return definition_amqp(session, req.cluster_id, req.id)

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload = self.get_data(session)
# ################################################################################################################################
| 4,433
|
Python
|
.py
| 82
| 49.95122
| 130
| 0.509954
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,290
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pattern/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,291
|
invoke_retry.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pattern/invoke_retry.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# pylint: disable=attribute-defined-outside-init
# Arrow
from arrow import utcnow
# Bunch
from bunch import Bunch
# gevent
from gevent import spawn, spawn_later
# Zato
from zato.common.json_internal import loads
from zato.server.service import Service
from zato.server.pattern.invoke_retry import RetryFailed, retry_failed_msg, retry_limit_reached_msg
# ################################################################################################################################
class InvokeRetry(Service):
    """ Retries the invocation of a target service on behalf of the original caller,
    notifying a callback service once the call either succeeds or the retry limit is reached.
    The request payload (kept as self.req_bunch) carries the target, retry settings and callback data.
    """

# ################################################################################################################################

    def _retry(self, remaining):
        """ Performs a single invocation attempt. Raises RetryFailed with a decremented
        counter if the target service raised anything; returns its response otherwise.
        """
        try:
            response = self.invoke(self.req_bunch.target, *self.req_bunch.args, **self.req_bunch.kwargs)
        except Exception as e:
            # Log which attempt out of how many failed before signalling the failure upstream
            msg = retry_failed_msg(
                (self.req_bunch.retry_repeats-remaining)+1, self.req_bunch.retry_repeats,
                self.req_bunch.target, self.req_bunch.retry_seconds, self.req_bunch.orig_cid, e)
            self.logger.info(msg)
            raise RetryFailed(remaining-1, e)
        else:
            return response

# ################################################################################################################################

    def _notify_callback(self, is_ok, response):
        """ Asynchronously invokes the callback service with the final outcome of the retries.
        """
        callback_request = {
            'ok': is_ok,
            'orig_cid': self.req_bunch.orig_cid,
            'call_cid': self.req_bunch.call_cid,
            'source': self.req_bunch.source,
            'target': self.req_bunch.target,
            'retry_seconds': self.req_bunch.retry_seconds,
            'retry_repeats': self.req_bunch.retry_repeats,
            'context': self.req_bunch.callback_context,
            'req_ts_utc': self.req_bunch.req_ts_utc,
            'resp_ts_utc': utcnow().isoformat(),
            'response': response
        }
        self.invoke_async(self.req_bunch.callback, callback_request)

# ################################################################################################################################

    def _on_retry_finished(self, g):
        """ A callback method invoked when a retry finishes. Will decide whether it should be
        attempted to retry the invocation again or give up notifying the uses via callback
        service if retry limit is reached.
        """
        # Was there any exception caught when retrying?
        e = g.exception

        if e:
            # Can we retry again?
            if e.remaining:
                # Schedule the next attempt after the configured delay and chain back into this method
                g = spawn_later(self.req_bunch.retry_seconds, self._retry, e.remaining)
                g.link(self._on_retry_finished)

            # Reached the limit, warn users in logs, notify callback service and give up.
            else:
                msg = retry_limit_reached_msg(self.req_bunch.retry_repeats,
                    self.req_bunch.target, self.req_bunch.retry_seconds, self.req_bunch.orig_cid)
                self.logger.warning(msg)
                self._notify_callback(False, None)

        # Let the callback know it's all good
        else:
            self._notify_callback(True, g.value)

# ################################################################################################################################

    def handle(self):
        """ Entry point - parses the JSON payload and spawns the first invocation attempt.
        """
        # Convert to bunch so it's easier to read everything
        self.req_bunch = Bunch(loads(self.request.payload))

        # Initial retry linked to a retry callback
        g = spawn(self._retry, self.req_bunch.retry_repeats)
        g.link(self._on_retry_finished)
# ################################################################################################################################
| 3,979
|
Python
|
.py
| 77
| 43.077922
| 130
| 0.510446
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,292
|
sql.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/notif/sql.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# pylint: disable=attribute-defined-outside-init
# stdlib
from contextlib import closing
from datetime import datetime
from logging import DEBUG, getLogger
# SQLAlchemy
from sqlalchemy.orm.exc import NoResultFound
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.api import NOTIF as COMMON_NOTIF, SECRET_SHADOW
from zato.common.broker_message import NOTIF
from zato.common.odb.model import Cluster, NotificationSQL, SQLConnectionPool, Service
from zato.common.odb.query import notif_sql_list
from zato.server.service.internal import AdminService
from zato.server.service.internal.notif import NotifierService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Dedicated logger for SQL notifications
logger_notif = getLogger('zato_notif_sql')
# Resolved once at import time - debug output includes the full row data below
has_debug = logger_notif.isEnabledFor(DEBUG)

# ################################################################################################################################

# Configuration consumed by the service metaclasses below (GetListMeta, CreateEditMeta, DeleteMeta)
elem = 'notif_sql'                  # SimpleIO element prefix
model = NotificationSQL             # ODB model the services operate on
label = 'an SQL notification'       # Human-readable singular label used in messages
get_list_docs = 'SQL notifications' # Plural label used in GetList documentation
broker_message = NOTIF              # Broker message class to publish notifications with
broker_message_prefix = 'SQL_'      # Prefix of actions within that message class
list_func = notif_sql_list          # ODB query backing GetList
output_required_extra = ['service_name']
create_edit_input_required_extra = ['service_name']
create_edit_rewrite = ['service_name']
# These are either resolved in the hooks below or do not apply to SQL notifications
skip_input_params = ('notif_type', 'service_id', 'get_data_patt', 'get_data', 'get_data_patt_neg',
    'name_pattern_neg', 'name_pattern')
skip_output_params = ('get_data', 'get_data_patt_neg', 'get_data_patt', 'name_pattern_neg', 'name_pattern', 'service_name')
# ################################################################################################################################
def instance_hook(service, input, instance, attrs):
    """ Marks the instance as an SQL notification and, on create/edit,
    resolves the input service name into its database ID.
    """
    instance.notif_type = COMMON_NOTIF.TYPE.SQL

    if attrs.is_create_edit:
        with closing(service.odb.session()) as session:
            # Look up the ID of the target service, restricted to the input cluster
            instance.service_id = session.query(Service).\
                filter(Service.name==input.service_name).\
                filter(Service.cluster_id==Cluster.id).\
                filter(Service.cluster_id==input.cluster_id).\
                one().id
# ################################################################################################################################
def broker_message_hook(service, input, instance, attrs, service_type):
    """ Marks create/edit broker messages as SQL notifications.
    """
    if service_type != 'create_edit':
        return
    input.notif_type = COMMON_NOTIF.TYPE.SQL
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of SQL notification definitions. Implementation is generated by GetListMeta.
    """
    # Attribute(s) the returned list can be filtered by
    _filter_by = NotificationSQL.name,
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a new SQL notification definition. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an existing SQL notification definition. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes an SQL notification definition. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
class RunNotifier(NotifierService):
    """ Runs the configured SQL query and sends its resulting rows, if any,
    to the target service asynchronously.
    """
    notif_type = COMMON_NOTIF.TYPE.SQL

    def run_notifier_impl(self, config):

        # To make it possible to save it to logs directly
        config['password'] = SECRET_SHADOW
        out = []

        # Resolve the connection pool's name by its ID; if the pool no longer exists,
        # stop this notifier for good.
        try:
            with closing(self.odb.session()) as session:
                def_name = session.query(SQLConnectionPool).\
                    filter(SQLConnectionPool.id==config.def_id).\
                    filter(SQLConnectionPool.cluster_id==self.server.cluster_id).\
                    one().name
        except NoResultFound:
            # Bug fix: logger_notif is a Logger object - the previous code called it directly,
            # which raised a TypeError instead of logging the message.
            logger_notif.warning('Stopping notifier, could not find an SQL pool for config `%s`', config)
            self.keep_running = False
            return

        # Run the query and convert each row into a dict, serializing datetimes
        # so the result can be sent as JSON.
        with closing(self.outgoing.sql[def_name].session()) as session:
            rows = session.execute(config.query).fetchall()
            for row in rows:
                dict_row = dict(row.items())
                for k, v in dict_row.items():
                    if isinstance(v, datetime):
                        dict_row[k] = v.isoformat()
                out.append(dict_row)

        # Hand the rows over to the target service, if there are any
        if out:
            msg = 'Executing `%s` in background with %d %s'
            if has_debug:
                msg += ' ({})'.format(out)
            len_out = len(out)
            row_noun = 'row' if len_out == 1 else 'rows'
            logger_notif.info(msg, config.service_name, len_out, row_noun)
            self.invoke_async(config.service_name, {'data':out})
# ################################################################################################################################
| 5,396
|
Python
|
.py
| 104
| 45.288462
| 130
| 0.506466
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,293
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/notif/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# pylint: disable=attribute-defined-outside-init
# Bunch
from bunch import bunchify
from copy import deepcopy
from logging import getLogger
# gevent
from gevent import sleep, spawn
# Zato
from zato.common.api import SECRET_SHADOW
from zato.common.util.api import spawn_greenlet
from zato.server.service import Service
from zato.server.service.internal import AdminService
# ################################################################################################################################
logger_notif = getLogger('zato_notif_sql')
# ################################################################################################################################
class InvokeRunNotifier(Service):
    """ Dispatches the incoming notifier config to the service handling its notification type.
    """
    def handle(self):

        # Maps notification type to a service handling it
        handler_by_type = {
            'sql': 'zato.notif.sql.run-notifier',
        }

        config = self.request.payload['config']
        spawn_greenlet(self.invoke, handler_by_type[config['notif_type']], config)
# ################################################################################################################################
class InitNotifiers(Service):
    """ Starts one notifier per notification definition configured on this server.
    """
    def handle(self):

        # One entry for each notification type
        config_dicts = [
            self.server.worker_store.worker_config.notif_sql,
        ]

        for config_dict in config_dicts:
            for value in config_dict.values():

                # Log the configuration with its password masked out
                loggable_config = deepcopy(value.config)
                loggable_config['password'] = SECRET_SHADOW
                logger_notif.info('Initializing notifier with config `%s`', loggable_config)

                self.invoke(InvokeRunNotifier.get_name(), {'config': value.config})
# ################################################################################################################################
class NotifierService(AdminService):
    """ Base class for notifier services - spawns run_notifier in a loop, once per
    configured interval, until the definition is deleted or deactivated.
    """
    # Overridden in subclasses with a concrete notification type
    notif_type = None

    def run_notifier_impl(self, config):
        """ The per-type notification logic - must be overridden in subclasses.
        """
        raise NotImplementedError('Needs to be overridden in subclasses')

    def run_notifier(self, config):
        """ Invoked as a greenlet - fetches data from a remote data source and invokes the target service.
        """
        # It's possible our config has changed since the last time we run so we need to check the current one.
        current_config = self.server.worker_store.get_notif_config(self.notif_type, config.name)

        # The notification definition has been deleted in between the invocations of ours so we need to stop now.
        if not current_config:
            self.keep_running = False
            logger_notif.info('No current config, stopping notifier (self.keep_running=False)')
            return

        # An inactive definition means we skip this run but keep the loop going
        if not current_config.config['is_active']:
            logger_notif.info('Current config is not active, not running the notifier (is_active)')
            return

        # Log the configuration with its password masked out
        current_config_no_password = deepcopy(current_config)
        current_config_no_password.config['password'] = SECRET_SHADOW
        logger_notif.info('SQL notifier running with config `%r`', current_config_no_password)

        # Ok, overwrite old config with current one.
        config.update(current_config.config)
        self.environ['notif_sleep_interval'] = config.interval

        # Grab a distributed lock so we are sure it is only us who connects to pull newest data.
        # NOTE(review): block=None presumably means do not wait if another worker holds the lock - confirm
        with self.lock('zato:lock:{}:{}'.format(self.notif_type, config.name), block=None):
            self.run_notifier_impl(config)

    def handle(self):
        """ Entry point - keeps spawning run_notifier until keep_running turns False.
        """
        self.keep_running = True
        config = bunchify(self.request.payload)
        self.environ['notif_sleep_interval'] = config.interval

        while self.keep_running:
            spawn(self.run_notifier, config)
            sleep(self.environ['notif_sleep_interval'])

        self.logger.info('Stopped `%s` notifier `%s`', self.notif_type, config.name)
| 4,186
|
Python
|
.py
| 78
| 46.115385
| 130
| 0.597253
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,294
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/notif/cloud/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,295
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/notif/cloud/openstack/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,296
|
swift.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/notif/cloud/openstack/swift.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.service.internal import AdminService
class GetList(AdminService):
    """ Returns an empty JSON list - there are no OpenStack Swift
    notification definitions to report.
    """
    def handle(self):
        empty_list = '[]'
        self.response.payload = empty_list
class _CreateEdit(AdminService):
    """ Placeholder shared base for Create and Edit - no implementation.
    """
    pass

class Create(AdminService):
    """ Placeholder - creating Swift notification definitions is a no-op.
    """
    pass

class Edit(AdminService):
    """ Placeholder - editing Swift notification definitions is a no-op.
    """
    pass

class Delete(AdminService):
    """ Placeholder - deleting Swift notification definitions is a no-op.
    """
    pass

class RunNotifier(AdminService):
    """ Placeholder - running a Swift notifier is a no-op.
    """
    pass
| 503
|
Python
|
.py
| 20
| 22.1
| 64
| 0.738397
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,297
|
json_pointer.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/message/json_pointer.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,298
|
namespace.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/message/namespace.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
# Zato
from zato.common.broker_message import MSG_NS
from zato.common.odb.model import Cluster, MsgNamespace
from zato.common.odb.query import namespace_list
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
class GetList(AdminService):
    """ Returns a list of namespaces available.
    """
    # Column(s) that the optional search filter applies to.
    _filter_by = MsgNamespace.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_message_namespace_get_list_request'
        response_elem = 'zato_message_namespace_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'value')

    def get_data(self, session):
        # Delegate to the shared search helper, running the namespace_list query
        # for the input cluster (False - do not return a bare SQL query object).
        return self._search(namespace_list, session, self.request.input.cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
class Create(AdminService):
    """ Creates a new namespace.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_message_namespace_create_request'
        response_elem = 'zato_message_namespace_create_response'
        input_required = ('cluster_id', 'name', 'value')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            try:
                cluster = session.query(Cluster).filter_by(id=input.cluster_id).first()

                # Let's see if we already have a definition of that name before committing
                # any stuff into the database. The duplicate check must be scoped to this
                # cluster through MsgNamespace.cluster_id - filtering on Cluster.id alone
                # would not relate the namespace rows to the cluster at all.
                existing_one = session.query(MsgNamespace).\
                    filter(MsgNamespace.cluster_id==input.cluster_id).\
                    filter(MsgNamespace.name==input.name).first()

                if existing_one:
                    raise Exception('Namespace [{0}] already exists on this cluster'.format(input.name))

                definition = MsgNamespace(None, input.name, input.value, cluster.id)

                session.add(definition)
                session.commit()

            except Exception:
                self.logger.error('Could not create a namespace, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers about the newly created namespace.
                input.action = MSG_NS.CREATE.value
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name
class Edit(AdminService):
    """ Updates a namespace.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_message_namespace_edit_request'
        response_elem = 'zato_message_namespace_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'value')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            try:
                # The duplicate-name check must be scoped to this cluster through
                # MsgNamespace.cluster_id - filtering on Cluster.id alone would not
                # relate the namespace rows to the cluster at all.
                existing_one = session.query(MsgNamespace).\
                    filter(MsgNamespace.cluster_id==input.cluster_id).\
                    filter(MsgNamespace.name==input.name).\
                    filter(MsgNamespace.id!=input.id).\
                    first()

                if existing_one:
                    raise Exception('Namespace [{0}] already exists on this cluster'.format(input.name))

                definition = session.query(MsgNamespace).filter_by(id=input.id).one()
                # Remember the previous name so subscribers can find the old entry.
                old_name = definition.name

                definition.name = input.name
                definition.value = input.value

                session.add(definition)
                session.commit()

            except Exception:
                self.logger.error('Could not update the namespace, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                input.action = MSG_NS.EDIT.value
                input.old_name = old_name
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name
class Delete(AdminService):
    """ Deletes a namespace.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_message_namespace_delete_request'
        response_elem = 'zato_message_namespace_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                # Exactly one row must match, otherwise .one() raises.
                item = session.query(MsgNamespace).\
                    filter(MsgNamespace.id==self.request.input.id).\
                    one()

                session.delete(item)
                session.commit()
            except Exception:
                self.logger.error('Could not delete the namespace, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers about the deletion.
                self.request.input.action = MSG_NS.DELETE.value
                self.request.input.name = item.name
                self.broker_client.publish(self.request.input)
| 5,323
|
Python
|
.py
| 118
| 33.669492
| 104
| 0.601778
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,299
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/message/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|