id
int64 0
458k
| file_name
stringlengths 4
119
| file_path
stringlengths 14
227
| content
stringlengths 24
9.96M
| size
int64 24
9.96M
| language
stringclasses 1
value | extension
stringclasses 14
values | total_lines
int64 1
219k
| avg_line_length
float64 2.52
4.63M
| max_line_length
int64 5
9.91M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 7
101
| repo_stars
int64 100
139k
| repo_forks
int64 0
26.4k
| repo_open_issues
int64 0
2.27k
| repo_license
stringclasses 12
values | repo_extraction_date
stringclasses 433
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
10,000
|
__init__.py
|
zatosource_zato/code/zato-testing/src/zato/testing/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2020, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from unittest import TestCase
# Bunch
from bunch import bunchify
# Cryptography
from cryptography.fernet import Fernet
# Zato
from zato.common.api import CHANNEL, DATA_FORMAT, UNITTEST
from zato.common.crypto.api import CryptoManager
from zato.common.json_internal import loads
from zato.common.kvdb.api import KVDB
from zato.common.odb.api import PoolStore
from zato.common.util.api import new_cid
from zato.server.base.worker import WorkerStore
from zato.server.connection.cache import CacheAPI
from zato.server.connection.http_soap.channel import RequestHandler
from zato.server.connection.vault import VaultConnAPI
from zato.server.base.parallel import ParallelServer
from zato.server.config import ConfigStore
from zato.server.service.store import ServiceStore
from zato.testing.requests_ import RequestsAdapter
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import basestring
# ################################################################################################################################
if 0:
from zato.common.odb.unittest_ import QueryCtx
from zato.server.service import Service
QueryCtx = QueryCtx
Service = Service
# ################################################################################################################################
# ################################################################################################################################
class _FSConfig:
def get(self, key):
return getattr(self, key, None)
# ################################################################################################################################
# ################################################################################################################################
class FSPubSub(_FSConfig):
    """ Stands in for the pub/sub part of server.conf with test-friendly defaults. """
    __slots__ = 'data_prefix_len', 'data_prefix_short_len', 'log_if_deliv_server_not_found', 'log_if_wsx_deliv_server_not_found'

    def __init__(self):

        # Keep test logs quiet when delivery servers cannot be found
        self.log_if_deliv_server_not_found = False
        self.log_if_wsx_deliv_server_not_found = False

        # Prefix lengths generous enough for test data
        self.data_prefix_len = 100
        self.data_prefix_short_len = 100
# ################################################################################################################################
# ################################################################################################################################
class FSPubSubMetaTopic(_FSConfig):
    """ Stands in for per-topic pub/sub metadata settings - disabled in tests. """
    __slots__ = 'enabled', 'store_frequency'

    def __init__(self):
        # Metadata storage is turned off so no frequency is needed
        self.store_frequency = None
        self.enabled = False
# ################################################################################################################################
# ################################################################################################################################
class FSPubSubMetaEndpointPub(_FSConfig):
    """ Stands in for per-endpoint publication metadata settings. """
    __slots__ = 'enabled', 'store_frequency', 'data_len', 'max_history'

    def __init__(self):
        # Disabled by default, with limits kept in place regardless
        self.enabled = False
        self.store_frequency = 1000
        self.max_history = 200
        self.data_len = 50
# ################################################################################################################################
# ################################################################################################################################
class FSServerConfig(_FSConfig):
    """ A top-level stand-in for the server.conf file, aggregating the pub/sub sections.
    """
    # NOTE(review): 'misc' is declared in __slots__ but never assigned in __init__,
    # so .get('misc') returns None until a test sets it - confirm this is intended.
    __slots__ = 'pubsub', 'pubsub_meta_topic', 'pubsub_meta_endpoint_pub', 'misc'

    def __init__(self):
        # Each section gets its own stand-in object
        self.pubsub = FSPubSub()
        self.pubsub_meta_topic = FSPubSubMetaTopic()
        self.pubsub_meta_endpoint_pub = FSPubSubMetaEndpointPub()
# ################################################################################################################################
# ################################################################################################################################
class ServiceTestCaseConfig:
    """ An empty placeholder class - presumably a hook for per-test configuration;
    it is not referenced anywhere in this module.
    """
    pass
# ################################################################################################################################
class Cache:
    """ A stub cache for tests - every lookup reports a hit by returning True. """

    def __init__(self):
        # Nothing to initialize; this is deliberately a stub
        pass

    def get(self, *args, **kwargs):
        # Always succeed, no matter the key or options given
        return True
# ################################################################################################################################
# ################################################################################################################################
class ServiceTestCase(TestCase):
    """ Base class for test cases that invoke Zato services in-process.
    setUp assembles an in-memory server environment - server, worker store,
    service store and stub caches - so no external server process is needed.
    """
    def setUp(self):

        # For mocking out Vault responses
        self.vault_adapter = RequestsAdapter()

        # We are always the first process in a server
        os.environ['ZATO_SERVER_WORKER_IDX'] = '1'

        # Represents the server.conf file
        self.fs_server_config = FSServerConfig()

        self.worker_config = ConfigStore()

        # A per-test Fernet key for the crypto manager
        self.fernet_key = Fernet.generate_key() # type: str
        self.crypto_manager = CryptoManager(secret_key=self.fernet_key)

        # Vault connections go through the mock adapter created above
        self.vault_conn_api = VaultConnAPI(requests_adapter=self.vault_adapter)

        # An in-memory server with stats and slow-response tracking disabled
        self.server = ParallelServer()
        self.server.fs_server_config = self.fs_server_config
        self.server.kvdb = KVDB()
        self.server.component_enabled.stats = False
        self.server.component_enabled.slow_response = False
        self.server.crypto_manager = self.crypto_manager

        # A service store in testing mode, attached to the server both ways
        self.service_store = ServiceStore(is_testing=True)
        self.service_store.server = self.server
        self.service_store.services = {}
        self.server.service_store = self.service_store

        # SQL configuration for unittest-only connections
        self.fs_sql_config = {
            UNITTEST.SQL_ENGINE: {
                'ping_query': 'SELECT 1+1'
            }
        }

        # A stub cache whose every .get succeeds
        self.cache = Cache()
        self.sql_pool_store = PoolStore()

        # The worker store, with optional outgoing connections stubbed out
        self.worker_store = WorkerStore(self.worker_config, self.server)
        self.worker_store.sql_pool_store = self.sql_pool_store
        self.worker_store.outconn_wsx = None
        self.worker_store.vault_conn_api = self.vault_conn_api
        self.worker_store.sms_twilio_api = None
        self.worker_store.out_sap = None
        self.worker_store.out_sftp = None
        self.worker_store.outconn_ldap = {}
        self.worker_store.outconn_mongodb = {}
        self.worker_store.def_kafka = {}

        self.worker_store.cache_api = CacheAPI(self.server)
        self.worker_store.cache_api.default = self.cache

        self.request_handler = RequestHandler(self.server)

        # A minimal WSGI environment for channel-based invocations
        self.wsgi_environ = {
            'HTTP_HOST': 'api.localhost'
        }

        # Callback methods for particular SQL queries
        self.sql_callback_by_idx = {}

# ################################################################################################################################

    def add_outconn_sql(self, name):
        # type: (str)
        # Registers a unittest-only outgoing SQL connection under the given name
        self.sql_pool_store.add_unittest_item(name)

# ################################################################################################################################

    def import_services(self, item):
        # type: (object)
        # Imports services from the given input into this test's service store
        self.service_store.import_services_from_anywhere(item, None, None, False)

# ################################################################################################################################

    def invoke_service(self, class_, request=None, **kwargs):
        # type: (Service, object, object)
        """ Invokes a service class with the given request and returns its response.
        Recognized kwargs: channel, data_format, cid, environ, as_bunch.
        """
        # Populate class-level attributes the server would otherwise set
        class_.name = class_.get_name()
        class_.impl_name = class_.get_impl_name()
        class_.component_enabled_ibm_mq = True
        class_.component_enabled_zeromq = False
        class_.component_enabled_sms = True
        class_.component_enabled_email = False
        class_.component_enabled_search = False
        class_.component_enabled_msg_path = False
        class_.component_enabled_patterns = False
        class_.has_sio = True
        class_._worker_config = self.worker_config
        class_._worker_store = self.worker_store
        class_.crypto = self.server.crypto_manager

        service = class_() # type: Service
        service.out.vault = self.vault_conn_api

        # Register the service so lookups by impl_name succeed
        self.service_store.services[service.impl_name] = {
            'slow_threshold': 100,
        }

        # Default to a direct invocation with dict-based I/O unless told otherwise
        channel = kwargs.get('channel') or CHANNEL.INVOKE
        data_format = kwargs.get('data_format') or DATA_FORMAT.DICT
        transport = ''
        broker_client = None
        cid = kwargs.get('cid') or new_cid()

        simple_io_config = {
            'bytes_to_str': {'encoding': 'utf8'}
        }

        response = service.update_handle(
            self.request_handler._set_response_data, service, request, channel, data_format, transport,
            self.server, broker_client, self.worker_store, cid, simple_io_config, environ=kwargs.get('environ'))

        # Optionally convert a JSON string payload into a Bunch for dotted access
        if kwargs.get('as_bunch'):
            if isinstance(response.payload, basestring):
                payload = loads(response.payload)
                payload = bunchify(payload)
                response._payload = payload

        return response
# ################################################################################################################################
# ################################################################################################################################
| 9,563
|
Python
|
.py
| 181
| 46.243094
| 130
| 0.475434
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,001
|
service.py
|
zatosource_zato/code/zato-testing/src/zato/testing/service.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2020, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# ################################################################################################################################
class TestingService:
    """ A minimal service stand-in used by the testing machinery. """

    # No SIO schema is attached by default
    schema = None

    @staticmethod
    def after_add_to_store(*ignored_args, **ignored_kwargs):
        # Deliberately a no-op - post-import hooks are not exercised in tests
        pass
# ################################################################################################################################
# ################################################################################################################################
| 899
|
Python
|
.py
| 15
| 57.066667
| 130
| 0.289954
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,002
|
pre_uninstall.py
|
zatosource_zato/code/util/pre_uninstall.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
import os
# ################################################################################################################################
# ################################################################################################################################
# Log everything at DEBUG level so that uninstallation issues are easy to diagnose
log_format = '%(asctime)s - %(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(level=logging.DEBUG, format=log_format)
logger = logging.getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class WindowsPreUninstall:
    """ Code to run before a Windows package is to be uninstalled.

    Removes the package's 'windows-bin' directory from the current user's
    %path% in the registry and broadcasts the change to the system.
    """
    def __init__(self, base_dir:'str', bin_dir:'str') -> 'None':
        self.base_dir = base_dir
        self.bin_dir = bin_dir

        # This is the path to the directory that 'zato.py' command is in
        self.zato_windows_bin_dir = os.path.join(self.base_dir, 'windows-bin')

        # Full path to 'zato.py'
        self.zato_windows_bin_path = os.path.join(self.zato_windows_bin_dir, 'zato.py')

        # We need for the drive letter to be upper-cased since this is what will be found in the registry
        drive_letter, rest = self.zato_windows_bin_dir[0], self.zato_windows_bin_dir[1:]
        drive_letter = drive_letter.upper()
        self.zato_windows_bin_dir = drive_letter + rest

# ################################################################################################################################

    @staticmethod
    def remove_dir_from_env_path(env_path:'str', dir_to_remove:'str') -> 'str':
        """ Returns env_path (a ';'-separated %path% value) without dir_to_remove.
        Unlike a plain str.replace, each entry is removed together with its
        separator, so no stray ';;' or leading/trailing ';' is left behind.
        """
        entries = [elem for elem in env_path.split(';') if elem and elem != dir_to_remove]
        return ';'.join(entries)

# ################################################################################################################################

    def update_windows_registry(self):
        """ Deletes our bin directory from the user's %path% in the registry
        and notifies other processes that environment settings changed.
        """
        # stdlib
        from winreg import \
            HKEY_CURRENT_USER as hkey_current_user, \
            KEY_ALL_ACCESS as key_all_access, \
            REG_EXPAND_SZ as reg_expand_sz, \
            OpenKey, \
            QueryValueEx, \
            SetValueEx # noqa: E272

        # pywin32
        from win32con import \
            HWND_BROADCAST as hwnd_broadcast, \
            WM_SETTINGCHANGE as wm_settingchange

        # pywin32 as well
        from win32gui import SendMessage

        # We look up environment variables for current user
        root = hkey_current_user
        sub_key = 'Environment'

        # Open the registry key ..
        with OpenKey(root, sub_key, 0, key_all_access) as reg_key_handle:

            # .. look up the current value of %path% ..
            env_path, _ = QueryValueEx(reg_key_handle, 'path')

            # .. make sure that our path is already there ..
            if self.zato_windows_bin_dir not in env_path:
                return

            # .. if we are here, it means that we can remove our path,
            # .. together with its ';' separator so no empty entry is left behind ..
            env_path = self.remove_dir_from_env_path(env_path, self.zato_windows_bin_dir)

            # .. now, we can save the new value of %path% in the registry ..
            SetValueEx(reg_key_handle, 'path', 0, reg_expand_sz, env_path)

        # .. finally, we can notify the system of the change.
        SendMessage(hwnd_broadcast, wm_settingchange, 0, sub_key)

# ################################################################################################################################

    def run(self):
        """ Runs all the pre-uninstallation steps. """
        self.update_windows_registry()
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':

    # Resolve the package's base directory relative to this file
    util_dir = os.path.dirname(os.path.abspath(__file__))
    base_dir = os.path.abspath(os.path.join(util_dir, '..'))

    # Double up backslashes, as expected by the code that consumes this path
    base_dir = base_dir.replace('\\', '\\\\')

    # Executables are kept in the 'Scripts' directory
    bin_dir = os.path.abspath(os.path.join(base_dir, 'Scripts'))

    # Build the pre-uninstall object and run it
    WindowsPreUninstall(base_dir, bin_dir).run()
# ################################################################################################################################
# ################################################################################################################################
| 4,530
|
Python
|
.py
| 77
| 51.415584
| 130
| 0.420457
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,003
|
check_tcp_ports.py
|
zatosource_zato/code/util/check_tcp_ports.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.util.api import wait_until_port_free
# ################################################################################################################################
# ################################################################################################################################
def wait_for_ports(*data): # type: ignore
    """ Blocks until input TCP ports are free.
    """
    for item in data: # type: ignore
        port, component = item
        # Wait up to 10 seconds per port, reporting the ones still taken
        if wait_until_port_free(port, 10):
            continue
        print(f'Port taken {port} ({component})')
# ################################################################################################################################
def get_scheduler_data():
    """ Returns the port/component pair for the scheduler. """
    scheduler_port = 31530
    return [[scheduler_port, 'Scheduler']] # type: ignore
# ################################################################################################################################
def get_non_scheduler_data():
    """ Returns port/component pairs for all non-scheduler components.
    Servers are listed last because they may be the last to stop
    in case we are being called during an environment's restart.
    """
    out = [
        [8183, 'Dashboard'], # type: ignore
        [11223, 'Load-balancer'],
        [20151, 'Load-balancer\'s agent'],
    ]
    out.append([17010, 'server1'])
    out.append([17011, 'server2'])
    return out
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':

    # stdlib
    import sys

    # By default, wait for everything, scheduler included
    scheduler_only = False
    needs_scheduler = True

    # A single command-line argument may narrow down what we wait for
    if len(sys.argv) == 2:
        mode = sys.argv[1]
        scheduler_only = mode == 'scheduler-only'
        needs_scheduler = mode != 'no-scheduler'

    data = []

    if scheduler_only:
        data += get_scheduler_data() # type: ignore
    else:
        if needs_scheduler:
            data += get_scheduler_data() # type: ignore
        data += get_non_scheduler_data() # type: ignore

    wait_for_ports(*data)
# ################################################################################################################################
# ################################################################################################################################
| 2,667
|
Python
|
.py
| 55
| 42.945455
| 130
| 0.369314
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,004
|
post_install.py
|
zatosource_zato/code/util/post_install.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
import os
import sys
from glob import glob
from pathlib import Path
from platform import system as platform_system
from shutil import copy as shutil_copy
from typing import Any as any_, cast as cast_
# ################################################################################################################################
# ################################################################################################################################
# Log everything at DEBUG level so post-installation steps are easy to trace
log_format = '%(asctime)s - %(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(level=logging.DEBUG, format=log_format)
logger = logging.getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    # Windows long-path support - the '\\?\' prefix lifts the MAX_PATH limit.
    Long_File_Pattern = r'\\?\%s'        # %-pattern producing a long-path name
    Long_File_Prefix = '\\\\?\\'         # the raw prefix itself
    Long_File_Prefix_Escaped = r'\\\?\\' # regex-escaped form of the prefix
# ################################################################################################################################
# ################################################################################################################################
# True when running under any Windows variant
is_windows = 'windows' in platform_system().lower()

# ################################################################################################################################
# ################################################################################################################################

#
# This path is relative to the parent of the path that the 'zato' command is in (self.base_dir).
#
site_packages_relative = ['lib', 'site-packages']
# ################################################################################################################################
# ################################################################################################################################
class PostInstall:
    """ Code to run after a Zato package has been just installed.

    Installed files contain absolute paths pointing to the directory the package
    was built in. This class discovers that original build directory and rewrites
    such paths - in site-packages metadata and 'bin' scripts - to point to the
    directory the package is actually installed in. Subclasses supply the
    OS-specific details (where binaries and libraries live).
    """

    # How many trailing components of the Python binary's path to drop
    # in order to arrive at the build base directory - set by subclasses.
    build_dir_to_base_depth: 'int'

    base_dir: 'str'
    code_dir: 'str'
    bin_dir: 'str'

    # A directory where Python binaries are in, e.g. python3.11, python3.12 etc.
    python_dir: 'str'

    # The full path to the python_dir directory
    python_dir_full: 'str'

    # The directory that this installation was originally build in,
    # e.g. the one that was used on the build server.
    orig_build_dir: 'str'

    # Packages are installed here
    site_packages_dir: 'str'

    # This is the path to the directory that 'zato.bat' command is in
    zato_windows_bin_dir: 'str'

    # OS-specific path elements, provided by subclasses
    lib_dir_elems = None # type: any_
    bin_path_prefix_elems = None # type: any_
    bin_path_needs_python_dir = None # type: any_

    # Which line of the zato command file contains the build path, and the command's file name
    zato_bin_line: 'int | None' = None
    zato_bin_command: 'str | None' = None

# ################################################################################################################################

    def init(self, base_dir:'str', bin_dir:'str') -> 'None':
        """ Stores the input directories and derives the site-packages location.
        """
        self.base_dir = base_dir
        self.bin_dir = bin_dir

        # Packages are installed here
        # NOTE(review): self.code_dir is read below but init() never sets it -
        # presumably callers assign it beforehand (run() does); confirm.
        self.site_packages_dir = os.path.join(self.code_dir, *site_packages_relative)

# ################################################################################################################################

    def update_files(self, files_dir:'any_', patterns:'any_', to_ignore:'any_') -> 'None':
        """ Rewrites the original build directory to the current installation
        directory in all files under files_dir matching any of the glob patterns,
        skipping files whose names start or end with any element of to_ignore.
        A '-bak' copy of each file is made before it is modified.
        """
        # Support long paths under Windows
        if is_windows:
            files_dir = ModuleCtx.Long_File_Pattern % files_dir

        # To be sorted later
        file_names = []

        # Check all input patterns ..
        for name in patterns:

            # .. full pattern for glob, including the directory with files ..
            full_pattern = os.path.join(files_dir, name)

            # .. consult all the file names in the directory ..
            for full_path in glob(full_pattern):

                # .. ignore sub-directories ..
                if os.path.isdir(full_path):
                    continue

                # .. confirm if the file name is not among ignored ones ..
                for ignored in to_ignore:
                    file_name = os.path.basename(full_path)
                    if file_name.startswith(ignored) or file_name.endswith(ignored):
                        should_add = False
                        break
                else:
                    should_add = True

                # .. if we enter this 'if' branch, it means that the file is not to be ignored.
                if should_add:
                    file_names.append(full_path)

        # To make it easier to recognise what we are working with currently
        file_names.sort()

        for idx, name in enumerate(file_names, 1):

            # Prepare a backup file's name ..
            backup_name = name + '-bak'

            # .. and make the backup before modifying the file.
            shutil_copy(name, backup_name)

            # Now, we can get the contents of the original file
            # ('with' ensures the handle is closed before we reopen for writing)
            with open(name, 'r', encoding='utf8') as file_:
                data = file_.read() # type: str

            if self.orig_build_dir in data:

                # Log what we are about to do
                logger.info('#%s Replacing `%s` in %s', idx, self.orig_build_dir, name)

                # Replace the build directory with the actual installation directory ..
                data = data.replace(self.orig_build_dir, self.code_dir)

                # .. and save the data on disk, using the same encoding it was read with.
                with open(name, 'w', encoding='utf8') as file_:
                    _ = file_.write(data)

                logger.info('#%s Finished replacing', idx)

# ################################################################################################################################

    def update_site_packages_files(self) -> 'None':
        """ Updates path metadata files in the site-packages directory.
        """
        # File types that we are going to modify
        patterns = ['*.pth', '*.egg-link']

        # Patterns will be matched against this directory
        files_dir = self.site_packages_dir

        # In site-packages, there are no files to ignore except for backup ones
        to_ignore = ['-bak']

        logger.info('Updating site-packages: %s -> %s -> %s', files_dir, patterns, to_ignore)

        # Actually updates the files now
        self.update_files(files_dir, patterns, to_ignore)

# ################################################################################################################################

    def update_bin_files(self) -> 'None':
        """ Updates scripts in the 'bin' directory.
        """
        # In the 'bin' directory, we update all the files
        patterns = ['*']

        # Patterns will be matched against this directory
        files_dir = self.bin_dir

        # Ignore binary files in addition to the backup ones
        to_ignore = ['python', '-bak', '.dll', '.exe', '.pyd', '.zip']

        logger.info('Updating bin: %s -> %s -> %s', files_dir, patterns, to_ignore)

        # Actually updates the files now
        self.update_files(files_dir, patterns, to_ignore)

# ################################################################################################################################

    def set_git_root_dir_config(self) -> 'None':
        """ Marks the installation's parent directory as a safe git directory.
        """
        git_root_dir = os.path.join(self.base_dir, '..')
        git_root_dir = os.path.abspath(git_root_dir)
        git_root_dir = git_root_dir.replace('\\', '/')

        try:
            command = f'git config --global --add safe.directory {git_root_dir}'
            _ = os.system(command)
        except Exception:
            # This system may not have git
            pass

# ################################################################################################################################

    def update_windows_registry(self) -> 'None':
        """ Adds the Windows bin directory to the current user's %path%
        in the registry and notifies the system of the change.
        """
        # stdlib
        from winreg import OpenKey # type: ignore
        from winreg import QueryValueEx # type: ignore
        from winreg import SetValueEx # type: ignore
        from winreg import HKEY_CURRENT_USER as hkey_current_user # type: ignore
        from winreg import KEY_ALL_ACCESS as key_all_access # type: ignore
        from winreg import REG_EXPAND_SZ as reg_expand_sz # type: ignore

        # pywin32
        from win32con import HWND_BROADCAST as hwnd_broadcast # type: ignore
        from win32con import WM_SETTINGCHANGE as wm_settingchange # type: ignore

        # pywin32 as well
        from win32gui import SendMessage # type: ignore

        # We look up environment variables for current user
        root = hkey_current_user
        sub_key = 'Environment'

        # Open the registry key ..
        with OpenKey(root, sub_key, 0, key_all_access) as reg_key_handle: # type: ignore

            # .. look up the current value of %path% ..
            env_path, _ = QueryValueEx(reg_key_handle, 'path')

            # .. make sure that new path is not already there ..
            if self.zato_windows_bin_dir in env_path:
                return

            # .. if we are here, it means that we add our path ..
            env_path += ';' + self.zato_windows_bin_dir

            # .. now, we can save the new value of %path% in the registry ..
            SetValueEx(reg_key_handle, 'path', 0, reg_expand_sz, env_path)

        # .. finally, we can notify the system of the change.
        _ = SendMessage(hwnd_broadcast, wm_settingchange, 0, cast_('any_', sub_key))

# ################################################################################################################################

    def update_paths(self) -> 'None':
        """ Runs all the path-related updates.
        """
        self.update_site_packages_files()
        self.update_bin_files()
        self.set_git_root_dir_config()

# ################################################################################################################################

    def update_registry(self) -> 'None':
        """ Runs all the registry-related updates (Windows only).
        """
        self.update_windows_registry()

# ################################################################################################################################

    def get_python_dir(self) -> 'any_':
        """ Returns the name of the Python directory, e.g. 'python3.11',
        found under the OS-specific lib directory.
        """
        python_dir = 'default-python_dir'

        # This the directory where the Python directory will be found ..
        lib_dir = os.path.join(self.base_dir, *self.lib_dir_elems)

        # .. list all the directories in the lib dir ..
        for item in sorted(os.listdir(lib_dir)):

            # .. accept the one that is a Python one ..
            if item.startswith('python'):
                python_dir = item
                break

        # .. and return the result to the caller.
        return python_dir

# ################################################################################################################################

    def get_python_dir_full(self) -> 'str':
        """ Returns the absolute path to the Python directory.
        """
        python_dir_full = os.path.join(self.base_dir, *self.lib_dir_elems, self.python_dir)
        python_dir_full = os.path.abspath(python_dir_full)

        return python_dir_full

# ################################################################################################################################

    def get_orig_build_dir(self) -> 'str':
        """ Extracts, from the installed zato command file, the directory
        this package was originally built in.
        """
        # Build a full path to the zato command ..
        zato_bin_path = [self.base_dir]
        zato_bin_path.extend(self.bin_path_prefix_elems)

        if self.bin_path_needs_python_dir:
            zato_bin_path.append(self.python_dir)

        zato_bin_path.append(self.zato_bin_command) # type: ignore
        zato_bin_path = os.sep.join(zato_bin_path)

        if is_windows:
            zato_bin_path = ModuleCtx.Long_File_Pattern % zato_bin_path

        # .. read the whole contents, making sure the handle is closed afterwards ..
        with open(zato_bin_path) as bin_file:
            lines = bin_file.readlines()

        # .. our path will be in the first line ..
        bin_line = lines[self.zato_bin_line] # type: ignore
        bin_line = bin_line.strip()
        bin_path = self.extract_bin_path_from_bin_line(bin_line)

        #
        # .. Now, we have something like this in bin_line:
        # .. /home/user/projects/zatosource-zato/3.2/code/bin/python
        # .. and we would like to remove the trailing parts to have this:
        # .. /home/user/projects/zatosource-zato/3.2/code/
        #
        # .. How many parts to remove will depend on what operating system we are on
        # .. which is why it is our subclasses that tell it to us below.

        # Turn what we have so far into a Path object so it is easier to process it ..
        bin_path = Path(bin_path)

        # .. extract the original build directory now ..
        orig_build_dir = bin_path.parts[:-self.build_dir_to_base_depth]

        # Turn what we have so far into a list ..
        orig_build_dir = list(orig_build_dir)

        # If we are not on Windows, we need to remove the leading slash character
        # because we are going to use os.sep to join all the remaining parts.
        if not is_windows:
            try:
                orig_build_dir.remove('/')
            except ValueError:
                pass

        # Prepend a slash character, unless we are on Windows
        prefix = '' if is_windows else '/'

        # .. turn it into a list ..
        orig_build_dir = prefix + os.sep.join(orig_build_dir)

        # Correct the path separator on Windows
        if is_windows:
            orig_build_dir = orig_build_dir.replace('\\\\', '\\')

        # .. and return it to our caller.
        return orig_build_dir

# ################################################################################################################################

    def get_site_packages_dir(self) -> 'str':
        """ Returns the site-packages directory under the full Python directory.
        """
        site_packages_dir = os.path.join(self.python_dir_full, 'site-packages')
        return site_packages_dir

# ################################################################################################################################

    def get_impl_base_dir(self) -> 'str':
        """ Returns the OS-specific base directory - implemented by subclasses.
        """
        raise NotImplementedError('Must be implemented by subclasses')

# ################################################################################################################################

    def run_impl(self) -> 'None':
        """ Runs the OS-specific post-install steps - implemented by subclasses.
        """
        raise NotImplementedError('Must be implemented by subclasses')

# ################################################################################################################################

    def get_bin_dir(self) -> 'str':
        """ Returns the OS-specific bin directory - implemented by subclasses.
        """
        raise NotImplementedError('Must be implemented by subclasses')

# ################################################################################################################################

    def extract_bin_path_from_bin_line(self, bin_line:'str') -> 'str':
        """ Extracts the Python binary's path from a command-file line - implemented by subclasses.
        """
        raise NotImplementedError('Must be implemented by subclasses')

# ################################################################################################################################

    def get_base_dir(self) -> 'str':
        """ Returns the base directory - either given explicitly in sys.argv
        or derived from this file's own location.
        """
        # Base directory may be given explicitly or we will need build it in relation to our own location
        if len(sys.argv) > 1:
            base_dir = sys.argv[1]
            if base_dir.endswith('\\'):
                base_dir = base_dir[:-1]
        else:
            base_dir = self.get_impl_base_dir()

        return base_dir

# ################################################################################################################################

    def run(self) -> 'None':
        """ Runs the whole post-installation process, unless the package
        is already installed in the directory it was built in.
        """
        # Prepare paths ..
        self.base_dir = self.get_base_dir()
        self.code_dir = os.path.join(self.base_dir, 'code')
        self.python_dir = self.get_python_dir()
        self.python_dir_full = self.get_python_dir_full()
        self.orig_build_dir = self.get_orig_build_dir()

        # .. if these are the same, it means that we do not have anything to do.
        if self.base_dir == self.orig_build_dir:
            logger.info('Returning as base_dir and orig_build_dir are the same (%s)', self.base_dir)
            return
        else:
            # .. prepare the rest of the configuration ..
            self.bin_dir = self.get_bin_dir()
            self.site_packages_dir = self.get_site_packages_dir()

            # .. and actually run the process.
            self.run_impl()
# ################################################################################################################################
# ################################################################################################################################
class WindowsPostInstall(PostInstall):
    """ Post-installation steps specific to Windows.
    """
    lib_dir_elems = ['code', 'bundle-ext', 'python-windows']
    bin_path_prefix_elems = ['code', 'bundle-ext', 'python-windows']
    bin_path_needs_python_dir = True
    zato_bin_line = 1
    zato_bin_command = 'zato.bat'
    build_dir_to_base_depth = 4

    def run_impl(self) -> 'None':
        """ Under Windows, the whole of the post-install work is updating paths.
        """
        self.update_paths()

    def get_bin_dir(self) -> 'str':
        """ Executables live directly in the bundled Python directory.
        """
        return self.python_dir_full

    def extract_bin_path_from_bin_line(self, bin_line:'str') -> 'str':
        """ The interpreter path is the first whitespace-separated token, minus any double quotes.
        """
        first_token = bin_line.split()[0]
        return first_token.replace('"', '')
# ################################################################################################################################
# ################################################################################################################################
class NonWindowsPostInstall(PostInstall):
    """ Post-installation steps for Linux and other non-Windows systems.
    """
    lib_dir_elems = ['code', 'lib']
    bin_path_prefix_elems = ['code', 'bin']
    bin_path_needs_python_dir = False
    zato_bin_line = 0
    zato_bin_command = 'zato'
    build_dir_to_base_depth = 2

    def run_impl(self) -> 'None':
        """ On non-Windows systems the whole of the post-install work is updating paths.
        """
        self.update_paths()

    def get_impl_base_dir(self) -> 'str':
        """ The base directory is the parent of the directory this very file is in.
        """
        here = os.path.dirname(os.path.abspath(__file__))
        return os.path.abspath(os.path.join(here, '..'))

    def get_bin_dir(self) -> 'str':
        """ Executables are kept under code/bin.
        """
        return os.path.abspath(os.path.join(self.code_dir, 'bin'))

    def extract_bin_path_from_bin_line(self, bin_line:'str') -> 'str':
        """ Strips the shebang marker, leaving the interpreter path only.
        """
        return bin_line.replace('#!', '')
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':

    # Pick the implementation matching the current OS and run it.
    impl_class = WindowsPostInstall if is_windows else NonWindowsPostInstall
    impl_class().run()
# ################################################################################################################################
# ################################################################################################################################
| 20,431
|
Python
|
.py
| 361
| 48.218837
| 130
| 0.430904
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,005
|
zato_environment.py
|
zatosource_zato/code/util/zato_environment.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import glob
import logging
import os
import platform
import sys
from distutils.dir_util import copy_tree
from pathlib import Path
from subprocess import check_output, PIPE, Popen
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, strlist, strnone
# ################################################################################################################################
# ################################################################################################################################
log_format = '%(asctime)s - %(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(level=logging.DEBUG, format=log_format)
logger = logging.getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
platform_system = platform.system().lower()
is_windows = 'windows' in platform_system
is_linux = 'linux' in platform_system # noqa: E272
# ################################################################################################################################
# ################################################################################################################################
pip_deps_windows = 'setuptools==57.4.0 wheel'
pip_deps_non_windows = 'setuptools==57.4.0 wheel pip'
pip_deps = pip_deps_windows if is_windows else pip_deps_non_windows
# ################################################################################################################################
# ################################################################################################################################
zato_command_template_linux = r"""
#!{bin_dir}/python
# To prevent an attribute error in pyreadline\py3k_compat.py
# AttributeError: module 'collections' has no attribute 'Callable'
try:
import collections
collections.Callable = collections.abc.Callable
except AttributeError:
pass
# Zato
from zato.cli.zato_command import main
if __name__ == '__main__':
# stdlib
import re
import sys
# This is needed by SUSE
sys.path.append(r'{base_dir}/lib64/python3.6/site-packages/')
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
""".strip() # noqa: W605
zato_command_template_windows = r"""
@echo off
"{bundled_python_dir}\python.exe" "\\?\{code_dir}\\zato-cli\\src\\zato\\cli\\_run_zato.py" %*
""".strip() # noqa: W605
# ################################################################################################################################
# ################################################################################################################################
class EnvironmentManager:
    """ Sets up and updates a Zato installation's Python environment -
    pip dependencies, helper commands under bin/, symlinks and patches.
    Invoked from install.sh/install.bat via the __main__ block below.
    """

    def __init__(self, base_dir:'str', bin_dir:'str') -> 'None':
        self.base_dir = base_dir
        self.bin_dir = bin_dir
        self.pip_options = ''

        # All the attributes below are placeholders - _set_up_dir_and_attr_names
        # assigns their real values; the 'invalid-*' strings make accidental use obvious.
        self.eggs_dir = 'invalid-self.eggs_dir'
        self.bundle_ext_dir = 'invalid-bundle_ext_dir'
        self.site_packages_dir = 'invalid-site_packages_dir'
        self.pip_pyz_path = 'invalid-pip_pyz_path'
        self.python_command = 'invalid-python_command'
        self.pip_command = 'invalid-pip_command'
        self.bundled_python_dir = 'invalid-bundled_python_dir'
        self.zato_reqs_path = 'invalid-zato_reqs_path'
        self.code_dir = 'invalid-code_dir'

        self._set_up_pip_flags()
        self._set_up_dir_and_attr_names()

# ################################################################################################################################

    def _get_linux_distro_name(self) -> 'str':
        """ Returns the lower-cased PRETTY_NAME of the Linux distribution we run under,
        or an empty string on non-Linux systems or when the name cannot be found.
        """
        # Short-cut for non-Linux systems
        if not is_linux:
            return ''

        # If we are here, it means that we are under a Linux distribution and we assume
        # that the file exists per https://www.freedesktop.org/software/systemd/man/os-release.html

        # By default, we do not have it
        distro_name = ''

        data = open('/etc/os-release').read()
        data = data.splitlines()

        for line in data:
            if line.startswith('PRETTY_NAME'):

                # Split into KEY=VALUE and strip the surrounding double quotes
                line = line.split('=')

                distro_name = line[1]
                distro_name = distro_name.replace('"', '')
                distro_name = distro_name.lower()

                break

        logger.info('Linux distribution found -> `%s`', distro_name)

        return distro_name

# ################################################################################################################################

    def _set_up_pip_flags(self) -> 'None':
        """ Chooses the pip options to use depending on the distribution detected.
        """
        #
        # Under RHEL, pip install may not have the '--no-warn-script-location' flag.
        # At the same time, under RHEL, we need to use --no-cache-dir.
        #
        linux_distro = self._get_linux_distro_name()
        is_rhel = 'red hat' in linux_distro or 'centos' in linux_distro

        # Explicitly ignore the non-existing option and add a different one..
        if is_rhel:
            self.pip_options = '--no-cache-dir'

        # .. or make use of it.
        else:
            self.pip_options = '--no-warn-script-location'

# ################################################################################################################################

    def _set_up_dir_and_attr_names(self) -> 'None':
        """ Populates all the directory and command attributes declared in __init__ -
        the layout differs between Windows (bundled Python + pip.pyz) and other systems.
        """
        # This needs to be checked in runtime because we do not know
        # under what Python version we are are going to run.
        py_version = '{}.{}'.format(sys.version_info.major, sys.version_info.minor)
        logger.info('Python version maj.min -> %s', py_version)
        logger.info('Python self.base_dir -> %s', self.base_dir)

        self.bundle_ext_dir = os.path.join(self.base_dir, '..')
        self.bundle_ext_dir = os.path.abspath(self.bundle_ext_dir)
        logger.info('Bundle ext. dir -> %s', self.bundle_ext_dir)

        # This will exist only under Windows
        if is_windows:

            # Dynamically check what our current embedded Python's directory is ..
            self.bundled_python_dir = self.get_bundled_python_version(self.bundle_ext_dir, 'windows')

        # Under Linux, the path to site-packages contains the Python version but it does not under Windows.
        # E.g. ~/src-zato/lib/python3.8/site-packages vs. C:\src-zato\lib\site-packages
        if is_linux:
            python_version_dir = 'python' + py_version
            # NOTE(review): this path is relative to the current working directory -
            # presumably the installer always runs from base_dir; confirm.
            py_lib_dir = os.path.join('lib', python_version_dir)
        else:
            py_lib_dir = os.path.join(self.bundled_python_dir, 'lib')

        py_lib_dir = os.path.abspath(py_lib_dir)
        logger.info('Python lib dir -> %s', py_lib_dir)

        self.site_packages_dir = os.path.join(py_lib_dir, 'site-packages')
        self.site_packages_dir = os.path.abspath(self.site_packages_dir)
        logger.info('Python site-packages dir -> %s', self.site_packages_dir)

        self.eggs_dir = os.path.join(self.base_dir, 'eggs')
        self.eggs_dir = os.path.abspath(self.eggs_dir)
        logger.info('Python eggs dir -> %s', self.eggs_dir)

        if is_windows:

            # This is always in the same location
            self.pip_pyz_path = os.path.join(self.bundle_ext_dir, 'pip', 'pip.pyz')

            # .. and build the full Python command now.
            self.python_command = os.path.join(self.bundled_python_dir, 'python.exe')

            # We are now ready to build the full pip command ..
            self.pip_command = f'{self.python_command} {self.pip_pyz_path}'

            # .. and the install prefix as well.
            self.pip_install_prefix = f'--prefix {self.bundled_python_dir}'

            # Where we keep our own requirements
            self.zato_reqs_path = os.path.join(self.base_dir, '..', '..', 'requirements.txt')
            self.zato_reqs_path = os.path.abspath(self.zato_reqs_path)

            # Where the zato-* packages are (the "code" directory)
            self.code_dir = os.path.join(self.bundle_ext_dir, '..')
            self.code_dir = os.path.abspath(self.code_dir)

        else:

            # These are always in the same location
            self.pip_command = os.path.join(self.bin_dir, 'pip')
            self.python_command = os.path.join(self.bin_dir, 'python')
            self.code_dir = self.base_dir
            self.zato_reqs_path = os.path.join(self.base_dir, 'requirements.txt')

            # This is not used under Linux
            self.pip_install_prefix = ''

# ################################################################################################################################

    def get_bundled_python_version(self, bundle_ext_dir:'str', os_type:'str') -> 'str':
        """ Returns the path to the single bundled Python distribution found under
        bundle_ext_dir/python-{os_type} - raises if there is none or more than one.
        """
        python_parent_dir = f'python-{os_type}'
        python_parent_dir = os.path.join(bundle_ext_dir, python_parent_dir)

        # We want to ignore any names other than ones matching this pattern
        pattern = os.path.join(python_parent_dir, 'python-*')

        results = []

        for item in glob.glob(pattern):
            results.append(item)

        if not results:
            raise Exception(f'No bundled Python version found matching pattern: `{pattern}`')

        if len(results) > 1:
            raise Exception(f'Too many results found matching pattern: `{pattern}` -> `{results}`')

        # If we are here, it means that we have exactly one result that we can return to our caller
        result = results[0]
        return result

# ################################################################################################################################

    def _create_symlink(self, from_:'str', to:'str') -> 'None':
        """ Creates a symlink, treating an already-existing one as a no-op.
        """
        try:
            os.symlink(from_, to)
        except FileExistsError:
            # It is not an issue if it exists, likely install.sh/.bat ran twice.
            pass
        else:
            logger.info('Symlinked from `%s` to `%s`', from_, to)

# ################################################################################################################################

    def _create_executable(self, path:'str', data:'str') -> 'None':
        """ Writes data to path and marks the file executable (rwxr-----).
        """
        f = open(path, 'w')
        _ = f.write(data)
        f.close()

        logger.info('Created file `%s`', path)

        # .. and make it executable.
        os.chmod(path, 0o740)

        logger.info('Made file executable `%s`', path)

# ################################################################################################################################

    def run_command(
        self,
        command:'str',          # Whole command line, possibly multi-line; split on whitespace
        exit_on_error:'bool'=True,
        needs_stdout:'bool'=False,
        needs_stderr:'bool'=False,
        log_stderr:'bool'=True,
        use_check_output:'bool'=False  # True = subprocess.check_output, False = Popen streaming
    ) -> 'str | None':
        """ Runs an external command, dispatching to one of the two runners below.
        """
        logger.info('Running `%s`', command)

        # Turn what is possibly a multi-line command into a list of arguments ..
        command = command.strip()
        command_split = command.split()

        func = self._run_check_output if use_check_output else self._run_popen
        return func(command_split, exit_on_error, needs_stdout, needs_stderr, log_stderr)

# ################################################################################################################################

    def _run_check_output(
        self,
        command:'strlist',
        exit_on_error:'bool'=True,
        needs_stdout:'bool'=False,
        needs_stderr:'bool'=False,
        log_stderr:'bool'=True
    ) -> 'any_':
        """ Runs a command via subprocess.check_output. Returns decoded stdout when
        needs_stdout is True, the exception's args tuple when it fails and
        needs_stderr is True, or None otherwise.
        """
        # This will be potentially returned to our caller
        stdout = b''

        # Run the command ..
        try:
            stdout = check_output(command) # type: bytes
        except Exception as e:
            # NOTE(review): e.args is a tuple, not text - callers receiving this
            # through needs_stderr get the raw tuple; confirm that is intended.
            stderr = e.args
            if log_stderr:
                logger.warning(stderr)
            if exit_on_error:
                sys.exit(1)
            else:
                if needs_stderr:
                    return stderr
        else:
            if needs_stdout:
                return stdout.decode('utf8')

# ################################################################################################################################

    def _run_popen(
        self,
        command:'str | strlist',
        exit_on_error:'bool'=True,
        needs_stdout:'bool'=False,
        needs_stderr:'bool'=False,
        log_stderr:'bool'=True
    ) -> 'strnone':
        """ Runs a command via subprocess.Popen, streaming stderr line by line as the
        process runs. NOTE(review): with needs_stdout=True only the last line read
        before the process exits is returned - confirm callers expect that.
        """
        # This will be potentially returned to our caller
        stdout = None

        # Run the command ..
        process = Popen(command, stderr=PIPE, stdout=PIPE if needs_stdout else None)

        # .. and wait until it completes.
        while True:

            stderr = process.stderr.readline() # type: ignore

            if needs_stdout:
                stdout = process.stdout.readline() # type: ignore
                stdout = stdout.strip()
                stdout = stdout.decode('utf8')

            if stderr:
                stderr = stderr.strip()
                stderr = stderr.decode('utf8')

                if log_stderr:
                    logger.warning(stderr)

                if exit_on_error:
                    process.kill()
                    sys.exit(1)
                else:
                    if needs_stderr:
                        return stderr

            if process.poll() is not None:
                break

        if needs_stdout:
            return stdout

# ################################################################################################################################

    def pip_install_core_pip(self) -> 'None':
        """ Upgrades the core build tooling (setuptools, wheel and, outside Windows, pip).
        """
        # Set up the command ..
        command = '{pip_command} install {pip_install_prefix} {pip_options} -U {pip_deps}'.format(**{
            'pip_command': self.pip_command,
            'pip_install_prefix': self.pip_install_prefix,
            'pip_options': self.pip_options,
            'pip_deps': pip_deps,
        })

        # .. and run it.
        _ = self.run_command(command, exit_on_error=True)

# ################################################################################################################################

    def pip_install_requirements_by_path(self, reqs_path:'str', exit_on_error:'bool'=False) -> 'None':
        """ Installs dependencies from a given requirements.txt file, skipping silently
        if the file does not exist.
        """
        if not os.path.exists(reqs_path):
            logger.info('Skipped user-defined requirements.txt. No such path `%s`.', reqs_path)
            return

        # Set up the command .. (run_command splits on whitespace, so the layout below is free-form)
        command = """
        {pip_command}
        -v
        install
        {pip_install_prefix}
        {pip_options}
        -r {reqs_path}
        """.format(**{
            'pip_command': self.pip_command,
            'pip_install_prefix': self.pip_install_prefix,
            'pip_options': self.pip_options,
            'reqs_path': reqs_path
        })

        # .. and run it.
        _ = self.run_command(command, exit_on_error=exit_on_error)

# ################################################################################################################################

    def pip_install_zato_requirements(self) -> 'None':
        """ Installs Zato's own requirements.txt.
        """
        # Install our own requirements
        self.pip_install_requirements_by_path(self.zato_reqs_path, exit_on_error=False)

# ################################################################################################################################

    def run_pip_install_zato_packages(self, packages:'strlist') -> 'None':
        """ Installs the given zato-* source packages in editable mode (pip install -e).
        """
        # All the -e arguments that pip will receive
        pip_args = []

        # Build the arguments
        for name in packages:
            package_path = os.path.join(self.code_dir, name)
            arg = '-e {}'.format(package_path)
            pip_args.append(arg)

        # Build the command ..
        command = '{pip_command} install {pip_install_prefix} --no-warn-script-location {pip_args}'.format(**{
            'pip_command': self.pip_command,
            'pip_install_prefix': self.pip_install_prefix,
            'pip_args': ' '.join(pip_args)
        })

        # .. and run it.
        _ = self.run_command(command, exit_on_error=False)

# ################################################################################################################################

    def pip_install_standalone_requirements(self) -> 'None':
        """ Installs packages that must not come from requirements.txt - one pip
        invocation per package so the install order is guaranteed.
        """
        # These cannot be installed via requirements.txt
        packages = [
            'cython==0.29.32',
            'numpy==1.22.3',
            'pyOpenSSL==23.0.0',
            'zato-ext-bunch==1.2'
        ]

        # This needs to be installed here rather than via requirements.txt
        if not is_windows:
            packages.append('posix-ipc==1.0.0')

        for package in packages:

            # Set up the command ..
            command = '{pip_command} install {pip_install_prefix} --no-warn-script-location {package}'.format(**{
                'pip_command': self.pip_command,
                'pip_install_prefix': self.pip_install_prefix,
                'package': package,
            })

            # .. and run it.
            _ = self.run_command(command, exit_on_error=True)

# ################################################################################################################################

    def pip_install_zato_packages(self) -> 'None':
        """ Installs all the zato-* packages from the code directory.
        """
        # Note that zato-common must come first.
        packages = [
            'zato-common',
            'zato-agent',
            'zato-broker',
            'zato-cli',
            'zato-client',
            'zato-cy',
            'zato-distlock',
            'zato-hl7',
            'zato-lib',
            'zato-scheduler',
            'zato-server',
            'zato-web-admin',
            'zato-zmq',
            'zato-sso',
            'zato-testing',
        ]

        self.run_pip_install_zato_packages(packages)

# ################################################################################################################################

    def pip_uninstall(self) -> 'None':
        """ Removes packages that older releases depended on but that are obsolete now.
        """
        # Packages that will be uninstalled, e.g. no longer needed
        packages = [
            'imbox',
            'pycrypto',
            'python-keyczar',
        ]

        # Build the command ..
        command = '{} uninstall -y -qq {}'.format(self.pip_command, ' '.join(packages))

        # .. and run it.
        _ = self.run_command(command, exit_on_error=False)

# ################################################################################################################################

    def pip_install(self) -> 'None':
        """ Runs the full pip installation sequence - the order of the steps matters.
        """
        self.pip_install_core_pip()
        self.pip_install_standalone_requirements()
        self.pip_install_zato_requirements()
        self.pip_install_zato_packages()
        self.pip_uninstall()

# ################################################################################################################################

    def update_git_revision(self) -> 'None':
        """ Stores the current git commit ID in release-info/revision.txt
        for 'zato --version' and other tools to read.
        """
        # This is where we will store our last git commit ID
        revision_file_path = os.path.join(self.base_dir, 'release-info', 'revision.txt')

        # Make sure the underlying git command runs in our git repository ..
        os.chdir(self.base_dir)

        # Build the command ..
        command = 'git log -n 1 --pretty=format:%H --no-color'

        # .. run the command to get our latest commit ID ..
        commit_id = self.run_command(command, needs_stdout=True, use_check_output=True)

        # .. and store it in an external file for 'zato --version' and other tools to use.
        # NOTE(review): commit_id may be None if the git command failed - confirm.
        f = open(revision_file_path, 'w')
        f.write(commit_id) # type: ignore
        f.close()

        logger.info('Git commit ID -> `%s`', commit_id)

# ################################################################################################################################

    def add_eggs_symlink(self) -> 'None':
        """ Symlinks the eggs directory to site-packages (non-Windows only).
        """
        if not is_windows:
            self._create_symlink(self.site_packages_dir, self.eggs_dir)

# ################################################################################################################################

    def add_extlib_to_sys_path(self, extlib_dir:'Path') -> 'None':
        """ Appends extlib_dir to site-packages/easy-install.pth so it lands on sys.path in runtime.
        """
        # This file contains entries that, in runtime, will be found in sys.path
        easy_install_path = os.path.join(self.site_packages_dir, 'easy-install.pth')

        # .. add the path to easy_install ..
        f = open(easy_install_path, 'a')
        _ = f.write(extlib_dir.as_posix())
        _ = f.write(os.linesep)
        f.close()

# ################################################################################################################################

    def add_extlib(self) -> 'None':
        """ Creates the extlib directory for user dependencies, puts it on sys.path
        and keeps the legacy zato_extra_paths name working via a symlink.
        """
        # This is where external depdendencies can be kept
        extlib_dir_path = os.path.join(self.base_dir, 'extlib')

        # For backward compatibility, this will point to extlib
        extra_paths_dir = os.path.join(self.base_dir, 'zato_extra_paths')

        # Build a Path object ..
        extlib_dir = Path(extlib_dir_path)

        # .. create the underlying directory ..
        extlib_dir.mkdir(exist_ok=True)

        # .. add the path to easy_install ..
        self.add_extlib_to_sys_path(extlib_dir)

        # .. and symlink it for backward compatibility.
        if not is_windows:
            self._create_symlink(extlib_dir_path, extra_paths_dir)

# ################################################################################################################################

    def add_py_command(self) -> 'None':
        """ Creates the bin/py (or py.bat) shortcut that invokes the environment's Python.
        """
        # This is where will will save it
        command_name = 'py.bat' if is_windows else 'py'
        py_command_path = os.path.join(self.bin_dir, command_name)

        # There will be two versions, one for Windows and one for other systems

        #
        # Windows
        #
        if is_windows:
            template = ''
            template += '"{}" %*'

        # Non-Windows
        else:
            template = ''
            template += '#!/bin/sh'
            template += '\n'
            template += '"{}" "$@"'

        # Add the full path to the OS-specific template ..
        data = template.format(self.python_command)

        # .. and add the file to the system.
        self._create_executable(py_command_path, data)

# ################################################################################################################################

    def add_zato_command(self) -> 'None':
        """ Creates the main bin/zato (or zato.bat) command from the OS-specific template.
        """
        # Differentiate between Windows and other systems as the extension is needed under the former
        command_name = 'zato.bat' if is_windows else 'zato'

        if is_windows:
            command_name = 'zato.bat'
            template = zato_command_template_windows
            template_kwargs = {
                'code_dir': self.code_dir,
                'bundled_python_dir': self.bundled_python_dir,
            }
        else:
            command_name = 'zato'
            template = zato_command_template_linux
            template_kwargs = {
                'base_dir': self.base_dir,
                'bin_dir': self.bin_dir,
            }

        # This is where the command file will be created
        command_path = os.path.join(self.bin_dir, command_name)

        # Build the full contents of the command file ..
        data = template.format(**template_kwargs)

        # .. and add the file to the file system.
        self._create_executable(command_path, data)

# ################################################################################################################################

    def copy_patches(self) -> 'None':
        """ Recursively copies Zato's patches for third-party libraries into site-packages,
        overwriting any files found there.
        """
        # Where our patches can be found
        patches_dir = os.path.join(self.code_dir, 'patches')

        # Where to copy them to
        dest_dir = self.site_packages_dir

        logger.info('Copying patches from %s -> %s', patches_dir, dest_dir)

        # Recursively copy all the patches, overwriting any files found
        _ = copy_tree(patches_dir, dest_dir, preserve_symlinks=True, verbose=1)

        logger.info('Copied patches from %s -> %s', patches_dir, dest_dir)

# ################################################################################################################################

    def install(self) -> 'None':
        """ Full installation - pip packages, symlinks, commands and patches.
        """
        # self.update_git_revision()
        self.pip_install()

        self.add_eggs_symlink()
        self.add_extlib()
        self.add_py_command()
        self.add_zato_command()
        self.copy_patches()

# ################################################################################################################################

    def update(self) -> 'None':
        """ Update of an existing installation - refreshes the revision, packages and patches only.
        """
        self.update_git_revision()
        self.pip_install()
        self.copy_patches()

# ################################################################################################################################

    def runtime_setup_with_env_variables(self) -> 'None':
        """ Applies runtime configuration read from environment variables -
        extra pip requirements (ZATO_PYTHON_REQS) and extra sys.path
        directories (ZATO_EXTLIB_DIR), both colon-separated lists.
        """
        # In this step, we need to look up any possible custom pip requirements
        # that we already know that are defined through environment variables.
        python_reqs = os.environ.get('ZATO_PYTHON_REQS', '')

        # OK, we have some requirements files to install packages from ..
        if python_reqs:

            # .. support multiple files on input ..
            python_reqs = python_reqs.split(':')

            # .. and install them now.
            for path in python_reqs:
                self.pip_install_requirements_by_path(path)

        # This step is similar but instead of installing dependencies from pip requirements,
        # we add to sys.path entire directories where runtime user code can be found.
        extlib_dir = os.environ.get('ZATO_EXTLIB_DIR', '')

        # OK, we have some requirements files to install packages from ..
        if extlib_dir:

            # .. support multiple files on input ..
            extlib_dir = extlib_dir.split(':')

            # .. and install them now.
            for path in extlib_dir:
                self.add_extlib_to_sys_path(Path(path))
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':

    # Our bin directory is wherever the current Python executable is ..
    bin_dir = os.path.dirname(sys.executable)

    # .. and the base directory is one level up from it.
    base_dir = os.path.abspath(os.path.join(bin_dir, '..'))

    # The first CLI argument names the EnvironmentManager method to invoke, e.g. 'install' or 'update'.
    command = sys.argv[1]

    manager = EnvironmentManager(base_dir, bin_dir)
    getattr(manager, command)()
# ################################################################################################################################
# ################################################################################################################################
| 27,421
|
Python
|
.py
| 535
| 41.985047
| 130
| 0.465102
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,006
|
setup.py
|
zatosource_zato/code/zato-server/setup.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# flake8: noqa
from setuptools import setup, find_packages

# Version of the zato-server package - kept in sync with the overall Zato release line
version = '3.2'

# Package metadata - sources live under src/, with 'zato' as a namespace package
# shared with the other zato-* distributions.
setup(
    name = 'zato-server',
    version = version,

    author = 'Zato Source s.r.o.',
    author_email = 'info@zato.io',
    url = 'https://zato.io',

    package_dir = {'':'src'},
    packages = find_packages('src'),

    namespace_packages = ['zato'],
    zip_safe = False,
)
| 535
|
Python
|
.py
| 19
| 23.842105
| 64
| 0.617357
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,007
|
store.py
|
zatosource_zato/code/zato-server/store.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import weakref
from copy import deepcopy
from logging import getLogger
from traceback import format_exc
# Bunch
from bunch import Bunch
# Zato
from zato.common.api import SECRET_SHADOW
from zato.common.exception import Inactive
logger = getLogger(__name__)
class BaseAPI:
    """ Shared functionality for connection/query API facades - every operation
    is delegated to the underlying store given in __init__.
    """
    def __init__(self, conn_store):
        # The store that actually keeps and manages all the items
        self._conn_store = conn_store

    def get(self, name, skip_inactive=False):
        """ Returns an item by name - raises KeyError if there is no such item
        and Inactive if it exists but is not active (unless skip_inactive is True).
        """
        item = self._conn_store.get(name)

        # There is no such item at all ..
        if not item:
            msg = 'No such item `{}` in `{}`'.format(name, sorted(self._conn_store.items))
            logger.warning(msg)
            raise KeyError(msg)

        # .. the item exists but it may not be usable.
        if not (item.config.is_active or skip_inactive):
            msg = '`{}` is inactive'.format(name)
            logger.warning(msg)
            raise Inactive(msg)

        return item

    def __getitem__(self, name):
        # Subscript access never skips inactive items
        return self.get(name, False)

    def create(self, name, msg, *args, **extra):
        return self._conn_store.create(name, msg, **extra)

    def edit(self, name, msg, **extra):
        return self._conn_store.edit(name, msg, **extra)

    def delete(self, name):
        return self._conn_store.delete(name)

    def change_password(self, config):
        return self._conn_store.change_password(config)
# ################################################################################################################################
class BaseStore:
    """ A base class for connection/query stores. Subclasses provide create_impl
    (and optionally delete_impl); all mutating operations are serialized on self.lock.
    """
    def __init__(self):
        # Maps item names to Bunch instances built in _create
        self.items = {}

        # gevent
        from gevent.lock import RLock
        self.lock = RLock()

    def __getitem__(self, name):
        # Raises KeyError if the name is unknown
        return self.items[name]

    def get(self, name):
        # Returns None if the name is unknown
        return self.items.get(name)

    def _create(self, name, config, **extra):
        """ Actually adds a new definition, must be called with self.lock held.
        """
        # Keep a copy with the password masked out, for logging
        config_no_sensitive = deepcopy(config)

        if 'password' in config:
            config_no_sensitive['password'] = SECRET_SHADOW

        item = Bunch(config=config, config_no_sensitive=config_no_sensitive, is_created=False, impl=None)

        # It's optional
        conn = extra.get('def_', {'conn':None})['conn']

        try:
            logger.debug('Creating `%s`', config_no_sensitive)
            # NOTE(review): create_impl is declared below with no parameters but is
            # invoked here with (config, config_no_sensitive, **extra) - subclasses
            # evidently define the wider signature; confirm.
            impl = self.create_impl(config, config_no_sensitive, **extra)

            # Closure binding the session/connection and statement together -
            # item.execute(**kwargs) runs the statement with the given parameters.
            def execute(session, statement):
                def execute_impl(**kwargs):
                    if not session:
                        raise Exception('Cannot execute the query without a session')
                    return session.execute(statement, kwargs)
                return execute_impl
            item.execute = execute(conn, impl)

            logger.debug('Created `%s`', config_no_sensitive)

        except Exception:
            # The item is still stored below, with is_created=False and impl=None
            logger.warning('Could not create `%s`, config:`%s`, e:`%s`', name, config_no_sensitive, format_exc())
        else:
            item.impl = impl
            item.is_created = True

            # A weak proxy avoids keeping the connection alive through this item
            if conn:
                item.extra = weakref.proxy(conn)
            else:
                item.conn = item.impl

        self.items[name] = item

        return item

    def create(self, name, config, **extra):
        """ Adds a new connection definition.
        """
        with self.lock:
            return self._create(name, config, **extra)

    def _delete(self, name):
        """ Actually deletes a definition. Must be called with self.lock held.
        """
        try:
            if not name in self.items:
                raise Exception('No such name `{}` among `{}`'.format(name, self.items.keys()))
            self.delete_impl()
        except Exception:
            logger.warning('Error while deleting `%s`, e:`%s`', name, format_exc())
        finally:
            # Remove the item regardless of whether delete_impl succeeded
            if name in self.items:
                del self.items[name]

    def delete(self, name):
        """ Deletes an existing connection.
        """
        with self.lock:
            self._delete(name)

    def _edit(self, name, config, **extra):
        # An edit is a delete followed by a create under the (possibly new) name
        self._delete(name)
        return self._create(config.name, config, **extra)

    def edit(self, name, config, **extra):
        with self.lock:
            self._edit(name, config, **extra)

    def change_password(self, password_data):
        """ Re-creates an item with a new password taken from password_data.
        """
        with self.lock:

            # This may not exist if change-password is invoked from enmasse before create finished
            item = self.items.get(password_data.name)

            if item:
                new_config = deepcopy(item.config_no_sensitive)
                new_config.password = password_data.password
                self.edit(password_data.name, new_config)

    def create_impl(self):
        # Subclasses build the actual underlying object here
        raise NotImplementedError('Should be overridden by subclasses (BaseStore.create_impl)')

    def delete_impl(self):
        pass # It's OK - sometimes deleting a connection doesn't have to mean doing anything unusual
| 5,220
|
Python
|
.py
| 127
| 32.015748
| 130
| 0.589109
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,008
|
__init__.py
|
zatosource_zato/code/zato-server/test/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 148
|
Python
|
.py
| 5
| 28.2
| 64
| 0.687943
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,009
|
test_sio_eval.py
|
zatosource_zato/code/zato-server/test/zato/test_sio_eval.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from copy import deepcopy
from unittest import main
# Zato
from zato.common.test import BaseSIOTestCase
from zato.common.test.apispec_ import CyMyService
# Zato - Cython
from zato.simpleio import CySimpleIO
# ################################################################################################################################
# ################################################################################################################################
class SIOEvalTestCase(BaseSIOTestCase):
    """ Tests how SimpleIO's eval_ converts raw input values to Python types.
    """

    def _get_sio(self):
        """ Returns a fresh SimpleIO definition attached to a copy of the test service.
        """
        service_class = deepcopy(CyMyService)
        CySimpleIO.attach_sio(None, self.get_server_config(), service_class)
        return service_class._sio # type: CySimpleIO

    def test_eval_bool(self):

        sio = self._get_sio()

        # Empty strings and None both evaluate to a boolean False ..
        for raw in ('', None):
            result = sio.eval_('is_abc', raw, None)
            self.assertIsInstance(result, bool)
            self.assertFalse(result)

        # .. whereas each of these truthy markers evaluates to True.
        for raw in ('t', 'true', 'on'):
            result = sio.eval_('is_abc', raw, None)
            self.assertIsInstance(result, bool)
            self.assertTrue(result)

    def test_eval_int(self):

        sio = self._get_sio()

        # Empty strings and None both produce None ..
        for raw in ('', None):
            result = sio.eval_('user_id', raw, None)
            self.assertIsNone(result)

        # .. whereas numeric strings and ints alike are converted to int.
        for raw, expected in (('111', 111), (222, 222)):
            result = sio.eval_('user_id', raw, None)
            self.assertIsInstance(result, int)
            self.assertEqual(result, expected)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
| 2,947
|
Python
|
.py
| 67
| 37.179104
| 130
| 0.499475
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,010
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,011
|
test_all.py
|
zatosource_zato/code/zato-server/test/zato/pattern/test_all.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main, TestCase
# Faker
from faker import Faker
# gevent
from gevent import sleep
from gevent.lock import RLock
# Zato
from zato.common import CHANNEL
from zato.common.ext.dataclasses import dataclass
from zato.common.util import spawn_greenlet
from zato.server.pattern.api import ParallelExec
from zato.server.pattern.base import ParallelBase
from zato.server.pattern.model import ParallelCtx
from zato.server.service import PatternsFacade
# ################################################################################################################################
# ################################################################################################################################
# Random bytes to seed our data with
seed = 'FVD2nbPOVIXZ6'

# Seed the generator up front so every test run produces the same fake data
Faker.seed(seed)
fake = Faker()

# ################################################################################################################################
# ################################################################################################################################

# Channels through which pattern-based invocations arrive (see FakeService.invoke_async)
_pattern_call_channels=(CHANNEL.FANOUT_CALL, CHANNEL.PARALLEL_EXEC_CALL)
_fanout_call = CHANNEL.FANOUT_CALL
# ################################################################################################################################
# ################################################################################################################################
class FakeService:
    """ A minimal stand-in for a real service object, sufficient for pattern tests.
    """

    def __init__(self, cache, lock, response_payload):
        # type: (dict, RLock, str) -> None
        self.cid = None   # type: int
        self.name = None  # type: str
        self.cache = cache
        self.lock = lock
        self.patterns = PatternsFacade(self, self.cache, self.lock)
        self.response_payload = response_payload
        self.response_exception = None

    def invoke_async(self, target_name, payload, channel, cid):

        # Model the target service being invoked
        target = FakeService(self.cache, self.lock, payload)
        target.name = target_name
        target.cid = cid

        # Callbacks run only for pattern-based invocations - anything else is a no-op
        if channel not in _pattern_call_channels:
            return

        # Pick the callback matching the channel ..
        if channel == _fanout_call:
            callback = self.patterns.fanout.on_call_finished
        else:
            callback = self.patterns.parallel.on_call_finished

        # .. and run it in a new greenlet.
        spawn_greenlet(callback, target, self.response_payload, self.response_exception)
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class ParamsCtx:
    """ Groups all the input parameters that a single pattern test needs,
    as prepared by BaseTestCase.get_default_params.
    """
    cid: object
    source_service: FakeService
    source_name: str

    # Names and payloads of the two target services
    service_name1: str
    service_name2: str
    service_input1: str
    service_input2: str

    # Names of the final and per-target callbacks
    on_final1: str
    on_final2: str
    on_target1: str
    on_target2: str

    # Aggregates of the above, in the shapes the invoke API accepts
    targets: list
    on_final_list: list
    on_target_list: list
# ################################################################################################################################
# ################################################################################################################################
class BaseTestCase(TestCase):
    """ A base class providing ready-to-use default parameters for pattern tests.
    """

    def get_default_params(self, cache, lock, response_payload=None):
        # type: (dict, RLock, str) -> ParamsCtx
        """ Builds a ParamsCtx with a fake source service, two targets and callback lists.
        """
        ctx = ParamsCtx()

        ctx.cid = fake.pyint()
        ctx.source_name = 'source.name.1'

        ctx.source_service = FakeService(cache, lock, response_payload)
        ctx.source_service.cid = ctx.cid
        ctx.source_service.name = ctx.source_name

        ctx.service_name1 = 'my.service.1'
        ctx.service_name2 = 'my.service.2'

        ctx.service_input1 = {'my.input': '1'}
        ctx.service_input2 = {'my.input': '2'}

        ctx.on_final1 = 'on.final.1'
        ctx.on_final2 = 'on.final.2'

        ctx.on_target1 = 'on.target.1'
        ctx.on_target2 = 'on.target.2'

        ctx.targets = {
            ctx.service_name1: ctx.service_input1,
            ctx.service_name2: ctx.service_input2,
        }

        ctx.on_final_list = [ctx.on_final1, ctx.on_final2]
        ctx.on_target_list = [ctx.on_target1, ctx.on_target2]

        return ctx
# ################################################################################################################################
# ################################################################################################################################
class PatternBaseTestCase(BaseTestCase):
    """ Confirms that ParallelBase.invoke builds a correct ParallelCtx
    out of its input parameters, for each supported combination of CID
    and callback arguments. All tests share the _check_invoke helper,
    which replaces five near-identical copies of the same assertions.
    """

    def _check_invoke(self, params_ctx, cache, lock, invoke_args, expected_cid,
        expected_on_final_list, expected_on_target_list):
        """ Calls ParallelBase.invoke with the given positional arguments and asserts
        that the ParallelCtx handed over to _invoke carries the expected CID,
        callback lists and per-target payloads. A None expected list means
        the corresponding context attribute must be None.
        """
        def fake_invoke(ctx):
            # type: (ParallelCtx) -> None

            self.assertEqual(ctx.cid, expected_cid)
            self.assertEqual(ctx.source_name, params_ctx.source_name)

            if expected_on_final_list is None:
                self.assertIsNone(ctx.on_final_list)
            else:
                self.assertListEqual(ctx.on_final_list, expected_on_final_list)

            if expected_on_target_list is None:
                self.assertIsNone(ctx.on_target_list)
            else:
                self.assertListEqual(ctx.on_target_list, expected_on_target_list)

            # Targets are always the two services from get_default_params
            self.assertEqual(ctx.target_list[0].name, params_ctx.service_name1)
            self.assertDictEqual(ctx.target_list[0].payload, params_ctx.service_input1)

            self.assertEqual(ctx.target_list[1].name, params_ctx.service_name2)
            self.assertDictEqual(ctx.target_list[1].payload, params_ctx.service_input2)

        # Replace the real _invoke with the assertions-only version above ..
        api = ParallelBase(params_ctx.source_service, cache, lock)
        api._invoke = fake_invoke

        # .. and run the method under test.
        api.invoke(*invoke_args)

# ################################################################################################################################

    def test_base_parallel_invoke_params_no_cid(self):

        cache = {}
        lock = RLock()
        params_ctx = self.get_default_params(cache, lock)

        # Without an explicit CID, the source service's own CID is used
        self._check_invoke(
            params_ctx, cache, lock,
            (params_ctx.targets, params_ctx.on_final_list, params_ctx.on_target_list),
            params_ctx.cid, params_ctx.on_final_list, params_ctx.on_target_list)

# ################################################################################################################################

    def test_base_parallel_invoke_params_with_cid(self):

        cache = {}
        lock = RLock()
        params_ctx = self.get_default_params(cache, lock)

        custom_cid = fake.pystr()

        # An explicitly given CID takes precedence over the source service's one
        self._check_invoke(
            params_ctx, cache, lock,
            (params_ctx.targets, params_ctx.on_final_list, params_ctx.on_target_list, custom_cid),
            custom_cid, params_ctx.on_final_list, params_ctx.on_target_list)

# ################################################################################################################################

    def test_base_parallel_invoke_params_single_elements(self):

        cache = {}
        lock = RLock()
        params_ctx = self.get_default_params(cache, lock)

        custom_on_final = fake.pystr()
        custom_on_target = fake.pystr()

        # Scalar callbacks are wrapped in one-element lists
        self._check_invoke(
            params_ctx, cache, lock,
            (params_ctx.targets, custom_on_final, custom_on_target),
            params_ctx.cid, [custom_on_final], [custom_on_target])

# ################################################################################################################################

    def test_base_parallel_invoke_params_final_is_none(self):

        cache = {}
        lock = RLock()
        params_ctx = self.get_default_params(cache, lock)

        custom_on_final = None
        custom_on_target = fake.pystr()

        # A missing on-final callback stays None instead of becoming a list
        self._check_invoke(
            params_ctx, cache, lock,
            (params_ctx.targets, custom_on_final, custom_on_target),
            params_ctx.cid, None, [custom_on_target])

# ################################################################################################################################

    def test_base_parallel_invoke_params_target_is_none(self):

        cache = {}
        lock = RLock()
        params_ctx = self.get_default_params(cache, lock)

        custom_on_final = fake.pystr()
        custom_on_target = None

        # A missing on-target callback stays None instead of becoming a list
        self._check_invoke(
            params_ctx, cache, lock,
            (params_ctx.targets, custom_on_final, custom_on_target),
            params_ctx.cid, [custom_on_final], None)
# ################################################################################################################################
# ################################################################################################################################
class ParallelExecTestCase(BaseTestCase):
    """ An end-to-end run of the parallel-exec pattern against fake services.
    """

    def test_parallel_exec(self):

        cache = {}
        lock = RLock()
        response_payload = 'my.payload'

        params_ctx = self.get_default_params(cache, lock, response_payload)

        # No final callbacks are used in this test
        params_ctx.on_final_list = []

        api = ParallelExec(params_ctx.source_service, cache, lock)
        api.invoke(params_ctx.targets, params_ctx.on_target_list)

        # Give the test enough time to run
        # (callbacks execute in separate greenlets - see FakeService.invoke_async)
        sleep(0.01)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
| 12,538
|
Python
|
.py
| 236
| 44.95339
| 130
| 0.507494
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,012
|
test_google.py
|
zatosource_zato/code/zato-server/test/zato/connection/test_google.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from json import loads
from unittest import main, TestCase
# Zato
from zato.common.util.open_ import open_r
from zato.server.connection.google import GoogleClient
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    """ Names of environment variables that configure this module's tests.
    """
    # Path to a JSON file with Google service credentials
    Env_Key_Service_File_Path = 'Zato_Test_Google_Service_File_Path'

    # The Google user to connect as (passed to GoogleClient)
    Env_Key_User = 'Zato_Test_Google_User'
# ################################################################################################################################
# ################################################################################################################################
class GoogleClientTestCase(TestCase):
    """ Connects to Google Drive with credentials taken from the environment.
    The test is skipped silently if the environment is not configured.
    """

    def test_connect(self):

        # Try to get the main environment variable ..
        service_file_path = os.environ.get(ModuleCtx.Env_Key_Service_File_Path)

        # .. and if it does not exist, do not run the test
        if not service_file_path:
            return
        else:
            # The variable points to a credentials file - read it in ..
            with open_r(service_file_path) as f:
                service_file_path = f.read()

            # .. and parse its contents as JSON.
            service_file_path = loads(service_file_path)

        # Get the rest of the configuration
        user = os.environ.get(ModuleCtx.Env_Key_User) or 'Missing_Env_Key_User'

        # These are constant
        api_name = 'drive'
        api_version = 'v3'
        scopes = ['https://www.googleapis.com/auth/drive']

        client = GoogleClient(api_name, api_version, user, scopes, service_file_path)
        client.connect()

        # Use the Google Drive API ..
        files_api = client.conn.files()

        # .. and list all of the remote files available ..
        listing = files_api.list().execute()

        # .. if we are here, it means that the credentials were valid,
        # .. and even if we do not know what exactly is returned,
        # .. we still do know that it is a dict object.
        self.assertIsInstance(listing, dict)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
# ################################################################################################################################
| 2,851
|
Python
|
.py
| 52
| 48.826923
| 130
| 0.405542
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,013
|
test_email_imap.py
|
zatosource_zato/code/zato-server/test/zato/connection/test_email_imap.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from operator import itemgetter
from unittest import main, TestCase
# Bunch
from bunch import bunchify
# Zato
from zato.common.api import EMAIL
from zato.server.connection.email import GenericIMAPConnection, Microsoft365IMAPConnection
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.api import IMAPMessage
from zato.common.typing_ import anylist
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    """ Environment variable names and constants used by the IMAP tests below.
    """

    # Configuration of the generic IMAP server tests
    Env_Key_IMAP_Host = 'Zato_Test_IMAP_Host'
    Env_Key_IMAP_Port = 'Zato_Test_IMAP_Port'
    Env_Key_IMAP_Display_Name = 'Zato_Test_IMAP_Display_Name'
    Env_Key_IMAP_Email = 'Zato_Test_IMAP_Email'
    Env_Key_IMAP_Password = 'Zato_Test_IMAP_Password'
    Env_Key_IMAP_Test_Subject = 'Zato_Test_IMAP_Test_Subject'

    # Configuration of the Microsoft 365 tests
    Env_Key_MS365_Tenant_ID = 'Zato_Test_IMAP_MS365_Tenant_ID'
    Env_Key_MS365_Client_ID = 'Zato_Test_IMAP_MS365_Client_ID'
    Env_Key_MS365_Secret = 'Zato_Test_IMAP_MS365_Secret'
    Env_Key_MS365_Display_Name = 'Zato_Test_IMAP_MS365_Display_Name'
    Env_Key_MS365_Email = 'Zato_Test_IMAP_MS365_Email'
    Env_Key_MS365_Test_Subject = 'Zato_Test_IMAP_MS365_Test_Subject'

    # The exact HTML body of the known Microsoft 365 test message,
    # compared verbatim in Microsoft365IMAPConnectionTestCase.test_get
    HTML_Body_MS365 = '<html><head>\r\n<meta http-equiv="Content-Type" content="text/html; charset=utf-8">' \
        '<style type="text/css" style="display:none">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->' \
        '\r\n</style></head><body dir="ltr"><div class="elementToProof" style="font-family:Calibri,Arial,Helvetica,' \
        'sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)"><div style="color:#000000' \
        '; background-color:#ffffff; font-family:\'Ubuntu Mono\',\'Droid Sans Mono\',\'monospace\',monospace,\'Droid Sans ' \
        'Fallback\',\'Droid Sans Mono\',\'monospace\',monospace,\'Droid Sans Fallback\'; font-weight:normal; ' \
        'font-size:17px; line-height:20px"><span><span class="ContentPasted0" style="color:#007f00">This is a test message.' \
        '</span></span></div></div></body></html>'
# ################################################################################################################################
# ################################################################################################################################
class ExpectedGetData:
    """ What a get-message test expects to receive - filled in by each test case
    and then validated against the actual message in run_get_assertions.
    """
    msg_id: 'str'
    imap_message: 'IMAPMessage'
    body_plain: 'anylist'
    body_html: 'anylist'
    name: 'str'
    username: 'str'
    subject: 'str'
    sent_from_display_name: 'str'
    sent_from_email: 'str'
    sent_to_display_name: 'str'
    sent_to_email: 'str'
    cc: 'str | None'
# ################################################################################################################################
# ################################################################################################################################
class BaseIMAPConnectionTestCase(TestCase):
    """ Assertions shared by the generic and Microsoft 365 IMAP test cases.
    """

    # The messages compared are long - always show full diffs
    maxDiff = 100_000_000

    def run_get_assertions(self, expected:'ExpectedGetData') -> 'None':
        """ Asserts that the IMAP message received matches the expected metadata,
        sender, recipients, body and attachments.
        """

        # Both IDs are opaque, but they must be non-trivial strings
        self.assertTrue(len(expected.msg_id) > 1)
        self.assertTrue(len(expected.imap_message.data.message_id) > 1)

        self.assertEqual(expected.imap_message.data.subject, expected.subject)

        #
        # From
        #
        sent_from = expected.imap_message.data.sent_from[0]
        expected_sent_from = {
            'name': expected.sent_from_display_name,
            'email': expected.sent_from_email,
        }
        self.assertDictEqual(sent_from, expected_sent_from)

        #
        # To
        #
        sent_to = expected.imap_message.data.sent_to
        self.assertEqual(len(sent_to), 1)
        sent_to = sent_to[0]
        expected_sent_to = {
            'name': expected.sent_to_display_name,
            'email': expected.sent_to_email,
        }
        self.assertDictEqual(sent_to, expected_sent_to)

        #
        # CC
        #
        # Note that the CC recipient is expected to equal the To one
        sent_to_cc = expected.imap_message.data.cc
        self.assertEqual(len(sent_to_cc), 1)
        sent_to_cc = sent_to_cc[0]
        expected_sent_to_cc = {
            'name': expected.sent_to_display_name,
            'email': expected.sent_to_email,
        }
        self.assertDictEqual(sent_to_cc, expected_sent_to_cc)

        #
        # Body
        #
        body = expected.imap_message.data.body
        expected_body = {
            'plain': expected.body_plain,
            'html': expected.body_html,
        }
        self.assertDictEqual(body, expected_body)

        #
        # Attachments
        #
        attachments = expected.imap_message.data.attachments

        # Sort by file name to make the order deterministic
        attachments = sorted(attachments, key=itemgetter('filename'))

        # We expect for exactly two attachments to exist
        self.assertEqual(len(attachments), 2)

        attach1 = attachments[0]
        attach2 = attachments[1]

        self.assertEqual(attach1['content-type'], 'text/plain')
        self.assertEqual(attach1['size'], 5)
        self.assertEqual(attach1['filename'], 'file1.txt')

        self.assertEqual(attach2['content-type'], 'text/plain')
        self.assertEqual(attach2['size'], 5)
        self.assertEqual(attach2['filename'], 'file2.txt')

        # Attachment contents are file-like buffers holding the raw bytes
        attach1_content = attach1['content']
        attach1_content = attach1_content.getvalue()

        attach2_content = attach2['content']
        attach2_content = attach2_content.getvalue()

        self.assertEqual(attach1_content, b'data1')
        self.assertEqual(attach2_content, b'data2')
# ################################################################################################################################
# ################################################################################################################################
class GenericIMAPConnectionTestCase(BaseIMAPConnectionTestCase):
    """ Tests the generic IMAP connection wrapper against a live server
    configured through environment variables.
    """

    def get_conn(self) -> 'GenericIMAPConnection | None':
        """ Builds an IMAP connection from the environment or returns None
        if the test server is not configured.
        """

        # Try to get the main environment variable ..
        host = os.environ.get(ModuleCtx.Env_Key_IMAP_Host)

        # .. and if it does not exist, do not run the test
        if not host:
            return
        else:
            port = os.environ.get(ModuleCtx.Env_Key_IMAP_Port)
            email = os.environ.get(ModuleCtx.Env_Key_IMAP_Email)
            password = os.environ.get(ModuleCtx.Env_Key_IMAP_Password)
            test_subject = os.environ.get(ModuleCtx.Env_Key_IMAP_Test_Subject)

        config = bunchify({
            'cluster_id': 1,
            'id': 2,
            'is_active':True,
            'opaque1':None,
            'name': 'My Name',
            'host': host,
            'port': port,
            'username': email,
            'password': password,
            'mode': EMAIL.IMAP.MODE.SSL,
            'debug_level': 2,
            # Only messages with the known test subject are retrieved
            'get_criteria': f'SUBJECT "{test_subject}"',
            'server_type': EMAIL.IMAP.ServerType.Generic,
        })

        conn = GenericIMAPConnection(config, config)
        return conn

# ################################################################################################################################

    def test_get(self):
        """ Fetches the single known test message and validates its contents.
        """

        conn = self.get_conn()
        if not conn:
            return

        # Reusable
        test_subject = os.environ.get(ModuleCtx.Env_Key_IMAP_Test_Subject) or ''
        display_name = os.environ.get(ModuleCtx.Env_Key_IMAP_Display_Name) or ''
        email = os.environ.get(ModuleCtx.Env_Key_IMAP_Email) or ''

        # Run the function under test
        result = conn.get()

        # Turn it into a list upfront
        result = list(result)

        # We expect to find one test message
        self.assertEqual(len(result), 1)

        # From now on, work with this message only
        result = result[0]
        msg_id, imap_message = result

        # Prepare the information about what we expect to receive ..
        expected = ExpectedGetData()
        expected.msg_id = msg_id
        expected.imap_message = imap_message
        expected.subject = test_subject
        expected.sent_from_display_name = display_name
        expected.sent_from_email = email
        expected.sent_to_display_name = display_name
        expected.sent_to_email = email

        # The generic-IMAP test message is expected to carry a plain-text body only
        expected.body_plain = ['This is a test message.']
        expected.body_html = []

        # .. and run assertions now.
        self.run_get_assertions(expected)
# ################################################################################################################################
# ################################################################################################################################
class Microsoft365IMAPConnectionTestCase(BaseIMAPConnectionTestCase):
    """ Tests the Microsoft 365 IMAP connection wrapper against a live account
    configured through environment variables.
    """

    def get_conn(self) -> 'Microsoft365IMAPConnection | None':
        """ Builds a Microsoft 365 connection from the environment or returns None
        if the test account is not configured.
        """

        # Try to get the main environment variable ..
        tenant_id = os.environ.get(ModuleCtx.Env_Key_MS365_Tenant_ID)

        # .. and if it does not exist, do not run the test
        if not tenant_id:
            return
        else:
            client_id = os.environ.get(ModuleCtx.Env_Key_MS365_Client_ID)
            secret = os.environ.get(ModuleCtx.Env_Key_MS365_Secret)
            email = os.environ.get(ModuleCtx.Env_Key_MS365_Email)

        config = {
            'cluster_id': 1,
            'id': 2,
            'is_active':True,
            'opaque1':None,
            'name': 'My Name',
            'tenant_id': tenant_id,
            'client_id': client_id,
            'password': secret,
            'username': email,
            'filter_criteria': EMAIL.DEFAULT.FILTER_CRITERIA,
            'server_type': EMAIL.IMAP.ServerType.Microsoft365,
        }

        conn = Microsoft365IMAPConnection(config, config)
        return conn

# ################################################################################################################################

    def test_ping(self):
        """ Pings the account and confirms that the Inbox folder exists.
        """

        conn = self.get_conn()
        if not conn:
            return

        # Assume we will not find the Inbox folder
        found_inbox = False

        # Run the function under test
        result = conn.ping()

        # Ping returns an iterable of folder-like objects, each with a .name
        for item in result:
            if item.name == 'Inbox':
                found_inbox = True
                break

        if not found_inbox:
            self.fail(f'Expected for folder Inbox to exist among {result}')

# ################################################################################################################################

    def test_get(self):
        """ Fetches the single known test message and validates its contents.
        """

        conn = self.get_conn()
        if not conn:
            return

        # Reusable
        test_subject = os.environ.get(ModuleCtx.Env_Key_MS365_Test_Subject) or ''
        username = os.environ.get(ModuleCtx.Env_Key_MS365_Display_Name) or ''
        email = os.environ.get(ModuleCtx.Env_Key_MS365_Email) or ''

        # Run the function under test
        result = conn.get(filter=f"subject eq '{test_subject}'")

        # Turn it into a list upfront
        result = list(result)

        # We expect to find one test message
        self.assertEqual(len(result), 1)

        # From now on, work with this message only
        result = result[0]
        msg_id, imap_message = result

        # Prepare the information about what we expect to receive ..
        expected = ExpectedGetData()
        expected.msg_id = msg_id
        expected.imap_message = imap_message
        expected.subject = test_subject
        expected.sent_from_display_name = username
        expected.sent_from_email = email
        expected.sent_to_display_name = username
        expected.sent_to_email = email

        # The Microsoft 365 test message is expected to carry an HTML body only
        expected.body_plain = []
        expected.body_html = [ModuleCtx.HTML_Body_MS365]

        # .. and run assertions now.
        self.run_get_assertions(expected)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
# ################################################################################################################################
| 12,905
|
Python
|
.py
| 268
| 39.764925
| 130
| 0.498445
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,014
|
test_facade.py
|
zatosource_zato/code/zato-server/test/zato/connection/test_facade.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Must come first
from gevent.monkey import patch_all
_ = patch_all()
# stdlib
import os
import logging
from unittest import main, TestCase
# ################################################################################################################################
# ################################################################################################################################
if 0:
pass
# ################################################################################################################################
# ################################################################################################################################
log_format = '%(asctime)s - %(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(level=logging.INFO, format=log_format)
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    """ Constants used by this module's tests.
    """
    # The test runs only if this environment variable is set
    Env_Key_Should_Test = 'Zato_Test_REST_Facade'
# ################################################################################################################################
# ################################################################################################################################
class RESTFacadeTestCase(TestCase):
    """ Invokes a built-in service exercising the REST facade through the command line.
    """

    def test_rest_calls(self):

        # Run only if explicitly enabled through the environment
        if not os.environ.get(ModuleCtx.Env_Key_Should_Test):
            return

        # Zato
        from zato.common.util.cli import CommandLineServiceInvoker

        # The service to invoke
        service = 'helpers.rest.internal.tester'

        # NOTE(review): presumably the service runs its own checks server-side
        # and this is its CLI output on success - confirm.
        expected_stdout = b'(None)\n'

        invoker = CommandLineServiceInvoker(expected_stdout)
        invoker.invoke_and_test(service)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
# ################################################################################################################################
| 2,565
|
Python
|
.py
| 42
| 57.595238
| 130
| 0.261286
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,015
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/connection/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,016
|
test_def_kafka.py
|
zatosource_zato/code/zato-server/test/zato/connection/test_def_kafka.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Must come first
from gevent.monkey import patch_all
_ = patch_all()
# stdlib
import logging
import os
from unittest import main, TestCase
# Bunch
from bunch import bunchify
# Zato
from zato.common import Kafka
from zato.common.typing_ import cast_
from zato.server.generic.api.def_kafka import DefKafkaWrapper
# ################################################################################################################################
# ################################################################################################################################
# Imports below are for type completion only - the "if 0" guard
# means nothing here is actually imported at runtime.
if 0:
    from bunch import Bunch
    from pykafka import KafkaClient
    # Re-assignment presumably marks the name as used for linters - TODO confirm
    KafkaClient = KafkaClient
# ################################################################################################################################
# ################################################################################################################################
# Basic logging configuration shared by all tests in this module
log_format = '%(asctime)s - %(levelname)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(level=logging.INFO, format=log_format)
# ################################################################################################################################
# ################################################################################################################################
# Local aliases to Kafka connection defaults and timeouts, used by get_config below
default = Kafka.Default
timeout = default.Timeout
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    # Name of the environment variable gating the tests in this module -
    # each test returns early unless this variable is set.
    Env_Key_Should_Test = 'Zato_Test_Kafka'
# ################################################################################################################################
# ################################################################################################################################
class DefKafkaTestCase(TestCase):
    """ Manual integration tests for DefKafkaWrapper.

    Each test returns early unless the Zato_Test_Kafka environment variable
    is set, because a live Kafka broker is required to run them.
    """

    def get_config(self, conn_name:'str') -> 'Bunch':
        """ Returns a connection definition pointing to a local Kafka broker. """
        config = bunchify({
            'name': conn_name,
            'is_active': True,
            'username': 'kafka_user',
            'secret': 'kafka_password',
            'server_list': default.Server_List,
            'should_use_zookeeper': True,
            'socket_timeout': timeout.Socket,
            'offset_timeout': timeout.Offsets,
            'should_exclude_internal_topics': True,
            'source_address': None,
            'broker_version': default.Broker_Version,
            'is_tls_enabled': False,
        })
        return config

    def xtest_ping(self):
        # Prefixed with "x" so the test runner does not collect it by default.
        if not os.environ.get(ModuleCtx.Env_Key_Should_Test):
            return

        conn_name = 'DefKafkaTestCase.test_ping'
        config = self.get_config(conn_name)

        wrapper = DefKafkaWrapper(config)
        wrapper.ping()

    def test_publish(self):
        if not os.environ.get(ModuleCtx.Env_Key_Should_Test):
            return

        conn_name = 'DefKafkaTestCase.test_publish'
        config = self.get_config(conn_name)

        wrapper = DefKafkaWrapper(config)
        client = cast_('KafkaClient', wrapper.client)

        # Obtaining a consumer for the topic is enough to confirm
        # that the connection to the broker actually works.
        topic = client.topics['my.test']
        consumer = topic.get_simple_consumer()

        self.assertIsNotNone(consumer)
# ################################################################################################################################
# ################################################################################################################################
# Run the tests when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 4,641
|
Python
|
.py
| 89
| 45.966292
| 130
| 0.339898
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,017
|
test_outconn_ldap.py
|
zatosource_zato/code/zato-server/test/zato/connection/test_outconn_ldap.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from unittest import main, TestCase
# Bunch
from bunch import bunchify
# Zato
from zato.server.generic.api.outconn_ldap import LDAPClient
# ################################################################################################################################
# ################################################################################################################################
# Import below is for type completion only - the "if 0" guard
# means nothing here is actually imported at runtime.
if 0:
    from bunch import Bunch
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    # Name of the environment variable gating the tests in this module -
    # each test returns early unless this variable is set.
    Env_Key_Should_Test = 'Zato_Test_LDAP'
# ################################################################################################################################
# ################################################################################################################################
class OutconnLDAPTestCase(TestCase):
    """ Manual integration tests for LDAPClient.

    Each test returns early unless the Zato_Test_LDAP environment variable
    is set, because a live LDAP server is required to run them.
    """

    def get_config(self, conn_name:'str') -> 'Bunch':
        """ Returns a connection definition pointing to a local LDAP server. """
        config = bunchify({
            'name': conn_name,
            'is_active': True,
            'server_list': ['localhost:1389'],
            'username': 'cn=admin,dc=example,dc=org',
            'secret': 'adminpassword',
            'is_tls_enabled': False,
            'get_info': None,
            'connect_timeout': 5,
            'ip_mode': 'IP_SYSTEM_DEFAULT',
            'tls': None,
            'sasl_mechanism': None,
            'pool_name': None,
            'pool_ha_strategy': 'ROUND_ROBIN',
            'pool_max_cycles': None,
            'pool_exhaust_timeout': None,
            'auto_bind': None,
            'use_auto_range': None,
            'should_check_names': None,
            'is_stats_enabled': None,
            'is_read_only': None,
            'pool_lifetime': None,
            'should_return_empty_attrs': None,
            'pool_keep_alive': None,
        })
        return config

    def test_ping(self):
        if not os.environ.get(ModuleCtx.Env_Key_Should_Test):
            return

        conn_name = 'OutconnLDAPTestCase.test_ping'
        config = self.get_config(conn_name)

        client = LDAPClient(config)
        client.ping()

    def test_query(self):
        if not os.environ.get(ModuleCtx.Env_Key_Should_Test):
            return

        # Fixed - this previously reused the connection name of test_ping.
        conn_name = 'OutconnLDAPTestCase.test_query'
        config = self.get_config(conn_name)

        client = LDAPClient(config)

        # Where in the directory we expect to find the user
        search_base = 'dc=example, dc=org'

        # Look up users by either username or email
        search_filter = '(&(|(uid={user_info})(mail={user_info})))'
        user_filter = search_filter.format(user_info='user01')

        # We are looking up these attributes
        query_attributes = ['uid', 'givenName', 'sn', 'mail']

        with client.get() as conn:
            has_result = conn.search(search_base, user_filter, attributes=query_attributes)
            if not has_result:
                self.fail('Expected for results to be available')
# ################################################################################################################################
# ################################################################################################################################
# Run the tests when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 4,216
|
Python
|
.py
| 83
| 43.036145
| 130
| 0.362817
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,018
|
test_zato_kvdb_list.py
|
zatosource_zato/code/zato-server/test/zato/kvdb/test_zato_kvdb_list.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main, TestCase
# Zato
from zato.common.test import rand_int, rand_string
from zato.server.connection.kvdb.api import ObjectCtx, ListRepo
# ################################################################################################################################
# ################################################################################################################################
# Synchronization parameters - note that they are not referenced by the tests
# in this module; presumably kept for symmetry with related KVDB test modules.
sync_threshold = 1
sync_interval = 1
# ################################################################################################################################
# ################################################################################################################################
class TransientRepositoryTestCase(TestCase):
    """ Tests for ListRepo - an in-RAM, list-based repository of ObjectCtx objects. """

    def _new_ctx(self, object_id):
        """ Returns a new ObjectCtx with the given ID assigned. """
        ctx = ObjectCtx()
        ctx.id = object_id
        return ctx

    def test_repo_init(self):
        """ Constructor arguments are stored on the repository as-is. """
        name = rand_string()
        data_path = rand_string()
        max_size = rand_int()

        repo = ListRepo(name, data_path, max_size)

        self.assertEqual(repo.name, name)
        self.assertEqual(repo.data_path, data_path)
        self.assertEqual(repo.max_size, max_size)

    def test_repo_push_max_size(self):
        """ The repository never grows beyond its configured max size. """
        name = rand_string()
        data_path = rand_string()
        max_size = 2

        repo = ListRepo(name, data_path, max_size)

        # Push more objects than the max size allows ..
        for _x in range(max_size + 1):
            repo.append(None)

        # .. we have reached the maximum size but it should not be greater than that.
        self.assertEqual(repo.get_size(), max_size)

    def test_repo_get(self):
        """ get returns the exact object previously appended under a given ID. """
        id1 = rand_string()
        id2 = rand_string()
        id3 = rand_string()

        repo = ListRepo()

        ctx1 = self._new_ctx(id1)
        repo.append(ctx1)
        repo.append(self._new_ctx(id2))
        repo.append(self._new_ctx(id3))

        given_ctx = repo.get(id1)
        self.assertIs(given_ctx, ctx1)

    def test_repo_delete(self):
        """ delete returns the removed object and makes it unreachable via get. """
        id1 = rand_string()
        id2 = rand_string()

        repo = ListRepo()

        ctx1 = self._new_ctx(id1)
        repo.append(ctx1)
        repo.append(self._new_ctx(id2))

        deleted_ctx = repo.delete(id1)
        self.assertIs(ctx1, deleted_ctx)

        try:
            repo.get(id1)
        except KeyError as e:
            self.assertEqual(e.args[0], 'Object not found `{}`'.format(id1))
        else:
            self.fail('KeyError should have been raised because object has been deleted')

    def test_repo_clear(self):
        """ remove_all empties the repository. """
        repo = ListRepo()

        repo.append(self._new_ctx(rand_string()))
        repo.append(self._new_ctx(rand_string()))

        repo.remove_all()

        self.assertEqual(repo.get_size(), 0)

    def test_repo_get_list(self):
        """ get_list paginates objects, returning them newest-first. """
        repo = ListRepo()
        id_list = []

        # Append twelve objects with IDs such as '1-abc123', '2-def456' etc.
        for idx in range(1, 13):
            object_id = '{}-{}'.format(idx, rand_string())
            id_list.append(object_id)
            repo.append(self._new_ctx(object_id))

        cur_page = 2
        page_size = 3

        results = repo.get_list(cur_page, page_size)
        result = results['result']

        # Objects come back newest-first, so the second page of three
        # contains the objects appended ninth, eighth and seventh.
        self.assertEqual(result[0].id, id_list[8])
        self.assertEqual(result[1].id, id_list[7])
        self.assertEqual(result[2].id, id_list[6])
# ################################################################################################################################
# Run the tests when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
| 5,860
|
Python
|
.py
| 150
| 31.06
| 130
| 0.417213
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,019
|
test_zato_kvdb_number.py
|
zatosource_zato/code/zato-server/test/zato/kvdb/test_zato_kvdb_number.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from time import sleep
from unittest import main, TestCase
# dateutil
from dateutil.parser import parse as dt_parse
# Zato
from zato.common.api import StatsKey
from zato.common.test import rand_int, rand_string
from zato.server.connection.kvdb.api import NumberRepo
# ################################################################################################################################
# ################################################################################################################################
# Local alias for readability in the timestamp assertions below
utcnow = datetime.utcnow

# ################################################################################################################################
# ################################################################################################################################

# Large values so that background synchronization never kicks in while tests run
sync_threshold = 120_000
sync_interval = 120_000
# ################################################################################################################################
# ################################################################################################################################
class NumberTestCase(TestCase):
    """ Tests for NumberRepo - a repository of named integer counters. """

    def test_repo_init(self):
        """ Constructor arguments are stored on the repository as-is. """
        name1 = rand_string()
        data_path1 = rand_string()
        max_value1 = rand_int()
        allow_negative1 = False

        name2 = rand_string()
        data_path2 = rand_string()
        max_value2 = rand_int()
        allow_negative2 = True

        repo1 = NumberRepo(name1, data_path1, sync_threshold, sync_interval, max_value1, allow_negative1)
        repo2 = NumberRepo(name2, data_path2, sync_threshold, sync_interval, max_value2, allow_negative2)

        self.assertEqual(repo1.name, name1)
        self.assertEqual(repo1.data_path, data_path1)
        self.assertEqual(repo1.max_value, max_value1)
        self.assertFalse(repo1.allow_negative)

        self.assertEqual(repo2.name, name2)
        self.assertEqual(repo2.data_path, data_path2)
        self.assertEqual(repo2.max_value, max_value2)
        self.assertTrue(repo2.allow_negative)

    def test_repo_incr(self):
        """ Each incr call increases the counter by one and returns the new value. """
        repo_name = rand_string()
        key_name = rand_string()

        # NOTE(review): unlike in test_repo_init, no data_path is passed here,
        # so sync_threshold occupies the position data_path has above -
        # confirm against NumberRepo's signature that this lines up as intended.
        repo = NumberRepo(repo_name, sync_threshold, sync_interval)

        value = repo.incr(key_name)
        self.assertEqual(value, 1)

        for _x in range(3):
            value = repo.incr(key_name)

        self.assertEqual(value, 4)

    def test_repo_incr_max_value(self):
        """ A counter never exceeds its configured maximum. """
        repo_name = rand_string()
        key_name = rand_string()
        max_value = 2

        repo = NumberRepo(repo_name, sync_threshold, sync_interval, max_value=max_value)

        # By multiplying we ensure that max_value is reached ..
        for _x in range(max_value * 2):
            value = repo.incr(key_name)

        # .. yet, it will never be exceeded.
        self.assertEqual(value, max_value)

    def test_repo_decr(self):
        """ Each decr call decreases the counter by one and returns the new value. """
        repo_name = rand_string()
        key_name = rand_string()

        repo = NumberRepo(repo_name, sync_threshold, sync_interval)

        for _x in range(4):
            repo.incr(key_name)

        repo.decr(key_name)
        value = repo.decr(key_name)

        self.assertEqual(value, 2)

    def test_repo_decr_below_zero_allow_negative_true(self):
        """ With allow_negative=True a counter may drop below zero. """
        repo_name = rand_string()
        key_name = rand_string()
        allow_negative = True

        len_items = 3
        total_increases = len_items
        total_decreases = len_items * 2
        expected_value = total_increases - total_decreases

        repo = NumberRepo(repo_name, sync_threshold, sync_interval, allow_negative=allow_negative)

        # Add new items ..
        for _x in range(total_increases):
            repo.incr(key_name)

        # By multiplying we ensure that we decrement it below zero ..
        for _x in range(total_decreases):
            value = repo.decr(key_name)

        # .. and we confirm that the below-zero value is as expected (remember, allow_negative is True).
        self.assertEqual(value, expected_value)

    def test_repo_decr_below_zero_allow_negative_false(self):
        """ With allow_negative=False a counter never drops below zero. """
        repo_name = rand_string()
        key_name = rand_string()
        allow_negative = False

        len_items = 3
        total_increases = len_items
        total_decreases = len_items * 2

        repo = NumberRepo(repo_name, sync_threshold, sync_interval, allow_negative=allow_negative)

        # Add new items ..
        for _x in range(total_increases):
            repo.incr(key_name)

        # By multiplying we ensure that we try to decrement it below zero ..
        for _x in range(total_decreases):
            value = repo.decr(key_name)

        # .. and we confirm that the value stopped at zero (remember, allow_negative is False).
        self.assertEqual(value, 0)

    def test_repo_get(self):
        """ get returns the current value along with usage metadata for a key. """
        repo_name = rand_string()
        key_name = rand_string()

        repo = NumberRepo(repo_name, sync_threshold, sync_interval)

        for _x in range(3):
            repo.incr(key_name)

        data = repo.get(key_name) # type: dict

        # Presumably gives background processing a moment to run - TODO confirm
        # whether this sleep is actually required.
        sleep(0.1)

        self.assertEqual(data[StatsKey.PerKeyValue], 3)
        self.assertIsNone(data[StatsKey.PerKeyLastDuration])

        # The last-usage timestamp must parse and must not be in the future.
        last_timestamp = data[StatsKey.PerKeyLastTimestamp]
        last_timestamp = dt_parse(last_timestamp)
        self.assertTrue(utcnow() > last_timestamp)

    def test_repo_set_last_duration(self):
        """ A duration stored via set_last_duration is reflected in what get returns. """
        repo_name = rand_string()
        key_name = rand_string()
        last_duration = rand_int()

        repo = NumberRepo(repo_name, sync_threshold, sync_interval)

        repo.incr(key_name)
        repo.set_last_duration(key_name, last_duration)

        data = repo.get(key_name) # type: dict
        self.assertEqual(data[StatsKey.PerKeyLastDuration], last_duration)
# ################################################################################################################################
# Run the tests when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
| 7,395
|
Python
|
.py
| 144
| 43.909722
| 130
| 0.473046
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,020
|
test_zato_kvdb_core.py
|
zatosource_zato/code/zato-server/test/zato/kvdb/test_zato_kvdb_core.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main, TestCase
# Zato
from zato.common.test import rand_string
from zato.server.connection.kvdb.api import ObjectCtx, ListRepo
from zato.server.connection.kvdb.core import KVDB
# ################################################################################################################################
# ################################################################################################################################
class ListRepoAPITestCase(TestCase):
    """ Tests for KVDB's API that manages list-based repositories. """

    def _new_ctx(self, object_id):
        """ Returns a new ObjectCtx with the given ID assigned. """
        ctx = ObjectCtx()
        ctx.id = object_id
        return ctx

    def test_list_internal_create_repo(self):
        """ Each created name maps to its own, distinct ListRepo instance. """
        repo_name1 = rand_string()
        repo_name2 = rand_string()

        zato_kvdb = KVDB()
        zato_kvdb.internal_create_list_repo(repo_name1)
        zato_kvdb.internal_create_list_repo(repo_name2)

        repo1 = zato_kvdb.get(repo_name1)
        repo2 = zato_kvdb.get(repo_name2)

        self.assertIsInstance(repo1, ListRepo)
        self.assertIsInstance(repo2, ListRepo)
        self.assertIsNot(repo1, repo2)

    def test_list_get(self):
        """ get returns a previously created list repository. """
        repo_name = rand_string()

        zato_kvdb = KVDB()
        zato_kvdb.internal_create_list_repo(repo_name)

        repo = zato_kvdb.get(repo_name)
        self.assertIsInstance(repo, ListRepo)

    def test_list_push_get_object(self):
        """ An appended object can be retrieved back by its ID. """
        repo_name = rand_string()
        object_id = rand_string()

        zato_kvdb = KVDB()
        zato_kvdb.internal_create_list_repo(repo_name)

        zato_kvdb.append(repo_name, self._new_ctx(object_id))
        result = zato_kvdb.get_object(repo_name, object_id)

        self.assertIsInstance(result, ObjectCtx)
        self.assertEqual(result.id, object_id)

    def test_list_repo_get_list(self):
        """ get_list paginates objects, returning them newest-first. """
        repo_name = rand_string()

        zato_kvdb = KVDB()
        zato_kvdb.internal_create_list_repo(repo_name)

        # Append twelve objects with IDs such as '1-abc123', '2-def456' etc.
        id_list = []
        for idx in range(1, 13):
            object_id = '{}-{}'.format(idx, rand_string())
            id_list.append(object_id)
            zato_kvdb.append(repo_name, self._new_ctx(object_id))

        cur_page = 2
        page_size = 3

        results = zato_kvdb.get_list(repo_name, cur_page, page_size)
        result = results['result']

        # Objects come back newest-first, so the second page of three
        # contains the objects appended ninth, eighth and seventh.
        self.assertEqual(result[0].id, id_list[8])
        self.assertEqual(result[1].id, id_list[7])
        self.assertEqual(result[2].id, id_list[6])

    def test_list_repo_delete(self):
        """ delete returns the removed object and makes it unreachable via get_object. """
        id1 = rand_string()
        id2 = rand_string()
        repo_name = rand_string()

        zato_kvdb = KVDB()
        zato_kvdb.internal_create_list_repo(repo_name)

        ctx1 = self._new_ctx(id1)
        zato_kvdb.append(repo_name, ctx1)
        zato_kvdb.append(repo_name, self._new_ctx(id2))

        deleted_ctx = zato_kvdb.delete(repo_name, id1)
        self.assertIs(ctx1, deleted_ctx)

        try:
            zato_kvdb.get_object(repo_name, id1)
        except KeyError as e:
            self.assertEqual(e.args[0], 'Object not found `{}`'.format(id1))
        else:
            self.fail('KeyError should have been raised because object has been deleted')

    def test_list_repo_clear(self):
        """ remove_all empties the repository. """
        repo_name = rand_string()

        zato_kvdb = KVDB()
        zato_kvdb.internal_create_list_repo(repo_name)

        zato_kvdb.append(repo_name, self._new_ctx(rand_string()))
        zato_kvdb.append(repo_name, self._new_ctx(rand_string()))

        zato_kvdb.remove_all(repo_name)
        self.assertEqual(zato_kvdb.get_size(repo_name), 0)
# ################################################################################################################################
# Run the tests when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
| 6,167
|
Python
|
.py
| 147
| 33.843537
| 130
| 0.476758
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,021
|
test_docstring_service.py
|
zatosource_zato/code/zato-server/test/zato/apispec/test_docstring_service.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main, TestCase
# Zato
from zato.common.test.apispec_ import service_name, sio_config
from zato.common.api import APISPEC
from zato.server.apispec.parser.docstring import not_public
from zato.server.apispec.parser.service import ServiceInfo
# ################################################################################################################################
# Imports below are for type completion only - the "if 0" guard
# means nothing here is actually imported at runtime.
if 0:
    from zato.server.apispec.parser.docstring import _DocstringSegment, SimpleIODescription
    # Re-assignments presumably mark the imports as used for linters - TODO confirm
    _DocstringSegment = _DocstringSegment
    SimpleIODescription = SimpleIODescription
# ################################################################################################################################
# ################################################################################################################################
class APISpecDocstringParsing(TestCase):
    """ Tests parsing of service docstrings into summary, description and full
    parts, including splitting them into @tag-based segments.

    NOTE(review): the docstrings of the inner CyMyService classes are fixture
    data read at runtime via CyMyService.__doc__ - their exact whitespace is
    significant to the parser under test and must not be reformatted.
    """

    # High limit so that full diffs of the long expected strings show on failure
    maxDiff = 100000

# ################################################################################################################################

    def test_docstring_summary_only(self):

        class CyMyService:
            """ This is a one-line summary.
            """

        info = ServiceInfo(
            service_name,
            CyMyService, # type: ignore
            sio_config,
            'public'
        )

        # This service's docstring has a summary only so it will constitute
        # all of its summary, description and full docstring.
        self.assertEqual(info.docstring.data.summary, 'This is a one-line summary.')
        self.assertEqual(info.docstring.data.description, 'This is a one-line summary.')
        self.assertEqual(info.docstring.data.full, 'This is a one-line summary.')

# ################################################################################################################################

    def test_docstring_multiline(self):

        # The docstring mixes reST tables and nested lists to exercise
        # the parser on multiline content.
        class CyMyService:
            """ This is a one-line summary.
            This is public information
            It is multiline
            ======= ======= =======
            header1 header2 header3
            ======= ======= =======
            column1 column2 column3
            ======= ======= =======
            - This is a list
            - It has a sub-list
            - And another one
            The sub-list has a table
            ======= ======= =======
            header4 header5 header6
            ======= ======= =======
            column4 column5 column6
            ======= ======= =======
            - More bullets in the list
            """
            __doc__: 'str'

        info = ServiceInfo(
            service_name,
            CyMyService, # type: ignore
            sio_config,
            'public'
        )

        self.assertEqual(info.docstring.data.summary, 'This is a one-line summary.')

        # Each line of the original docstring must reappear, once stripped,
        # at the same position in the parsed full docstring.
        service_docstring_lines = CyMyService.__doc__.strip().splitlines()
        docstring_full_lines = info.docstring.data.full.splitlines()

        for idx, line in enumerate(service_docstring_lines):
            expected_line = line.strip() # type: str
            given_line = docstring_full_lines[idx].strip() # type: str
            self.assertEqual(expected_line, given_line)

# ################################################################################################################################

    def test_extract_tags_public_only_implicit(self):

        class CyMyService:
            """ This is a one-line summary.
            This is public information
            It is multiline
            """

        segments = ServiceInfo(
            service_name,
            CyMyService, # type: ignore
            sio_config,
            APISPEC.DEFAULT_TAG).docstring.extract_segments(
                CyMyService.__doc__ # type: ignore
            )

        # There should be only one tag, the default, implicit one called 'public'
        expected = {
            'tag': 'public',
            'description': 'This is public information\nIt is multiline',
            'full': 'This is a one-line summary.\n\nThis is public information\nIt is multiline\n',
            'summary': 'This is a one-line summary.'
        }

        self.assertEqual(len(segments), 1)

        public = segments[0] # type: _DocstringSegment
        segment_dict = public.to_dict()

        self.assertEqual(segment_dict['tag'], expected['tag'])
        self.assertEqual(segment_dict['description'], expected['description'])
        self.assertEqual(segment_dict['full'], expected['full'])
        self.assertEqual(segment_dict['summary'], expected['summary'])

# ################################################################################################################################

    def test_extract_tags_public_only_explicit(self):

        class CyMyService:
            """ @public
            This is a one-line summary.
            This is public information
            It is multiline
            """

        segments = ServiceInfo(
            service_name,
            CyMyService, # type: ignore
            sio_config,
            APISPEC.DEFAULT_TAG).docstring.extract_segments(
                CyMyService.__doc__ # type: ignore
            )

        # There should be only one tag, the explicitly named 'public' one.
        expected = {
            'tag': 'public',
            'description': 'This is public information\nIt is multiline',
            'full': 'This is a one-line summary.\n\nThis is public information\nIt is multiline\n',
            'summary': 'This is a one-line summary.'
        }

        self.assertEqual(len(segments), 1)

        public = segments[0] # type: _DocstringSegment
        segment_dict = public.to_dict()

        self.assertEqual(segment_dict['tag'], expected['tag'])
        self.assertEqual(segment_dict['description'], expected['description'])
        self.assertEqual(segment_dict['full'], expected['full'])
        self.assertEqual(segment_dict['summary'], expected['summary'])

# ################################################################################################################################

    def test_extract_tags_multi_1(self):

        class CyMyService:
            """ This is a one-line summary.
            This is public information
            It is multiline
            @internal
            One-line summary for internal uses.
            This part is internal,
            it will not be visible
            to public users.
            """

        tags = [APISPEC.DEFAULT_TAG, 'internal']

        segments = ServiceInfo(
            service_name,
            CyMyService, # type: ignore
            sio_config,
            tags).docstring.extract_segments(
                CyMyService.__doc__ # type: ignore
            )

        # The public segment keeps its own summary ..
        expected_public = {
            'tag': 'public',
            'description': 'This is public information\nIt is multiline',
            'full': 'This is a one-line summary.\n\nThis is public information\nIt is multiline\n',
            'summary': 'This is a one-line summary.'
        }

        # .. whereas the internal one has its summary replaced with the not_public marker
        # and a non-public notice embedded in its full text.
        expected_internal = {
            'tag': 'internal',
            'description': '\n\n\n.. raw:: html\n\n <span class="zato-tag-name-highlight">@internal</span>\n\nOne-line summary for internal uses.\n\nThis part is internal,\nit will not be visible\nto public users.', # noqa: E501
            'full': '\n\n.. raw:: html\n\n <span class="zato-tag-name-highlight">@internal</span>\n\n\nINFORMATION IN THIS SECTION IS NOT PUBLIC.\n\nOne-line summary for internal uses.\n\nThis part is internal,\nit will not be visible\nto public users.\n', # noqa: E501
            'summary': not_public
        }

        self.assertEqual(len(segments), 2)

        public = segments[0] # type: _DocstringSegment
        segment_dict_public = public.to_dict()

        self.assertEqual(segment_dict_public['tag'], expected_public['tag'])
        self.assertEqual(segment_dict_public['description'], expected_public['description'])
        self.assertEqual(segment_dict_public['full'], expected_public['full'])
        self.assertEqual(segment_dict_public['summary'], expected_public['summary'])

        internal = segments[1] # type: _DocstringSegment
        segment_dict_internal = internal.to_dict()

        self.assertEqual(segment_dict_internal['tag'], expected_internal['tag'])
        self.assertEqual(segment_dict_internal['description'], expected_internal['description'])
        self.assertEqual(segment_dict_internal['full'], expected_internal['full'])
        self.assertEqual(segment_dict_internal['summary'], expected_internal['summary'])
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
| 9,449
|
Python
|
.py
| 181
| 41.98895
| 280
| 0.500761
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,022
|
test_dataclass_open_api.py
|
zatosource_zato/code/zato-server/test/zato/apispec/test_dataclass_open_api.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from copy import deepcopy
from unittest import main
# Zato
from zato.common.test.apispec_ import run_common_apispec_assertions, service_name, sio_config
from zato.common.api import APISPEC, URL_TYPE
from zato.common.marshal_.simpleio import DataClassSimpleIO
from zato.common.test import BaseSIOTestCase
from zato.server.apispec.spec.core import Generator
from zato.server.apispec.spec.openapi import OpenAPIGenerator
from zato.server.service.internal.helpers import MyDataclassService
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
class _MatchTestCompiled:
group_names = ['phone_number']
# ################################################################################################################################
# ################################################################################################################################
class DataClassOpenAPITestCase(BaseSIOTestCase):
    """ Generates an OpenAPI specification for a dataclass-based service and runs the common assertions on it. """

    def test_dataclass_generate_open_api(self):

        # Work on a copy so the original service class is left untouched
        service_class = deepcopy(MyDataclassService)
        DataClassSimpleIO.attach_sio(None, self.get_server_config(), service_class)

        # Service store metadata pointing to our test class
        services = {
            'my.impl.name': {
                'name': service_name,
                'service_class': service_class,
            }
        }

        # Build the generic API specification first ..
        spec_generator = Generator(services, sio_config, ['*'], [], '', ['public'], needs_sio_desc=False)
        spec_info = spec_generator.get_info() # type: any_

        # .. describe the REST channel the service is mounted on ..
        channel_data = [{
            'service_name': service_name,
            'transport': URL_TYPE.PLAIN_HTTP,
            'url_path': '/test/{phone_number}',
            'match_target_compiled': _MatchTestCompiled()
        }]

        # .. turn it into OpenAPI ..
        openapi_generator = OpenAPIGenerator(spec_info, channel_data, True, True, APISPEC.GENERIC_INVOKE_PATH)
        spec = openapi_generator.generate()

        # .. and confirm it is what we expect.
        run_common_apispec_assertions(self, spec)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
| 2,986
|
Python
|
.py
| 57
| 46.368421
| 130
| 0.462676
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,023
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/apispec/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,024
|
test_commands.py
|
zatosource_zato/code/zato-server/test/zato/commands_/test_commands.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main
# Zato
from zato.common.test import CommandLineServiceTestCase
# ################################################################################################################################
# ################################################################################################################################
class ServiceCommandsTestCase(CommandLineServiceTestCase):
    """ Runs the commands helper service through the command-line test harness. """

    def test_service_commands(self) -> 'None':

        # The helper service that exercises command execution
        name = 'helpers.commands-service'

        # Invoke it via the CLI-based test runner
        self.run_zato_service_test(name)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
# ################################################################################################################################
| 1,356
|
Python
|
.py
| 23
| 55.695652
| 130
| 0.287661
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,025
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/commands_/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,026
|
test_outconn_connect.py
|
zatosource_zato/code/zato-server/test/zato/wsx/test_outconn_connect.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# This needs to run as soon as possible
from gevent.monkey import patch_all
_ = patch_all()
# stdlib
from unittest import main
from uuid import uuid4
# Zato
from zato.common.test.wsx_ import WSXChannelManager, WSXOutconnBaseCase
from zato.common.util.api import fs_safe_now
from zato.server.generic.api.outconn.wsx.base import OutconnWSXWrapper
# ################################################################################################################################
# ################################################################################################################################
class WSXOutconnConnectTestCase(WSXOutconnBaseCase):
    """ Tests that WSX outgoing connections authenticate (or not) as expected against a test channel. """

    def test_connect_credentials_needed_not_needed(self) -> 'None':

        # A channel without any security definition attached
        with WSXChannelManager(self) as channel_ctx:

            outconn_config = self._get_config(
                'test_connect_credentials_needed_not_needed',
                channel_ctx.wsx_channel_address
            )

            out_wrapper = OutconnWSXWrapper(outconn_config, self._get_test_server()) # type: ignore
            out_wrapper.build_queue()

            # No credentials are required and the client should be connected
            self._check_connection_result(
                out_wrapper, channel_ctx.wsx_channel_address, needs_credentials=False, should_be_authenticated=True)

# ################################################################################################################################

    def test_connect_credentials_needed_and_provided(self) -> 'None':

        # Unique credentials for this test run
        now = fs_safe_now()
        username = 'test.wsx.username.{}'.format(now)
        password = 'test.wsx.password.{}.{}'.format(now, uuid4().hex)

        with WSXChannelManager(self, username, password, needs_credentials=True) as channel_ctx:

            outconn_config = self._get_config(
                'test_connect_credentials_needed_and_provided',
                channel_ctx.wsx_channel_address,
                username,
                password
            )

            out_wrapper = OutconnWSXWrapper(outconn_config, self._get_test_server()) # type: ignore
            out_wrapper.build_queue()

            # Credentials were required and given - the client should be authenticated
            self._check_connection_result(
                out_wrapper, channel_ctx.wsx_channel_address, needs_credentials=True, should_be_authenticated=True)

# ################################################################################################################################

    def test_connect_credentials_needed_and_not_provided(self) -> 'None':

        # Unique credentials for the channel - deliberately not passed to the outconn below
        now = fs_safe_now()
        username = 'test.wsx.username.{}'.format(now)
        password = 'test.wsx.password.{}.{}'.format(now, uuid4().hex)

        with WSXChannelManager(self, username, password, needs_credentials=True) as channel_ctx:

            # We connect without credentials to a channel that has a security definition
            # attached, which means that the connection attempt should fail.
            outconn_config = self._get_config(
                'test_connect_credentials_needed_and_not_provided',
                channel_ctx.wsx_channel_address,
                queue_build_cap=1
            )

            out_wrapper = OutconnWSXWrapper(outconn_config, self._get_test_server()) # type: ignore
            out_wrapper.build_queue()
            out_wrapper.delete_queue_connections()

            # The client must not be authenticated in this case
            self._check_connection_result(
                out_wrapper, channel_ctx.wsx_channel_address, needs_credentials=False, should_be_authenticated=False)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
# ################################################################################################################################
| 4,212
|
Python
|
.py
| 72
| 49.513889
| 130
| 0.482599
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,027
|
test_outconn_reconnect.py
|
zatosource_zato/code/zato-server/test/zato/wsx/test_outconn_reconnect.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from logging import basicConfig, getLogger, WARN
from tempfile import gettempdir
from traceback import format_exc
from unittest import main
# gevent
from gevent import sleep
# Zato
from zato.common.test import rand_string, rand_unicode
from zato.common.test.config import TestConfig
from zato.common.test.enmasse_.base import BaseEnmasseTestCase
from zato.common.util.cli import get_zato_sh_command
from zato.common.util.open_ import open_w
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
basicConfig(level=WARN, format='%(asctime)s - %(message)s')
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
channel_template = """
web_socket:
- name: test.channel.WSXOutconnReconnectTestCase.{test_suffix}
is_active: true
is_internal: false
service: helpers.web-sockets-pub-sub-gateway
address: ws://localhost:22099/test
sec_def: zato-no-security
data_format: json
new_token_wait_time: 30
ping_interval: 90
pings_missed_threshold: 5
token_ttl: 3600
"""
outconn_template = """
zato_generic_connection:
- name: test.outconn.WSXOutconnReconnectTestCase.{test_suffix}
address: ws://localhost:22099/test
cache_expiry: 0
has_auto_reconnect: true
is_active: true
is_channel: true
is_internal: false
is_outconn: false
is_zato: true
pool_size: 1
sec_use_rbac: false
security_def: ZATO_NONE
subscription_list:
type_: outconn-wsx
"""
# ################################################################################################################################
# ################################################################################################################################
class WSXOutconnReconnectTestCase(BaseEnmasseTestCase):
    """ Confirms that a WSX outgoing connection reconnects after its channel is deleted and recreated. """

    def _delete(self, command_name:'str', conn_type:'str', test_suffix:'str') -> 'None':

        # The zato CLI entry point
        zato_command = get_zato_sh_command()

        # Name of the connection that this test run created
        name_to_delete = f'test.{conn_type}.WSXOutconnReconnectTestCase.{test_suffix}'

        # Run the deletion ..
        result:'any_' = zato_command(
            command_name,
            '--path', TestConfig.server_location,
            '--name', name_to_delete
        )

        # .. and confirm that stdout/stderr contain no errors.
        self._assert_command_line_result(result)

    def _delete_channel(self, test_suffix:'str') -> 'None':
        self._delete('delete-wsx-channel', 'channel', test_suffix)

    def _delete_outconn(self, test_suffix:'str') -> 'None':
        self._delete('delete-wsx-outconn', 'outconn', test_suffix)

# ################################################################################################################################

    def _save_enmasse_file(self, template:'str', conn_type:'str', test_suffix:'str') -> 'str':

        # Where to store the generated enmasse config
        file_name = f'zato-{conn_type}-{test_suffix}.yaml'
        config_path = os.path.join(gettempdir(), file_name)

        # Render the template and write it out
        with open_w(config_path) as f:
            _ = f.write(template.format(test_suffix=test_suffix))

        return config_path

# ################################################################################################################################

    def _save_enmasse_channel_file(self, test_suffix:'str') -> 'str':
        return self._save_enmasse_file(channel_template, 'WSXOutconnReconnectTestCase-channel', test_suffix)

# ################################################################################################################################

    def _save_enmasse_outconn_file(self, test_suffix:'str') -> 'str':
        return self._save_enmasse_file(outconn_template, 'WSXOutconnReconnectTestCase-outconn', test_suffix)

# ################################################################################################################################

    def test_outconn_reconnect(self) -> 'None':

        # A unique ID for our test run
        test_suffix = rand_unicode() + '.' + rand_string()

        try:
            # Enmasse config files for the channel and the outgoing connection
            channel_config_file = self._save_enmasse_channel_file(test_suffix)
            outconn_config_file = self._save_enmasse_outconn_file(test_suffix)

            # Create both objects ..
            _ = self.invoke_enmasse(channel_config_file)
            _ = self.invoke_enmasse(outconn_config_file)

            # .. delete the channel from under the outconn ..
            self._delete_channel(test_suffix)

            # .. bring the channel back ..
            _ = self.invoke_enmasse(channel_config_file)

            # .. give the outgoing connection enough time to reconnect ..
            sleep(6)

            # .. reaching this point without an exception means it did.
        except Exception:
            self.fail('Caught an exception -> {}'.format(format_exc()))
        finally:
            # Clean up regardless of the outcome
            self._delete_channel(test_suffix)
            self._delete_outconn(test_suffix)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
| 6,583
|
Python
|
.py
| 129
| 44.596899
| 130
| 0.451492
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,028
|
test_outconn_pubsub.py
|
zatosource_zato/code/zato-server/test/zato/wsx/test_outconn_pubsub.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# This needs to run as soon as possible
from gevent.monkey import patch_all
_ = patch_all()
# stdlib
from unittest import main
# Zato
from zato.common.api import GENERIC
from zato.common.test.wsx_ import WSXChannelManager, WSXOutconnBaseCase
from zato.common.typing_ import cast_
from zato.common.util.api import fs_safe_now
from zato.distlock import LockManager
from zato.server.connection.pool_wrapper import ConnectionPoolWrapper
from zato.server.generic.api.outconn.wsx.base import OutconnWSXWrapper
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, strlist
any_ = any_
# ################################################################################################################################
# ################################################################################################################################
class Config:
    """ Constants shared by the WSX outconn pub/sub tests. """
    # ID and name of the service invoked when a subscription is created
    OnSubscribeServiceID = 123
    OnSubscribeServiceName = 'zato.test.on-subscribe'
    # Topics the test subscribes to
    SubscribeToTopics = ['/test1', '/test2']
# ################################################################################################################################
# ################################################################################################################################
class TestServer:
    """ A minimal stand-in for a parallel server - records the calls made to it in self.ctx. """

    def __init__(self, topics:'strlist') -> 'None':
        self.topics = topics
        self.ctx = [] # Each recorded call is appended here as a one-key dict
        self.zato_lock_manager = LockManager('zato-pass-through', 'zato', cast_('any_', None))
        self.wsx_connection_pool_wrapper = ConnectionPoolWrapper(cast_('any_', self), GENERIC.CONNECTION.TYPE.OUTCONN_WSX)

# ################################################################################################################################

    def is_service_wsx_adapter(self, _ignored_service_name:'str') -> 'bool':
        # Always claims the service is a WSX adapter
        return True

# ################################################################################################################################

    def is_active_outconn_wsx(self, _ignored_conn_id:'str') -> 'bool':
        # Always claims the connection is active
        return True

# ################################################################################################################################

    def api_service_store_get_service_name_by_id(self, service_id:'int') -> 'str':
        # Record the lookup and resolve to the configured on-subscribe service
        self.ctx.append({
            'api_service_store_get_service_name_by_id':service_id
        })
        return Config.OnSubscribeServiceName

# ################################################################################################################################

    def invoke(self, service_name:'str') -> 'strlist':
        # Record the invocation and reply with the configured topic list
        self.ctx.append({
            'invoke': service_name
        })
        return self.topics

# ################################################################################################################################

    def on_wsx_outconn_stopped_running(self, conn_id:'str') -> 'None':
        pass

# ################################################################################################################################

    def on_wsx_outconn_connected(self, conn_id:'str') -> 'None':
        pass
# ################################################################################################################################
# ################################################################################################################################
class WSXOutconnPubSubTestCase(WSXOutconnBaseCase):
    """ Confirms that building a WSX outconn queue invokes the configured on-subscribe service. """

    def test_pubsub_on_subscribe_service(self) -> 'None':

        # Topics unique to this test run
        now = fs_safe_now()
        topics = [f'/wsx.pubsub.test.{now}.1', f'/wsx.pubsub.test.{now}.2']

        server = TestServer(topics)

        with WSXChannelManager(self, needs_pubsub=True, run_cli=True, topics=topics) as channel_ctx:

            config = self._get_config(
                'test_pubsub_on_subscribe_service',
                channel_ctx.wsx_channel_address,
            )
            config['on_subscribe_service_id'] = Config.OnSubscribeServiceID

            wrapper = OutconnWSXWrapper(config, server) # type: ignore
            wrapper.build_queue()

        # The client should have looked up the service by its ID first ..
        self.assertEqual(len(server.ctx), 2)
        self.assertDictEqual(server.ctx[0], {
            'api_service_store_get_service_name_by_id': Config.OnSubscribeServiceID,
        })

        # .. and then invoked it by its name.
        self.assertDictEqual(server.ctx[1], {
            'invoke': Config.OnSubscribeServiceName,
        })
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
# ################################################################################################################################
| 5,636
|
Python
|
.py
| 96
| 52.28125
| 130
| 0.373159
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,029
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/server_rpc/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,030
|
test_all.py
|
zatosource_zato/code/zato-server/test/zato/server_rpc/test_all.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from contextlib import closing
from dataclasses import dataclass
from unittest import main, TestCase
from uuid import uuid4
# Zato
from zato.common.api import INFO_FORMAT
from zato.common.component_info import get_info
from zato.common.odb.model import Base, HTTPBasicAuth, Cluster, Server as ServerModel
from zato.common.odb.api import ODBManager, SQLConnectionPool
from zato.common.test import TestCluster, TestParallelServer
from zato.common.typing_ import cast_
from zato.common.util.api import get_client_from_server_conf, get_new_tmp_full_path
from zato.common.util.open_ import open_w
from zato.server.connection.server.rpc.api import ConfigCtx, ServerRPC
from zato.server.connection.server.rpc.config import CredentialsConfig, ODBConfigSource, RPCServerInvocationCtx
from zato.server.connection.server.rpc.invoker import LocalServerInvoker, RemoteServerInvoker, ServerInvoker
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anydict, anylist, anytuple, intstrdict
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
# ################################################################################################################################
# ################################################################################################################################
class TestConfig:
    """ Cluster and server constants used throughout the server RPC tests. """
    crypto_use_tls = True
    cluster_name = 'rpc_test_cluster'
    api_credentials_password = 'api_credentials_password'
    # Three test servers, each with its own preferred address and port
    server1_name = 'server1'
    server2_name = 'server2'
    server3_name = 'server3'
    server1_preferred_address = '10.151.1.1'
    server2_preferred_address = '10.152.2.2'
    server3_preferred_address = '10.152.3.3'
    server1_port = 1111
    server2_port = 2222
    server3_port = 3333
# ################################################################################################################################
# ################################################################################################################################
class BaseTestServerInvoker:
    """ Base class for test invokers - provides the record type stored in their invocation history. """

    @dataclass(init=False)
    class InvocationEntry:
        """ Captures the positional and keyword arguments of a single invoke() call. """
        args: 'anytuple'
        kwargs: 'anydict'
# ################################################################################################################################
# ################################################################################################################################
class TestLocalServerInvoker(LocalServerInvoker, BaseTestServerInvoker):
    """ A local server invoker that records each invocation instead of acting on it. """

    def __init__(self, *args:'any_', **kwargs:'any_'):
        super().__init__(*args, **kwargs)

        # One entry per call to self.invoke
        self.invocation_history = [] # type: anylist

    def invoke(self, *args:'any_', **kwargs:'any_') -> 'None':
        # Capture the call's arguments ..
        record = self.InvocationEntry()
        record.args = args
        record.kwargs = kwargs

        # .. and store them for later assertions.
        self.invocation_history.append(record)
# ################################################################################################################################
# ################################################################################################################################
class TestRemoteServerInvoker(RemoteServerInvoker, BaseTestServerInvoker):
    """ A remote server invoker that records each invocation instead of acting on it. """

    def __init__(self, *args:'any_', **kwargs:'any_') -> 'None':
        super().__init__(*args, **kwargs)

        # One entry per call to self.invoke
        self.invocation_history = [] # type: anylist

    def invoke(self, *args:'any_', **kwargs:'any_') -> 'None':
        # Capture the call's arguments ..
        record = self.InvocationEntry()
        record.args = args
        record.kwargs = kwargs

        # .. and store them for later assertions.
        self.invocation_history.append(record)
# ################################################################################################################################
# ################################################################################################################################
class ServerRPCTestCase(TestCase):
def setUp(self) -> 'None':
    """ Builds an in-memory SQLite ODB with one cluster, three servers and the RPC API credentials. """

    # In-memory ODB configuration
    odb_name = 'ServerRPCTestCase'
    odb_config = {
        'engine': 'sqlite',
        'is_active': True,
        'fs_sql_config': {},
        'echo': True,
    } # type: anydict

    # Set up the connection pool and session factory ..
    odb_pool = SQLConnectionPool(odb_name, odb_config, odb_config)
    self.odb = ODBManager()
    self.odb.init_session(odb_name, odb_config, odb_pool)

    # .. create the SQL schema ..
    Base.metadata.create_all(self.odb.pool.engine)

    with closing(self.odb.session()) as session:

        # The cluster all the servers belong to
        cluster = Cluster()
        cluster.name = TestConfig.cluster_name # type: ignore
        cluster.odb_type = 'sqlite' # type: ignore
        cluster.broker_host = 'localhost-test-broker-host' # type: ignore
        cluster.broker_port = 123456 # type: ignore
        cluster.lb_host = 'localhost-test-lb-host' # type: ignore
        cluster.lb_port = 1234561 # type: ignore
        cluster.lb_agent_port = 1234562 # type: ignore

        # Per-server details - name, token, preferred address and bind port
        server_details = [
            (TestConfig.server1_name, 'abc1', TestConfig.server1_preferred_address, TestConfig.server1_port),
            (TestConfig.server2_name, 'abc2', TestConfig.server2_preferred_address, TestConfig.server2_port),
            (TestConfig.server3_name, 'abc3', TestConfig.server3_preferred_address, TestConfig.server3_port),
        ]

        servers = []
        for name, token, address, port in server_details:
            server = ServerModel()
            server.cluster = cluster
            server.name = name # type: ignore
            server.token = token # type: ignore
            server.preferred_address = address # type: ignore
            server.bind_port = port # type: ignore
            server.crypto_use_tls = TestConfig.crypto_use_tls # type: ignore
            servers.append(server)

        # The HTTP Basic Auth definition the RPC API authenticates with
        api_credentials = HTTPBasicAuth()
        api_credentials.cluster = cluster # type: ignore
        api_credentials.is_active = True # type: ignore
        api_credentials.name = CredentialsConfig.sec_def_name # type: ignore
        api_credentials.username = CredentialsConfig.api_user # type: ignore
        api_credentials.realm = CredentialsConfig.sec_def_name # type: ignore
        api_credentials.password = TestConfig.api_credentials_password # type: ignore

        # Persist everything
        session.add(cluster)
        for server in servers:
            session.add(server)
        session.add(api_credentials)

        session.commit()
# ################################################################################################################################
def get_server_rpc(
    self,
    odb, # type: ODBManager
    local_server_invoker_class=None, # type: type[LocalServerInvoker] | None
    remote_server_invoker_class=None # type: type[RemoteServerInvoker] | None
) -> 'ServerRPC':
    """ Builds a ServerRPC object backed by an ODB config source and the given invoker classes. """

    # A test cluster with a test parallel server pointing to it
    cluster = TestCluster(TestConfig.cluster_name)
    parallel_server = TestParallelServer(cluster, odb, TestConfig.server1_name)

    # Configuration comes out of the (possibly in-memory) ODB
    config_source = ODBConfigSource(parallel_server.odb, cluster.name, parallel_server.name, parallel_server.decrypt)

    # Tie it all together in a context object ..
    config_ctx = ConfigCtx(
        config_source,
        cast_('ParallelServer', parallel_server),
        local_server_invoker_class = cast_('type[LocalServerInvoker]', local_server_invoker_class),
        remote_server_invoker_class = cast_('type[RemoteServerInvoker]', remote_server_invoker_class),
    )

    # .. and return the RPC facade built from it.
    return ServerRPC(config_ctx)
# ################################################################################################################################
def get_local_server_invoker(
    self,
    local_server_invoker_class=LocalServerInvoker # type: type[LocalServerInvoker]
) -> 'ServerInvoker':
    """ Returns an invoker for the local server (server1) - no ODB is needed for local invocations. """
    rpc = self.get_server_rpc(
        cast_('ODBManager', None),
        local_server_invoker_class = local_server_invoker_class
    )
    invoker = rpc.get_invoker_by_server_name(TestConfig.server1_name)
    return invoker
# ################################################################################################################################
def get_remote_server_invoker(
    self,
    server_name, # type: str
    remote_server_invoker_class=RemoteServerInvoker # type: type[RemoteServerInvoker]
) -> 'ServerInvoker':
    """ Returns an invoker for the remote server of the given name, using the test ODB set up in setUp.
    """
    rpc = self.get_server_rpc(self.odb, remote_server_invoker_class=remote_server_invoker_class)

    # Look the invoker up by the name given on input - previously the server_name
    # parameter was ignored and TestConfig.server2_name was always used, which made
    # the parameter dead code (all current callers do pass server2_name, so this
    # fix is backward-compatible with them).
    return rpc.get_invoker_by_server_name(server_name)
# ################################################################################################################################
def xtest_get_item_local_server(self):
    """ A local invoker should be returned for the local server's name. """
    invoker = self.get_local_server_invoker()

    # It is both a generic ServerInvoker and, specifically, a local one
    self.assertIsInstance(invoker, ServerInvoker)
    self.assertIsInstance(invoker, LocalServerInvoker)
# ################################################################################################################################
def xtest_get_item_remote_server(self):
    """ A remote invoker should be returned for another server's name. """
    invoker = self.get_remote_server_invoker(TestConfig.server2_name)

    # It is both a generic ServerInvoker and, specifically, a remote one
    self.assertIsInstance(invoker, ServerInvoker)
    self.assertIsInstance(invoker, RemoteServerInvoker)
# ################################################################################################################################
def xtest_invoke_local_server(self):
    """ Each invocation of a local server must be recorded in the invocation history
    along with the exact positional and keyword arguments that were used.
    """
    invoker_class = cast_('type[LocalServerInvoker]', TestLocalServerInvoker)
    invoker = self.get_local_server_invoker(local_server_invoker_class=invoker_class)
    invoker = cast_('TestLocalServerInvoker', invoker)

    # Two sample invocations, each with its own parameters ..
    calls = [
        ((1, 2, 3, 4), {'a1':'a2', 'b1':'b2'}),
        ((5, 6, 7, 8), {'a3':'a4', 'b3':'b4'}),
    ]

    # .. invoke the server once per call ..
    for args, kwargs in calls:
        invoker.invoke(*args, **kwargs)

    # .. and confirm that history entries match what was sent, in order.
    for entry, (args, kwargs) in zip(invoker.invocation_history, calls):
        self.assertTupleEqual(entry.args, args)
        self.assertDictEqual(entry.kwargs, kwargs)
# ################################################################################################################################
def xtest_invoke_remote_server(self):
    """ Each invocation of a remote server must be recorded in the invocation history
    along with the exact positional and keyword arguments that were used.
    """
    invoker_class = cast_('type[RemoteServerInvoker]', TestRemoteServerInvoker)
    invoker = self.get_remote_server_invoker(
        TestConfig.server2_name,
        remote_server_invoker_class=invoker_class
    )
    invoker = cast_('TestRemoteServerInvoker', invoker)

    # Two sample invocations, each with its own parameters ..
    calls = [
        ((1, 2, 3, 4), {'a1':'a2', 'b1':'b2'}),
        ((5, 6, 7, 8), {'a3':'a4', 'b3':'b4'}),
    ]

    # .. invoke the server once per call ..
    for args, kwargs in calls:
        invoker.invoke(*args, **kwargs)

    # .. and confirm that history entries match what was sent, in order.
    for entry, (args, kwargs) in zip(invoker.invocation_history, calls):
        self.assertTupleEqual(entry.args, args)
        self.assertDictEqual(entry.kwargs, kwargs)
# ################################################################################################################################
def xtest_remote_server_invocation_ctx_is_populated(self):
    """ Confirms that remote server's invocation_ctx contains remote address and API credentials.
    """
    invoker_class = cast_('type[RemoteServerInvoker]', TestRemoteServerInvoker)
    invoker = self.get_remote_server_invoker(
        TestConfig.server2_name,
        remote_server_invoker_class=invoker_class
    )
    invoker = cast_('TestRemoteServerInvoker', invoker)

    ctx = invoker.invocation_ctx

    # Each of these attributes must carry the expected configuration value ..
    expected = {
        'address': TestConfig.server2_preferred_address,
        'cluster_name': TestConfig.cluster_name,
        'server_name': TestConfig.server2_name,
        'username': CredentialsConfig.api_user,
        'password': TestConfig.api_credentials_password,
    }
    for name, value in expected.items():
        self.assertEqual(getattr(ctx, name), value)

    # .. while the TLS flag is compared by identity.
    self.assertIs(ctx.crypto_use_tls, TestConfig.crypto_use_tls)
# ################################################################################################################################
def xtest_populate_servers(self):
    """ Reads all servers from the database and confirms that each one received
    an invoker of the correct type with a correctly populated invocation context.
    """

    # Get our RPC client ..
    server_rpc = self.get_server_rpc(
        self.odb,
        local_server_invoker_class=LocalServerInvoker,
        remote_server_invoker_class=RemoteServerInvoker
    )

    # .. this reads all the servers from the database ..
    server_rpc.populate_invokers()

    # .. so we can start our tests now.
    invokers = server_rpc._invokers
    invoker1 = invokers['server1']
    invoker2 = invokers['server2']
    invoker3 = invokers['server3']

    # The first invoker is a local one. The other ones are remote.
    self.assertIsInstance(invoker1, LocalServerInvoker)
    self.assertIsInstance(invoker2, RemoteServerInvoker)
    self.assertIsInstance(invoker3, RemoteServerInvoker)

    # Note that the invocation context is populated only for remote invokers,
    # which is why invoker2 and invoker3 are checked below but invoker1 is not.
    per_invoker_expected = [
        (invoker2, TestConfig.server2_preferred_address, TestConfig.server2_name),
        (invoker3, TestConfig.server3_preferred_address, TestConfig.server3_name),
    ]

    for invoker, address, server_name in per_invoker_expected:
        ctx = invoker.invocation_ctx
        self.assertEqual(ctx.address, address)
        self.assertEqual(ctx.cluster_name, TestConfig.cluster_name)
        self.assertEqual(ctx.server_name, server_name)
        self.assertEqual(ctx.username, CredentialsConfig.api_user)
        self.assertEqual(ctx.password, TestConfig.api_credentials_password)
        self.assertIs(ctx.crypto_use_tls, TestConfig.crypto_use_tls)
def test_invoke_all_pids_using_a_remote_invoker(self):
    """ Invokes every worker PID of a live server through a RemoteServerInvoker.
    Skipped silently unless the Zato_Test_Server_Root_Dir environment variable points to a server.
    """

    # Skip the test unless a live server's root directory is configured
    if not(server_root_dir := os.environ.get('Zato_Test_Server_Root_Dir')):
        return

    # We are going to write random data to a file for each worker PID
    # and then all the PIDs will be invoked. It is expected that each
    # PID will read its data from that file and create a new file
    # with its response which we will read to confirm that the PID
    # indeed has been invoked.
    #
    # NOTE(review): the response files described above are never actually read back
    # in this method - only the request side is exercised; confirm whether the
    # read-back step is still planned or the comment is stale.

    # Local aliases
    service_name = 'zato.ping'

    # Note that this test requires that at least two workers be present
    # because it tests multi-CPU configuration.
    min_workers = 2

    # Configuration read from the environment
    server_host = os.environ.get('Zato_Test_Server_Host')
    server_port = cast_('str', os.environ.get('Zato_Test_Server_Port'))
    server_port = int(server_port)

    # An invocation client that we can extract the underlying configuration from
    client = get_client_from_server_conf(server_root_dir, url_path=RemoteServerInvoker.url_path)

    # Build the overall configuration context object
    ctx = RPCServerInvocationCtx()
    ctx.cluster_name = 'ServerRPCTestCase-Cluster-Name'
    ctx.server_name = 'ServerRPCTestCase-Server-Name'
    ctx.address = server_host
    ctx.port = server_port
    ctx.username = client.username
    ctx.password = client.password

    # Create the invoker ..
    invoker = RemoteServerInvoker(ctx)

    # .. and ping it immediately to make sure the configuration is correct.
    invoker.ping()

    # Now, obtain all the PIDs of the workers in the current server.
    server_info = get_info(server_root_dir, INFO_FORMAT.DICT)

    # Make sure we have enough worker processes to continue
    master_proc_workers_no = server_info['master_proc_workers_no']
    master_proc_workers_no = cast_('int', master_proc_workers_no)

    if master_proc_workers_no < min_workers:
        msg = f'Server from {server_root_dir} should have at least {min_workers} workers instead of {master_proc_workers_no}'
        raise Exception(msg)

    # The request will be populated with data for each PID in the loop below
    request = {} # type: intstrdict

    # Go through all the PIDs found ..
    for pid in server_info['master_proc_workers_pids']:

        # This is reusable - it forms both the file suffix and the request payload
        random_pid_data = uuid4().hex

        # Random data for that pid
        pid_data = f'pid:{pid}:' + random_pid_data + '\n'

        # A random file for that PID
        prefix = f'pid-request-{pid}'
        tmp_file_path = get_new_tmp_full_path(prefix=prefix, random_suffix=random_pid_data)

        # Write the data for this PID
        with open_w(tmp_file_path) as f:
            _ = f.write(pid_data)

        # Populate the request
        request[pid] = random_pid_data

    # .. now, we can invoke all the PIDs with our request on input ..
    _ = invoker.invoke_all_pids(service_name, request)

    # .. close the underlying socket ..
    invoker.close()
# ################################################################################################################################
# ################################################################################################################################
# Standard entry point so the tests can also be run directly from the command line
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
| 19,279
|
Python
|
.py
| 338
| 48.497041
| 130
| 0.55902
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,031
|
test_trigger_notify_pub_sub_tasks.py
|
zatosource_zato/code/zato-server/test/zato/pubsub/test_trigger_notify_pub_sub_tasks.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Run gevent patches first
from gevent.monkey import patch_all
patch_all()
# stdlib
from unittest import TestCase
# Zato
from zato.common.api import PUBSUB
from zato.common.test import TestServer
from zato.server.pubsub import PubSub
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import stranydict
# ################################################################################################################################
# ################################################################################################################################
class TriggerNotifyPubSubTasksTestCase(TestCase):
    """ Confirms that PubSub.trigger_notify_pubsub_tasks computes pub_time_max correctly,
    i.e. that the greater of the GD and non-GD publication times is what reaches the sync request.
    """

    def _run_sync(
        self,
        needs_gd:'bool',        # Should a GD message flag be set for the topic?
        needs_non_gd:'bool',    # Should a non-GD message be stored in RAM?
        gd_pub_time_max:'float',
        non_gd_pub_time:'float',
    ) -> 'stranydict':
        """ Builds a PubSub object with one topic, endpoint and subscription,
        optionally registers GD/non-GD messages, triggers a single sync iteration
        and returns the context dictionary collected by the test server.
        """

        cid = '987'
        server = TestServer()
        broker_client = None
        sub_key = 'sk.123'

        topic_id = 222
        topic_name = '/my.topic'
        endpoint_id = 1
        ws_channel_id = 2

        # Fix: previously cluster_id was first assigned 123 and then, with no use
        # in between, reassigned 12345 - only the single effective value is kept now.
        cluster_id = 12345
        endpoint_type = PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id

        # This is used by the one-element non-GD messages
        non_gd_pub_msg_id = 'aaa.bbb.111'
        non_gd_expiration_time = 123456789123456789

        topic_config = {
            'id': topic_id,
            'name': topic_name,
            'is_active': True,
            'is_internal': False,
            'max_depth_gd': 111,
            'max_depth_non_gd': 222,
            'has_gd': True,
            'depth': 0,
            'depth_check_freq': 1,
            'pub_buffer_size_gd': 1,
            'task_delivery_interval': 1,
            'task_sync_interval': 1,
        }

        sub_config = {
            'id': 555,
            'sub_key': sub_key,
            'topic_id': topic_id,
            'topic_name': topic_name,
            'ws_channel_id': ws_channel_id,
            'ext_client_id': 'my.ext.1',
            'endpoint_id': endpoint_id,
            'sub_pattern_matched': '/*',
            'task_delivery_interval': 0.1,
            'unsub_on_wsx_close': True,
            'creation_time': 123456,
        }

        sk_server_config = {
            'cluster_id': cluster_id,
            'server_name': server.name,
            'server_pid': server.pid,
            'sub_key': sub_key,
            'endpoint_id': endpoint_id,
            'endpoint_type': endpoint_type,
        }

        endpoint_config = {
            'id': endpoint_id,
            'ws_channel_id': ws_channel_id,
            'name': 'my.endpoint',
            'endpoint_type': endpoint_type,
            'role': PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id,
            'is_active': True,
            'is_internal': True,
            'security_id': None,
        }

        # Run exactly one synchronization iteration, synchronously,
        # so the test can observe its outcome deterministically.
        sync_max_iters = 1
        spawn_trigger_notify = False

        ps = PubSub(
            cluster_id,
            server, # type: ignore
            broker_client,
            sync_max_iters=sync_max_iters,
            spawn_trigger_notify=spawn_trigger_notify)

        ps.create_topic_object(topic_config)
        ps.create_endpoint(endpoint_config)
        ps.add_subscription(sub_config)
        ps.set_sub_key_server(sk_server_config)

        # Optionally, set a flag to signal that a GD message is available
        if needs_gd:
            ps.set_sync_has_msg(
                topic_id = topic_id,
                is_gd = True,
                value = True,
                source = 'test_max_pub_time_both_gd_and_non_gd',
                gd_pub_time_max = gd_pub_time_max
            )

        # Optionally, store a minimal list of non-GD messages
        if needs_non_gd:
            sub_keys = [sub_key]
            non_gd_msg_list = [{
                'pub_msg_id': non_gd_pub_msg_id,
                'pub_time': non_gd_pub_time,
                'expiration_time': non_gd_expiration_time
            }]
            ps.store_in_ram(cid, topic_id, topic_name, sub_keys, non_gd_msg_list)

        # Trigger a sync call ..
        ps.trigger_notify_pubsub_tasks()

        # .. and return the dictionary with context data to our caller.
        return server.ctx

# ################################################################################################################################

    def test_pub_max_time_gd_only(self):
        """ Only a GD message exists - its max. pub time must become pub_time_max. """
        needs_gd = True
        needs_non_gd = False

        gd_pub_time_max = 2.0
        non_gd_pub_time = 1.0

        ctx = self._run_sync(needs_gd, needs_non_gd, gd_pub_time_max, non_gd_pub_time)

        pub_time_max = ctx['request']['pub_time_max']
        self.assertEqual(pub_time_max, gd_pub_time_max)

# ################################################################################################################################

    def test_pub_max_time_non_gd_only(self):
        """ Only a non-GD message exists - its pub time must become pub_time_max. """
        needs_gd = False
        needs_non_gd = True

        gd_pub_time_max = 2.0
        non_gd_pub_time = 1.0

        ctx = self._run_sync(needs_gd, needs_non_gd, gd_pub_time_max, non_gd_pub_time)

        pub_time_max = ctx['request']['pub_time_max']
        self.assertEqual(pub_time_max, non_gd_pub_time)

# ################################################################################################################################

    def test_pub_max_time_gd_is_greater(self):
        """ Both kinds exist and GD is greater - it must form pub_time_max. """
        needs_gd = True
        needs_non_gd = True

        gd_pub_time_max = 2.0
        non_gd_pub_time = 1.0

        ctx = self._run_sync(needs_gd, needs_non_gd, gd_pub_time_max, non_gd_pub_time)

        pub_time_max = ctx['request']['pub_time_max']
        self.assertEqual(pub_time_max, gd_pub_time_max)

# ################################################################################################################################

    def test_pub_max_time_non_gd_is_greater(self):
        """ Both kinds exist and non-GD is greater - it must form pub_time_max. """
        needs_gd = True
        needs_non_gd = True

        gd_pub_time_max = 2.0
        non_gd_pub_time = 3.0

        ctx = self._run_sync(needs_gd, needs_non_gd, gd_pub_time_max, non_gd_pub_time)

        pub_time_max = ctx['request']['pub_time_max']
        self.assertEqual(pub_time_max, non_gd_pub_time)
# ################################################################################################################################
# ################################################################################################################################
# Entry point for running directly; main is imported here because it is needed only in that case
if __name__ == '__main__':
    from unittest import main
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 7,773
|
Python
|
.py
| 174
| 35.425287
| 130
| 0.446713
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,032
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/pubsub/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,033
|
test_publish.py
|
zatosource_zato/code/zato-server/test/zato/pubsub/test_publish.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from copy import deepcopy
from datetime import datetime, timezone
from json import loads
from tempfile import gettempdir
from unittest import main
# ciso8601
try:
from zato.common.util.api import parse_datetime
except ImportError:
from dateutil.parser import parse as parse_datetime
# Zato
from zato.common.pubsub import MSG_PREFIX, PUBSUB
from zato.common.util.api import new_cid
from zato.common.util.file_system import wait_for_file
from zato.common.util.open_ import open_r
from zato.common.test.rest_client import RESTClientTestCase
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anydict
# ################################################################################################################################
# ################################################################################################################################
# Shorthand for pub/sub defaults used throughout the assertions below
_default = PUBSUB.DEFAULT

# Credentials of the built-in pub/sub API security definition that the REST client logs in with
username = _default.PUBAPI_USERNAME
sec_name = _default.PUBAPI_SECDEF_NAME
# ################################################################################################################################
# ################################################################################################################################
class PubSubTestCase(RESTClientTestCase):
    """ End-to-end self-publication test - a helper service publishes a message to itself
    and both the delivered message and the before-publish hook's input are written out
    to temporary files which this test reads back and validates.
    """

    # Flags consumed by the RESTClientTestCase base class
    needs_current_app = False
    payload_only_messages = False

# ################################################################################################################################

    def setUp(self) -> None:
        """ Logs the REST client in with the built-in pub/sub API credentials. """
        super().setUp()
        self.rest_client.init(username=username, sec_name=sec_name)

# ################################################################################################################################

    def test_self_publish(self):

        # Random payload plus the two files that the server-side services are expected to create -
        # one with the delivered pub/sub message and one with the hook's input.
        random_data = new_cid()

        file_name_pub_sub = 'zato-unittest-pubsub-'+ random_data +'.json'
        file_name_pub_sub = os.path.join(gettempdir(), file_name_pub_sub)
        file_name_hook = file_name_pub_sub + '.hook-before-publish.json'

        request = {
            'random_data': random_data,
            'file_name': file_name_pub_sub,
        } # type: anydict

        # Both expected payloads echo the request; they differ only in the target_needs_file flag.
        expected_pub_sub_data = deepcopy(request)
        expected_pub_sub_data['target_needs_file'] = True

        expected_hook_data = deepcopy(request)
        expected_hook_data['target_needs_file'] = False

        # Trigger the publication
        self.rest_client.api_invoke('helpers.pubsub.source', request)

        # Sleep for a moment until the files appear in the file system
        wait_for_file(file_name_pub_sub, max_wait=99)
        wait_for_file(file_name_hook, max_wait=99)

        # Attempt to read the files now ..
        with open_r(file_name_pub_sub) as f:
            pub_sub_data = f.read()

        with open_r(file_name_hook) as f:
            hook_data = f.read()

        # .. load actual JSON data ..
        pub_sub_data = loads(pub_sub_data)
        hook_data = loads(hook_data)

        # .. and run all the tests now ..

        # Check hook data first as the message is much smaller
        self.assertDictEqual(hook_data, expected_hook_data)

        # ##
        # The six normalized timestamps below are currently consumed only by the disabled
        # assertions at the end of this method - see the explanation there.

        expiration_time = pub_sub_data['expiration_time']
        expiration_time = datetime.fromtimestamp(expiration_time, tz=timezone.utc)
        expiration_time = str(expiration_time)

        expiration_time_iso = pub_sub_data['expiration_time_iso']
        expiration_time_iso = parse_datetime(expiration_time_iso).astimezone(tz=timezone.utc)
        expiration_time_iso = str(expiration_time_iso)

        # ##

        pub_time = pub_sub_data['pub_time']
        pub_time = datetime.fromtimestamp(pub_time, tz=timezone.utc)
        pub_time = str(pub_time)

        pub_time_iso = pub_sub_data['pub_time_iso']
        pub_time_iso = parse_datetime(pub_time_iso).astimezone(tz=timezone.utc)
        pub_time_iso = str(pub_time_iso)

        # ##

        recv_time = pub_sub_data['recv_time']
        recv_time = datetime.fromtimestamp(recv_time, tz=timezone.utc)
        recv_time = str(recv_time)

        recv_time_iso = pub_sub_data['recv_time_iso']
        recv_time_iso = parse_datetime(recv_time_iso).astimezone(tz=timezone.utc)
        recv_time_iso = str(recv_time_iso)

        # ##
        # Exact-value checks of the delivered message's metadata
        self.assertEqual(pub_sub_data['data_prefix'], '')
        self.assertEqual(pub_sub_data['data_prefix_short'], '')
        self.assertEqual(pub_sub_data['delivery_count'], 1)
        self.assertEqual(pub_sub_data['delivery_status'], '')
        self.assertEqual(pub_sub_data['expiration'], _default.LimitMessageExpiry)
        self.assertEqual(pub_sub_data['ext_pub_time_iso'], '')
        self.assertEqual(pub_sub_data['mime_type'], _default.MIME_TYPE)
        self.assertEqual(pub_sub_data['priority'], 5)
        self.assertEqual(pub_sub_data['server_name'], '')
        self.assertEqual(pub_sub_data['sub_pattern_matched'], 'sub=/zato/s/to/*')
        self.assertEqual(pub_sub_data['topic_name'], '/zato/s/to/helpers_pubsub_target')
        self.assertEqual(pub_sub_data['zato_ctx']['target_service_name'], 'helpers.pubsub.target')
        self.assertEqual(pub_sub_data['zato_ctx']['zato_mime_type'], 'application/vnd.zato.ps.msg')

        # Type- and shape-level checks where exact values are not predictable
        self.assertIsInstance(pub_sub_data['topic_id'], int)
        self.assertIsInstance(pub_sub_data['cluster_id'], int)
        self.assertIsInstance(pub_sub_data['server_pid'], int)
        self.assertIsInstance(pub_sub_data['published_by_id'], int)

        self.assertDictEqual(pub_sub_data['data'], expected_pub_sub_data)

        self.assertEqual(pub_sub_data['pub_pattern_matched'], '')
        self.assertTrue(pub_sub_data['size'] >= 100)
        self.assertTrue(pub_sub_data['has_gd'])
        self.assertTrue(pub_sub_data['pub_msg_id'].startswith(MSG_PREFIX.MSG_ID))
        self.assertTrue(pub_sub_data['sub_key'].startswith('zpsk.srv'))

        self.assertFalse(pub_sub_data['is_in_sub_queue'])

        self.assertListEqual(pub_sub_data['deliver_to_sk'], [])
        self.assertListEqual(pub_sub_data['reply_to_sk'], [])

        # This waits until datetime_from_ms is changed so as not to require the "* 1000" multiplication,
        # i.e. until it uses datetime.fromtimestamp(ms, tz=timezone.utc)
        """
        now = datetime.now(tz=timezone.utc).isoformat()

        self.assertEqual(expiration_time, expiration_time_iso)
        self.assertEqual(pub_time, pub_time_iso)
        self.assertEqual(recv_time, recv_time_iso)

        self.assertLess(now, expiration_time_iso)
        self.assertGreater(now, pub_time_iso)
        self.assertGreater(now, recv_time_iso)
        """
# ################################################################################################################################
# ################################################################################################################################
# Standard entry point so the tests can also be run directly from the command line
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 7,571
|
Python
|
.py
| 135
| 49.133333
| 130
| 0.523764
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,034
|
test_pubapi_services.py
|
zatosource_zato/code/zato-server/test/zato/pubsub/test_pubapi_services.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main
# Zato
from zato.common.test import CommandLineServiceTestCase
# ################################################################################################################################
# ################################################################################################################################
class WSXServicesInvokerTest(CommandLineServiceTestCase):
    """ Runs, via the command line, the server-side suite covering services that pubapi clients use. """

    def test_wsx_services_invoker(self) -> 'None':

        # This service invokes a test suite that in turn invokes all the services
        # that pubapi clients make use of for publish/subscribe.
        _ = self.run_zato_service_test('helpers.pubsub.pubapi-invoker')
# ################################################################################################################################
# ################################################################################################################################
# Standard entry point so the tests can also be run directly from the command line
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 1,465
|
Python
|
.py
| 24
| 57.541667
| 130
| 0.320504
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,035
|
test_pubapi_rest.py
|
zatosource_zato/code/zato-server/test/zato/pubsub/test_pubapi_rest.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from time import sleep
# Zato
from zato.common.pubsub import prefix_sk
from zato.common.test.config import TestConfig
from zato.common.test.pubsub.common import FullPathTester
from zato.common.test.unittest_ import BasePubSubRestTestCase, PubSubConfig
from zato.common.typing_ import cast_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anydict
anydict = anydict
# ################################################################################################################################
# ################################################################################################################################
class PubAPITestCase(BasePubSubRestTestCase):
    """ REST-level tests of the public pub/sub API - subscribing, unsubscribing,
    publishing and receiving messages against a live test environment.
    """

# ################################################################################################################################

    def tearDown(self) -> 'None':
        """ Removes any topics that a test may have auto-created under the unique name pattern. """
        super().tearDown()
        _ = self.delete_pubsub_topics_by_pattern(TestConfig.pubsub_topic_name_unique_auto_create)

# ################################################################################################################################

    def test_self_subscribe(self):

        # In this test, we check subscriptions to shared topics
        topic_name = TestConfig.pubsub_topic_shared

        # Before subscribing, make sure we are not currently subscribed
        _ = self._unsubscribe(topic_name)

        # Subscribe to the topic
        response_initial = self.rest_client.post(PubSubConfig.PathSubscribe + topic_name)

        # Wait a moment to make sure the subscription data is created
        sleep(4)

        sub_key = cast_('str', response_initial['sub_key'])
        queue_depth = cast_('int', response_initial['queue_depth'])

        #
        # Validate sub_key
        #

        self.assertIsInstance(sub_key, str)
        self.assertTrue(sub_key.startswith(prefix_sk))

        len_sub_key = len(sub_key)
        len_prefix = len(prefix_sk)

        self.assertTrue(len_sub_key >= len_prefix + 5) # We expect at least a few random characters here

        #
        # Validate queue_depth
        #

        self.assertIsInstance(queue_depth, int)

        # Subscribe once more - this should be allowed although we expect an empty response now
        response_already_subscribed = self.rest_client.post(PubSubConfig.PathSubscribe + topic_name)

        self.assertDictEqual(response_already_subscribed, {})

# ################################################################################################################################

    def test_self_unsubscribe(self):

        # In this test, we check subscriptions to shared topics
        topic_name = TestConfig.pubsub_topic_shared

        # Unsubscribe once ..
        response = self._unsubscribe(topic_name)

        # .. we expect an empty dict on reply
        self.assertDictEqual(response, {})

        # .. unsubscribe once more - it is not an error to unsubscribe
        # .. even if we are already unsubscribed.
        response = self._unsubscribe(topic_name)
        self.assertDictEqual(response, {})

# ################################################################################################################################

    def test_full_path_subscribe_before_publication(self):
        # Exercises the full publish/receive path when the subscription exists before publication
        tester = FullPathTester(self, True) # type: ignore
        tester.run()

# ################################################################################################################################

    def test_full_path_subscribe_after_publication(self):

        # Build a unique topic name so the topic is created on demand for this test only
        prefix = TestConfig.pubsub_topic_name_unique_auto_create
        # NOTE(review): datetime.utcnow is deprecated as of Python 3.12 -
        # consider datetime.now(timezone.utc) here, confirming the name format stays acceptable.
        topic_name = prefix + datetime.utcnow().isoformat()

        # Command to invoke ..
        cli_params = ['pubsub', 'create-topic', '--name', topic_name]

        self.logger.info(f'Creating topic {topic_name} ({self.__class__.__name__})')

        # .. get its response as a dict ..
        out = self.run_zato_cli_json_command(cli_params) # type: anydict
        topic_name = out['name']

        sleep(4)

        tester = FullPathTester(self, False, topic_name) # type: ignore
        tester.run()

# ################################################################################################################################

    def test_receive_has_no_sub(self):

        # In this test, we check subscriptions to shared topics
        topic_name = TestConfig.pubsub_topic_shared

        # Make sure we are not subscribed
        _ = self._unsubscribe(topic_name)

        # Try to receive messages without a subscription - an error response is expected
        response = cast_('anydict', self._receive(topic_name, False, False))

        self.assertIsNotNone(response['cid'])
        self.assertEqual(response['result'], 'Error')
        self.assertEqual(response['details'], 'You are not subscribed to topic `{}`'.format(topic_name))

# ################################################################################################################################

    def test_receive_many(self):

        # In this test, we check subscriptions to shared topics
        topic_name = TestConfig.pubsub_topic_shared

        # Make sure we are subscribed
        _ = self._subscribe(topic_name, needs_unsubscribe=True)

        data1 = '111'
        data2 = '222'
        data3 = '333'

        # Publish #1
        response1 = self._publish(topic_name, data1)
        expected_msg_id1 = response1['msg_id']

        # Publish #2
        response2 = self._publish(topic_name, data2)
        expected_msg_id2 = response2['msg_id']

        # Publish #3
        response3 = self._publish(topic_name, data3)
        expected_msg_id3 = response3['msg_id']

        # Receive and confirm the order of messages received. This will be a list of messages
        # and we expect to find all of them, in LIFO order.
        received = self._receive(topic_name)

        received_msg1 = received[0]
        received_msg2 = received[1]
        received_msg3 = received[2]

        # LIFO - the last message published is the first one received
        self.assertEqual(expected_msg_id3, received_msg1['msg_id'])
        self.assertEqual(expected_msg_id2, received_msg2['msg_id'])
        self.assertEqual(expected_msg_id1, received_msg3['msg_id'])

        self.assertEqual(data3, received_msg1['data'])
        self.assertEqual(data2, received_msg2['data'])
        self.assertEqual(data1, received_msg3['data'])
# ################################################################################################################################
# ################################################################################################################################
# Entry point for running directly; main is imported here because it is needed only in that case
if __name__ == '__main__':
    from unittest import main
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 7,376
|
Python
|
.py
| 130
| 49.569231
| 130
| 0.483575
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,036
|
test_publisher_ctx.py
|
zatosource_zato/code/zato-server/test/zato/pubsub/publisher/test_publisher_ctx.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import TestCase
# Zato
from zato.common.test.pubsub.publisher import PublisherTestData
from zato.common.typing_ import cast_
from zato.server.pubsub.publisher import PubCtx
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anylistnone
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
# ################################################################################################################################
# ################################################################################################################################
class PublisherCtxTestCase(TestCase):
def _get_test_ctx(self, gd_msg_list:'anylistnone', non_gd_msg_list:'anylistnone') -> 'PubCtx':
    """ Builds a publication context out of shared test data and the two message lists given on input. """
    source = PublisherTestData
    return PubCtx(
        cid = source.cid,
        cluster_id = source.cluster_id,
        pubsub = source.pubsub,
        topic = source.topic,
        endpoint_id = source.endpoint_id,
        endpoint_name = source.endpoint_name,
        subscriptions_by_topic = source.subscriptions_by_topic,
        msg_id_list = source.msg_id_list,
        pub_pattern_matched = source.pub_pattern_matched,
        ext_client_id = source.ext_client_id,
        is_first_run = source.is_first_run,
        now = source.now,
        is_wsx = source.is_wsx,
        service_invoke_func = source.service_invoke_func,
        new_session_func = source.new_session_func,
        gd_msg_list = cast_('list', gd_msg_list),
        non_gd_msg_list = cast_('list', non_gd_msg_list),
    )
# ################################################################################################################################
def test_msg_id_lists_are_none(self):
# We do not provide any list on input
gd_msg_list = None
non_gd_msg_list = None
with self.assertRaises(ValueError) as cm:
self._get_test_ctx(gd_msg_list, non_gd_msg_list)
# Extract the exception ..
exception = cm.exception
# .. and run the assertions now.
self.assertIs(type(exception), ValueError)
self.assertEqual(str(exception), 'At least one of gd_msg_list or non_gd_msg_list must be provided')
# ################################################################################################################################
def test_msg_id_gd_msg_list_is_none(self):
# One of the elements is a list, but an empty one
gd_msg_list = []
non_gd_msg_list = None
with self.assertRaises(ValueError) as cm:
self._get_test_ctx(gd_msg_list, non_gd_msg_list)
# Extract the exception ..
exception = cm.exception
# .. and run the assertions now.
self.assertIs(type(exception), ValueError)
self.assertEqual(str(exception), 'At least one of gd_msg_list or non_gd_msg_list must be provided')
# ################################################################################################################################
def test_msg_id_non_gd_msg_list_is_none(self):
# Another of the elements is a list, but an empty one
gd_msg_list = None
non_gd_msg_list = []
with self.assertRaises(ValueError) as cm:
self._get_test_ctx(gd_msg_list, non_gd_msg_list)
# Extract the exception ..
exception = cm.exception
# .. and run the assertions now.
self.assertIs(type(exception), ValueError)
self.assertEqual(str(exception), 'At least one of gd_msg_list or non_gd_msg_list must be provided')
# ################################################################################################################################
def test_msg_id_both_lists_are_empty(self):
# Both elements are lists but both are empty
gd_msg_list = []
non_gd_msg_list = []
with self.assertRaises(ValueError) as cm:
self._get_test_ctx(gd_msg_list, non_gd_msg_list)
# Extract the exception ..
exception = cm.exception
# .. and run the assertions now.
self.assertIs(type(exception), ValueError)
self.assertEqual(str(exception), 'At least one of gd_msg_list or non_gd_msg_list must be provided')
# ################################################################################################################################
# ################################################################################################################################
# Run all tests in this module when it is executed directly.
if __name__ == '__main__':
    from unittest import main
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 5,496
|
Python
|
.py
| 96
| 49.510417
| 130
| 0.462012
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,037
|
test_publisher_message.py
|
zatosource_zato/code/zato-server/test/zato/pubsub/publisher/test_publisher_message.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from copy import deepcopy
from unittest import TestCase
# Zato
from zato.common.test.pubsub.publisher import PublisherTestData
from zato.common.api import PUBSUB
from zato.common.typing_ import cast_
from zato.common.util.time_ import utcnow_as_ms
from zato.server.pubsub.publisher import Publisher, PubRequest
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.server.base.parallel import ParallelServer
from zato.server.pubsub.publisher import PubSubMessage
ParallelServer = ParallelServer
PubSubMessage = PubSubMessage
# ################################################################################################################################
# ################################################################################################################################
class PublisherMessageTestCase(TestCase):
    """ Tests how the Publisher class builds outgoing pub/sub messages. """

# ################################################################################################################################

    def get_default_request(
        self,
        *,
        cid,           # type: str
        data,          # type: str
        mime_type,     # type: str
        ext_client_id, # type: str
        ext_pub_time,  # type: str
    ) -> 'PubRequest':
        """ Builds a PubRequest out of the input attributes via the same
        dict-based marshalling mechanism that the publish service uses.
        """
        # Note that the 'data' name is deliberately reused for the dict here.
        data = {
            'data': data,
            'mime_type': mime_type,
            'ext_client_id': ext_client_id,
            'ext_pub_time': ext_pub_time
        } # type: ignore

        # Correlation ID (cid) is provided via extra parameters so as to use the same mechanism that the publish service uses.
        out = PubRequest._zato_from_dict(data, extra={'cid':cid})

        return out

# ################################################################################################################################

    def get_test_publisher(self, test_data:'type[PublisherTestData]') -> 'Publisher':
        """ Builds a Publisher object configured with the given test data. """
        publisher = Publisher(
            pubsub = test_data.pubsub,
            server = cast_('ParallelServer', test_data.server),
            marshal_api = test_data.server.marshal_api,
            service_invoke_func = test_data.service_invoke_func,
            new_session_func = test_data.new_session_func,
        )

        return publisher

# ################################################################################################################################

    def test_get_data_prefixes(self) -> 'None':

        # Make a deep copy so as not to interfere with other tests.
        test_data = deepcopy(PublisherTestData)

        # Make the prefixes shorter for the purposes of our test
        test_data.pubsub.data_prefix_len = 7
        test_data.pubsub.data_prefix_short_len = 3

        publisher = self.get_test_publisher(test_data)

        data = '1234567890'
        data_prefix, data_prefix_short = publisher.get_data_prefixes(data)

        # Prefixes are truncations of the input data to the configured lengths
        self.assertEqual(data_prefix, '1234567')
        self.assertEqual(data_prefix_short, '123')

# ################################################################################################################################

    def test_build_message_simple(self) -> 'None':
        """ Pins down every attribute of a message produced by Publisher.build_message for a basic request. """

        # Make a deep copy so as not to interfere with other tests.
        test_data = deepcopy(PublisherTestData)

        # This is information about the message ..
        cid = 'cid.123'
        data = '{"Hello":"This is my data"}'
        mime_type = 'application/json'
        ext_client_id = 'my.ext.client.id.1'
        ext_pub_time = '2018-10-08T09:08:20.894193'

        # .. this is information about the publication ..
        now = 1.0
        endpoint_id = 8
        topic = test_data.topic
        has_no_sk_server = False
        pub_pattern_matched = '/*'
        subscriptions_by_topic = []

        # .. generate a default message based on the message's data ..
        request = self.get_default_request(
            cid = cid,
            data = data,
            mime_type = mime_type,
            ext_client_id = ext_client_id,
            ext_pub_time = ext_pub_time
        )

        # .. build a publisher object ..
        publisher = self.get_test_publisher(test_data)

        # .. build a timestamp before the test for later comparisons ..
        now_before_test = utcnow_as_ms()

        # .. transform the message data into an actual business object ..
        message = cast_('PubSubMessage', publisher.build_message(
            topic, request, now, pub_pattern_matched, endpoint_id, subscriptions_by_topic, has_no_sk_server))

        # .. build a timestamp after the test, also for later comparisons ..
        now_after_test = utcnow_as_ms()

        # .. and run the assertions now ..
        self.assertEqual(message.cluster_id, test_data.cluster_id)
        self.assertEqual(message.data, data)

        # Prefixes equal the full data here because the input is shorter than the default prefix lengths
        # NOTE(review): presumably so - confirm against the pubsub defaults.
        self.assertEqual(message.data_prefix, data)
        self.assertEqual(message.data_prefix_short, data)
        self.assertEqual(message.delivery_count, 0)
        self.assertEqual(message.delivery_status, str(PUBSUB.DELIVERY_STATUS.INITIALIZED))
        self.assertEqual(message.expiration, PUBSUB.DEFAULT.LimitMessageExpiry)

        # 86401.0 = pub time (1.0) + the default expiry - presumably 86400 seconds; verify against LimitMessageExpiry
        self.assertEqual(message.expiration_time, 86401.0)
        self.assertEqual(message.expiration_time_iso, '')
        self.assertEqual(message.ext_pub_time_iso, '')
        self.assertIsNone(message.group_id)
        self.assertTrue(message.has_gd)
        self.assertIsNone(message.in_reply_to)
        self.assertFalse(message.is_in_sub_queue)
        self.assertEqual(message.mime_type, mime_type)
        self.assertEqual(message.position_in_group, 1)
        self.assertIsNone(message.pub_correl_id)

        # Message IDs carry the 'zpsm' prefix
        self.assertTrue(message.pub_msg_id.startswith('zpsm'))
        self.assertTrue(len(message.pub_msg_id) >= 24)
        self.assertEqual(message.pub_pattern_matched, '/*')
        self.assertEqual(message.pub_time, '1.0000000')
        self.assertEqual(message.published_by_id, endpoint_id)

        # recv_time must fall between the two timestamps taken around the call
        self.assertLess(now_before_test, message.recv_time)
        self.assertGreater(now_after_test, message.recv_time)
        self.assertEqual(message.recv_time_iso, '')
        self.assertEqual(message.reply_to_sk, [])
        self.assertEqual(message.server_name, '')
        self.assertEqual(message.server_pid, 0)
        self.assertEqual(message.size, len(data))
        self.assertEqual(message.sub_key, '')
        self.assertEqual(message.sub_pattern_matched, {})
        self.assertEqual(message.topic_id, topic.id)
        self.assertEqual(message.topic_name, topic.name)
        self.assertIsNone(message.user_ctx)
        self.assertEqual(message.zato_ctx, '{\n\n}')
# ################################################################################################################################
# ################################################################################################################################
# Run all tests in this module when it is executed directly.
if __name__ == '__main__':
    from unittest import main
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 7,426
|
Python
|
.py
| 141
| 44.87234
| 130
| 0.523251
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,038
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/pubsub/publisher/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,039
|
test_service_stat_client.py
|
zatosource_zato/code/zato-server/test/zato/stats/test_service_stat_client.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main, TestCase
# Zato
from zato.common.test import rand_int, rand_string
from zato.common.events.client import Client as EventsClient
from zato.common.events.common import EventInfo
from zato.server.connection.stats import ServiceStatsClient
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.events.common import PushCtx
PushCtx = PushCtx
# ################################################################################################################################
# ################################################################################################################################
class TestImplClass(EventsClient):
    """ A test double for the events client - records which of its methods
    were invoked instead of performing any real work.
    """
    def __init__(self, host, port):
        # type: (str, int) -> None
        super().__init__(host, port)
        self.host, self.port = host, port

        # Call-tracking state read back by the assertions in the test cases
        self.push_counter = 0
        self.is_run_called = False
        self.is_connect_called = False

# ################################################################################################################################

    def connect(self):
        self.is_connect_called = True

# ################################################################################################################################

    def run(self):
        self.is_run_called = True

# ################################################################################################################################

    def push(self, *args, **kwargs):
        self.push_counter = self.push_counter + 1

# ################################################################################################################################

    def close(self):
        pass

# ################################################################################################################################

    def send(self, *args, **kwargs):
        pass
# ################################################################################################################################
# ################################################################################################################################
class ServiceStatsClientTestCase(TestCase):
    """ Tests ServiceStatsClient - its queueing behaviour without an underlying
    implementation object and its delegation to one once init() has been called.
    """

# ################################################################################################################################

    def _new_client_with_impl(self):
        # Returns a client initialized against the test double,
        # along with the random host and port it was given.
        host = rand_string()
        port = rand_int()

        stats_client = ServiceStatsClient(impl_class=TestImplClass)
        stats_client.init(host, port)

        return stats_client, host, port

# ################################################################################################################################

    def _new_request(self, id_prefix):
        # Builds a random push request dict - 'id' is always given explicitly here.
        return {
            'cid': rand_string(),
            'timestamp': rand_string(),
            'service_name': rand_string(),
            'is_request': True,
            'total_time_ms': rand_int(),
            'id': rand_string(prefix=id_prefix),
        }

# ################################################################################################################################

    def test_init(self):
        stats_client, host, port = self._new_client_with_impl()

        # init() must have connected the implementation and stored the address
        self.assertTrue(stats_client.impl.is_connect_called)
        self.assertEqual(stats_client.host, host)
        self.assertEqual(stats_client.port, port)

# ################################################################################################################################

    def test_run(self):
        stats_client, _, _ = self._new_client_with_impl()
        stats_client.run()
        self.assertTrue(stats_client.impl.is_run_called)

# ################################################################################################################################

    def test_push_id_is_given(self):
        cid = rand_string()
        id = rand_string(prefix='id')

        stats_client = ServiceStatsClient()
        stats_client.push(
            cid,
            None, # timestamp
            None, # service_name
            True, # is_request
            None, # total_time_ms
            id,
        )

        self.assertEqual(1, len(stats_client.backlog))

        ctx1 = stats_client.backlog[0] # type: PushCtx

        # The explicitly given ID must have been used as-is
        self.assertEqual(ctx1.id, id)
        self.assertEqual(ctx1.cid, cid)

# ################################################################################################################################

    def test_push_id_is_not_given(self):
        cid = rand_string()

        stats_client = ServiceStatsClient()
        stats_client.push(
            cid,
            None, # timestamp
            None, # service_name
            True, # is_request
            None, # total_time_ms
        )

        self.assertEqual(1, len(stats_client.backlog))

        ctx1 = stats_client.backlog[0] # type: PushCtx

        self.assertTrue(len(ctx1.id) >= 23) # The ID is built using new_cid which defaults to at least 23 characters.
        self.assertEqual(ctx1.cid, cid)

# ################################################################################################################################

    def test_push_is_request_true(self):
        stats_client = ServiceStatsClient()
        stats_client.push(
            None, # cid
            None, # timestamp
            None, # service_name
            True, # is_request
            None, # total_time_ms
            None, # id
        )

        ctx1 = stats_client.backlog[0] # type: PushCtx
        self.assertEqual(ctx1.event_type, EventInfo.EventType.service_request)

# ################################################################################################################################

    def test_push_is_request_false(self):
        stats_client = ServiceStatsClient()
        stats_client.push(
            None,  # cid
            None,  # timestamp
            None,  # service_name
            False, # is_request
            None,  # total_time_ms
            None,  # id
        )

        ctx1 = stats_client.backlog[0] # type: PushCtx
        self.assertEqual(ctx1.event_type, EventInfo.EventType.service_response)

# ################################################################################################################################

    def test_push_no_impl(self):

        # The client has no self.impl so it should have only enqueued the messages
        # without actually invoking the implementation.
        request1 = self._new_request('id1')
        request2 = self._new_request('id2')

        stats_client = ServiceStatsClient()
        stats_client.push(**request1)
        stats_client.push(**request2)

        # Each enqueued context must mirror the request it was built from
        for ctx, request in zip(stats_client.backlog, (request1, request2)):
            self.assertEqual(ctx.cid, request['cid'])
            self.assertEqual(ctx.timestamp, request['timestamp'])
            self.assertEqual(ctx.object_id, request['service_name'])
            self.assertEqual(ctx.event_type, EventInfo.EventType.service_request)
            self.assertEqual(ctx.total_time_ms, request['total_time_ms'])

# ################################################################################################################################

    def test_push_has_impl(self):

        # The client has self.impl so there should be no enqueued messages. Moreover, the implementation
        # should be called twice because there are two requests.
        stats_client, _, _ = self._new_client_with_impl()

        stats_client.push(**self._new_request('id1'))
        stats_client.push(**self._new_request('id2'))

        self.assertEqual(len(stats_client.backlog), 0)
        self.assertEqual(stats_client.impl.push_counter, 2)
# ################################################################################################################################
# Run all tests in this module when it is executed directly.
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
| 10,265
|
Python
|
.py
| 232
| 35.655172
| 130
| 0.430567
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,040
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/stats/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,041
|
test_database.py
|
zatosource_zato/code/zato-server/test/zato/stats/test_database.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
import os
from datetime import datetime
from tempfile import gettempdir
from time import sleep
from unittest import main, TestCase
# dateutil
from dateutil.rrule import SECONDLY, rrule
# Zato
from zato.common.api import Stats
from zato.common.events.common import EventInfo, PushCtx
from zato.common.test import rand_int, rand_string
from zato.common.typing_ import asdict, instance_from_dict
from zato.server.connection.connector.subprocess_.impl.events.database import EventsDatabase, OpCode
# ################################################################################################################################
# ################################################################################################################################
if 0:
from pandas import DataFrame
from pandas.core.groupby.generic import SeriesGroupBy
DataFrame = DataFrame
SeriesGroupBy = SeriesGroupBy
# ################################################################################################################################
# ################################################################################################################################
# Shared logging configuration for all tests in this module
log_format = '%(asctime)s - %(levelname)s - %(process)d:%(threadName)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(level=logging.WARN, format=log_format)
zato_logger = logging.getLogger('zato')

# ################################################################################################################################
# ################################################################################################################################

# Local alias for brevity
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
class Default:
    """ Default knobs for generating test events in this module. """
    LenEvents = 4       # Events generated per service
    LenServices = 3     # Distinct services producing events
    IterMultiplier = 11 # Multiplier used to derive total_time_ms values

    # NOTE(review): both thresholds are very high - presumably so that syncing
    # never triggers on its own during the tests; confirm.
    SyncThreshold = 100_000_000
    SyncInterval = 100_000_000
# ################################################################################################################################
# ################################################################################################################################
class ScenarioConfig:
    """ Time boundaries for scenario-based tests - a two-minute window in the far future. """
    TimestampFormat = '%Y-%m-%d %H:%M:%S'
    RawStart = '2056-01-02 03:04:00'
    RawEnd = '2056-01-02 03:05:59'
# ################################################################################################################################
# ################################################################################################################################
class EventsDatabaseTestCase(TestCase):
# ################################################################################################################################
def yield_scenario_events(self, len_events=None, len_services=None, iter_multiplier=None, events_multiplier=1):
# This method returns a list of events forming a scenario, with various events
# belonging to various time buckets. This is unlike yield_raw_events which returns events
# as they happen, one by one.
#
# Our scenario covers two minutes, as configured via ScenarioConfig.
#
# For each second within that timeframe we generate len_events for each of the services.
# How many services there are is configured via len_services.
#
start = datetime.strptime(ScenarioConfig.RawStart, ScenarioConfig.TimestampFormat)
end = datetime.strptime(ScenarioConfig.RawEnd, ScenarioConfig.TimestampFormat)
len_events = len_events or Default.LenEvents
len_services = len_services or Default.LenServices
iter_multiplier = iter_multiplier or Default.IterMultiplier
for time_bucket in rrule(SECONDLY, dtstart=start, until=end):
for service_idx in range(1, len_services+1):
for event_idx in range(1, len_events+1):
yield {
'timestamp': time_bucket,
'object_id': 'service-{}'.format(service_idx),
'total_time_ms': service_idx * event_idx * iter_multiplier,
}
# ################################################################################################################################
    def yield_scenario_aggr_data(self):
        # Placeholder - aggregated scenario data is not produced yet.
        pass
# ################################################################################################################################
def yield_raw_events(self, len_events=None, len_services=None, iter_multiplier=None, events_multiplier=1):
# This method returns a list of raw events, simply as if they were taking
# place in the system, one by one. This is unlike yield_scenario_events
# which returns events broken down into specific time buckets, forming a scenario.
len_events = len_events or Default.LenEvents
len_services = len_services or Default.LenServices
iter_multiplier = iter_multiplier or Default.IterMultiplier
for service_idx in range(1, len_services+1):
service_idx_str = str(service_idx)
service_name = 'service-{}'.format(service_idx)
for event_idx in range(1, len_events+1):
id = 'id-{}{}'.format(service_idx_str, event_idx)
cid = 'cid-{}{}'.format(service_idx_str, event_idx)
ctx = PushCtx()
ctx.id = id
ctx.cid = cid
ctx.timestamp = utcnow().isoformat()
ctx.event_type = EventInfo.EventType.service_response
ctx.object_type = EventInfo.ObjectType.service
ctx.object_id = service_name
ctx.total_time_ms = service_idx * event_idx * iter_multiplier
# We are adding a short pause to be better able to observe
# that each context object has a different timestamp assigned.
sleep(0.005)
yield asdict(ctx)
# ################################################################################################################################
def get_random_fs_data_path(self):
file_name = 'zato-test-events-db-' + rand_string()
temp_dir = gettempdir()
fs_data_path = os.path.join(temp_dir, file_name)
return fs_data_path
# ################################################################################################################################
def get_events_db(self, logger=None, fs_data_path=None, sync_threshold=None, sync_interval=None, max_retention=None):
logger = logger or zato_logger
fs_data_path = fs_data_path or os.path.join(gettempdir(), rand_string(prefix='fs_data_path'))
sync_threshold = sync_threshold or Default.SyncThreshold
sync_interval = sync_interval or Default.SyncInterval
max_retention = max_retention or Stats.MaxRetention
return EventsDatabase(logger, fs_data_path, sync_threshold, sync_interval, max_retention)
# ################################################################################################################################
def xtest_init(self):
sync_threshold = rand_int()
sync_interval = rand_int()
events_db = self.get_events_db(sync_threshold=sync_threshold, sync_interval=sync_interval)
self.assertEqual(events_db.sync_threshold, sync_threshold)
self.assertEqual(events_db.sync_interval, sync_interval)
# ################################################################################################################################
def xtest_modify_state_push(self):
total_events = Default.LenEvents * Default.LenServices
start = utcnow().isoformat()
events_db = self.get_events_db()
for event_data in self.yield_raw_events():
events_db.access_state(OpCode.Push, event_data)
self.assertEqual(len(events_db.in_ram_store), total_events)
self.assertEqual(events_db.num_events_since_sync, total_events)
self.assertEqual(events_db.total_events, total_events)
ctx_list = []
for item in events_db.in_ram_store:
ctx = instance_from_dict(PushCtx, item)
ctx_list.append(ctx)
self.assertEqual(len(ctx_list), total_events)
self.assertEqual(events_db.telemetry[OpCode.Internal.GetFromRAM], 0)
self.assertEqual(events_db.telemetry[OpCode.Internal.CreateNewDF], 0)
self.assertEqual(events_db.telemetry[OpCode.Internal.ReadParqet], 0)
ctx1 = ctx_list[0] # type: PushCtx
ctx2 = ctx_list[1] # type: PushCtx
ctx3 = ctx_list[2] # type: PushCtx
ctx4 = ctx_list[3] # type: PushCtx
ctx5 = ctx_list[4] # type: PushCtx
ctx6 = ctx_list[5] # type: PushCtx
ctx7 = ctx_list[6] # type: PushCtx
ctx8 = ctx_list[7] # type: PushCtx
ctx9 = ctx_list[8] # type: PushCtx
ctx10 = ctx_list[9] # type: PushCtx
ctx11 = ctx_list[10] # type: PushCtx
ctx12 = ctx_list[11] # type: PushCtx
#
# ctx.id
#
self.assertEqual(ctx1.id, 'id-11')
self.assertEqual(ctx2.id, 'id-12')
self.assertEqual(ctx3.id, 'id-13')
self.assertEqual(ctx4.id, 'id-14')
self.assertEqual(ctx5.id, 'id-21')
self.assertEqual(ctx6.id, 'id-22')
self.assertEqual(ctx7.id, 'id-23')
self.assertEqual(ctx8.id, 'id-24')
self.assertEqual(ctx9.id, 'id-31')
self.assertEqual(ctx10.id, 'id-32')
self.assertEqual(ctx11.id, 'id-33')
self.assertEqual(ctx12.id, 'id-34')
#
# ctx.cid
#
self.assertEqual(ctx1.cid, 'cid-11')
self.assertEqual(ctx2.cid, 'cid-12')
self.assertEqual(ctx3.cid, 'cid-13')
self.assertEqual(ctx4.cid, 'cid-14')
self.assertEqual(ctx5.cid, 'cid-21')
self.assertEqual(ctx6.cid, 'cid-22')
self.assertEqual(ctx7.cid, 'cid-23')
self.assertEqual(ctx8.cid, 'cid-24')
self.assertEqual(ctx9.cid, 'cid-31')
self.assertEqual(ctx10.cid, 'cid-32')
self.assertEqual(ctx11.cid, 'cid-33')
self.assertEqual(ctx12.cid, 'cid-34')
#
# ctx.timestamp
#
self.assertGreater(ctx1.timestamp, start)
self.assertGreater(ctx2.timestamp, ctx1.timestamp)
self.assertGreater(ctx3.timestamp, ctx2.timestamp)
self.assertGreater(ctx4.timestamp, ctx3.timestamp)
self.assertGreater(ctx5.timestamp, ctx4.timestamp)
self.assertGreater(ctx6.timestamp, ctx5.timestamp)
self.assertGreater(ctx7.timestamp, ctx6.timestamp)
self.assertGreater(ctx8.timestamp, ctx7.timestamp)
self.assertGreater(ctx9.timestamp, ctx8.timestamp)
self.assertGreater(ctx10.timestamp, ctx9.timestamp)
self.assertGreater(ctx11.timestamp, ctx10.timestamp)
self.assertGreater(ctx12.timestamp, ctx11.timestamp)
#
# ctx.event_type
#
self.assertEqual(ctx1.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx2.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx3.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx4.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx5.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx6.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx7.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx8.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx9.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx10.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx11.event_type, EventInfo.EventType.service_response)
self.assertEqual(ctx12.event_type, EventInfo.EventType.service_response)
#
# ctx.object_type
#
self.assertEqual(ctx1.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx2.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx3.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx4.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx5.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx6.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx7.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx8.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx9.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx10.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx11.object_type, EventInfo.ObjectType.service)
self.assertEqual(ctx12.object_type, EventInfo.ObjectType.service)
#
# ctx.object_id
#
self.assertEqual(ctx1.object_id, 'service-1')
self.assertEqual(ctx2.object_id, 'service-1')
self.assertEqual(ctx3.object_id, 'service-1')
self.assertEqual(ctx4.object_id, 'service-1')
self.assertEqual(ctx5.object_id, 'service-2')
self.assertEqual(ctx6.object_id, 'service-2')
self.assertEqual(ctx7.object_id, 'service-2')
self.assertEqual(ctx8.object_id, 'service-2')
self.assertEqual(ctx9.object_id, 'service-3')
self.assertEqual(ctx10.object_id, 'service-3')
self.assertEqual(ctx11.object_id, 'service-3')
self.assertEqual(ctx12.object_id, 'service-3')
#
# ctx.total_time_ms
#
self.assertEqual(ctx1.total_time_ms, 11)
self.assertEqual(ctx2.total_time_ms, 22)
self.assertEqual(ctx3.total_time_ms, 33)
self.assertEqual(ctx4.total_time_ms, 44)
self.assertEqual(ctx5.total_time_ms, 22)
self.assertEqual(ctx6.total_time_ms, 44)
self.assertEqual(ctx7.total_time_ms, 66)
self.assertEqual(ctx8.total_time_ms, 88)
self.assertEqual(ctx9.total_time_ms, 33)
self.assertEqual(ctx10.total_time_ms, 66)
self.assertEqual(ctx11.total_time_ms, 99)
self.assertEqual(ctx12.total_time_ms, 132)
# ################################################################################################################################
def xtest_get_data_from_ram(self):
    """ Pushing raw events and reading them back from RAM should return them
    in push order with all of their attributes intact.
    """
    start = utcnow().isoformat()
    events_db = self.get_events_db()

    for event_data in self.yield_raw_events():
        events_db.access_state(OpCode.Push, event_data)

    data = events_db.get_data_from_ram()

    # Everything was served from RAM - no new DataFrame, nothing read from Parquet
    self.assertEqual(events_db.telemetry[OpCode.Internal.GetFromRAM], 1)
    self.assertEqual(events_db.telemetry[OpCode.Internal.CreateNewDF], 0)
    self.assertEqual(events_db.telemetry[OpCode.Internal.ReadParqet], 0)

    # Numeric suffixes of the expected test events, in push order -
    # the first digit is the service, the second is the event.
    suffixes = ['11', '12', '13', '14', '21', '22', '23', '24', '31', '32', '33', '34']

    #
    # data.id
    #
    data_id = data['id']
    for idx, suffix in enumerate(suffixes):
        self.assertEqual(data_id[idx], 'id-' + suffix)

    #
    # data.cid
    #
    data_cid = data['cid']
    for idx, suffix in enumerate(suffixes):
        self.assertEqual(data_cid[idx], 'cid-' + suffix)

    #
    # data.timestamp - each entry is strictly later than the previous one
    # and the first one is later than the test's start time.
    #
    data_timestamp = data['timestamp']
    previous = start
    for idx in range(len(suffixes)):
        self.assertGreater(data_timestamp[idx], previous)
        previous = data_timestamp[idx]

    #
    # data.event_type and data.object_type - all entries are identical
    #
    data_event_type = data['event_type']
    data_object_type = data['object_type']
    for idx in range(len(suffixes)):
        self.assertEqual(data_event_type[idx], EventInfo.EventType.service_response)
        self.assertEqual(data_object_type[idx], EventInfo.ObjectType.service)

    #
    # data.object_id - four consecutive events per service
    #
    expected_object_id = ['service-1'] * 4 + ['service-2'] * 4 + ['service-3'] * 4
    data_object_id = data['object_id']
    for idx, expected in enumerate(expected_object_id):
        self.assertEqual(data_object_id[idx], expected)

    #
    # data.total_time_ms
    #
    expected_total_time = [11, 22, 33, 44, 22, 44, 66, 88, 33, 66, 99, 132]
    data_total_time_ms = data['total_time_ms']
    for idx, expected in enumerate(expected_total_time):
        self.assertEqual(data_total_time_ms[idx], expected)
# ################################################################################################################################
def xtest_get_data_from_storage_path_does_not_exist(self):
    """ Loading data from a path that does not exist must yield an empty DataFrame.
    """
    # Be explicit about the fact that we are using a random path, one that does not exist
    fs_data_path = rand_string()

    # Create a new instance pointing to that non-existent location
    events_db = self.get_events_db(fs_data_path=fs_data_path)

    # This should return an empty DataFrame because the path did not exist
    data = events_db.load_data_from_storage() # type: DataFrame
    self.assertEqual(len(data), 0)

    # A brand new DataFrame was created - nothing was read from RAM or from Parquet
    telemetry = events_db.telemetry
    self.assertEqual(telemetry[OpCode.Internal.GetFromRAM], 0)
    self.assertEqual(telemetry[OpCode.Internal.CreateNewDF], 1)
    self.assertEqual(telemetry[OpCode.Internal.ReadParqet], 0)
# ################################################################################################################################
def xtest_get_data_from_storage_path_exists(self):
    """ Loading data from an existing Parquet file must return all of its rows.
    """
    # Pandas
    import pandas as pd

    # This is where we keep Parquet data
    fs_data_path = self.get_random_fs_data_path()

    # Obtain test data and save it as a Parquet file
    test_data = list(self.yield_raw_events())
    pd.DataFrame(test_data).to_parquet(fs_data_path)

    # Create a new DB instance pointing to the file written out above
    events_db = self.get_events_db(fs_data_path=fs_data_path)

    # This should read the Parquet file back because the path does exist
    data = events_db.load_data_from_storage() # type: DataFrame
    self.assertEqual(len(data), len(test_data))

    # Everything was read from Parquet - nothing from RAM, no new DataFrame was created
    self.assertEqual(events_db.telemetry[OpCode.Internal.GetFromRAM], 0)
    self.assertEqual(events_db.telemetry[OpCode.Internal.CreateNewDF], 0)
    self.assertEqual(events_db.telemetry[OpCode.Internal.ReadParqet], 1)
# ################################################################################################################################
def xtest_sync_state(self):
    """ Syncing state should combine events pushed to RAM with what is already
    persisted on disk and write the result back to storage.
    """
    # Pandas
    import pandas as pd

    # This is where we keep Parquet data
    fs_data_path = self.get_random_fs_data_path()

    # Obtain test data
    test_data = list(self.yield_raw_events())

    # Turn it into a DataFrame
    data_frame = pd.DataFrame(test_data)

    # Save it as a Parquet file
    data_frame.to_parquet(fs_data_path)

    # Create a new test DB instance ..
    events_db = self.get_events_db(fs_data_path=fs_data_path)

    # Push data to RAM ..
    for event_data in self.yield_raw_events():
        events_db.access_state(OpCode.Push, event_data)

    # At this point, we should have data on disk and in RAM
    # and syncing should push data from RAM to disk.
    events_db.sync_state()

    # This should contain what was previously in RAM combined with what was on disk
    data = events_db.load_data_from_storage() # type: DataFrame

    # The length should be equal to twice the defaults - it is twice because we generated
    # test data two times, once for Parquet and once when it was added to RAM.
    # Note that this used to be self.assertTrue(len(data), expected) which, with two arguments,
    # treats the second one as a failure message and passes for any non-empty result -
    # assertEqual is what was actually meant here.
    self.assertEqual(len(data), 2 * Default.LenEvents * Default.LenServices)

    self.assertEqual(events_db.telemetry[OpCode.Internal.GetFromRAM], 1)
    self.assertEqual(events_db.telemetry[OpCode.Internal.CreateNewDF], 0)
    self.assertEqual(events_db.telemetry[OpCode.Internal.ReadParqet], 2)
    self.assertEqual(events_db.telemetry[OpCode.Internal.CombineData], 1)
    self.assertEqual(events_db.telemetry[OpCode.Internal.SaveData], 1)
    self.assertEqual(events_db.telemetry[OpCode.Internal.SyncState], 1)
# ################################################################################################################################
def xtest_sync_threshold(self):
    """ With a sync threshold of 1, every single push must trigger a full sync cycle.
    """
    num_iters = 3
    sync_threshold = 1
    events_db = self.get_events_db(sync_threshold=sync_threshold)

    for _x in range(num_iters):
        events_db.access_state(OpCode.Push, {'timestamp':'unused'})

    # This is 0 because we were syncing state after each modification
    self.assertEqual(events_db.num_events_since_sync, 0)

    # One full save/sync/combine cycle per push; Parquet was read on every sync
    # after the first one, which created a new DataFrame instead.
    expected_telemetry = [
        (OpCode.Internal.SaveData, 3),
        (OpCode.Internal.SyncState, 3),
        (OpCode.Internal.GetFromRAM, 3),
        (OpCode.Internal.ReadParqet, 2),
        (OpCode.Internal.CreateNewDF, 1),
        (OpCode.Internal.CombineData, 3),
    ]
    for op_code, expected in expected_telemetry:
        self.assertEqual(events_db.telemetry[op_code], expected)
# ################################################################################################################################
def xtest_sync_interval(self):
    """ With a very short sync interval, every push followed by a pause
    must trigger a full sync cycle.
    """
    num_iters = 3
    sync_interval = 0.001
    events_db = self.get_events_db(sync_interval=sync_interval)

    for _x in range(num_iters):
        events_db.access_state(OpCode.Push, {'timestamp':'unused'})
        # Wait noticeably longer than the interval so that the next push syncs
        sleep(sync_interval * 5)

    # This is 0 because we were syncing state after each modification
    self.assertEqual(events_db.num_events_since_sync, 0)

    # One full save/sync/combine cycle per push; Parquet was read on every sync
    # after the first one, which created a new DataFrame instead.
    expected_telemetry = [
        (OpCode.Internal.SaveData, 3),
        (OpCode.Internal.SyncState, 3),
        (OpCode.Internal.GetFromRAM, 3),
        (OpCode.Internal.ReadParqet, 2),
        (OpCode.Internal.CreateNewDF, 1),
        (OpCode.Internal.CombineData, 3),
    ]
    for op_code, expected in expected_telemetry:
        self.assertEqual(events_db.telemetry[op_code], expected)
# ################################################################################################################################
def xtest_max_retention(self):
    """ Only events younger than max_retention (given in milliseconds)
    should survive in persistent storage.
    """
    # Synchronise after each push
    sync_threshold = 1

    # This is in milliseconds
    max_retention = 200
    max_retension_sec = max_retention / 1000.0

    # Sleep 10% longer than the retention period to be on the safe side
    sleep_time = max_retension_sec + (max_retension_sec * 0.1)

    # This is where we keep Parquet data
    fs_data_path = self.get_random_fs_data_path()

    # Create a new DB instance upfront
    events_db = self.get_events_db(fs_data_path=fs_data_path, sync_threshold=sync_threshold, max_retention=max_retention)

    # Get events ..
    event_data_list = list(self.yield_raw_events(len_events=3, len_services=1))

    event_data1 = event_data_list[0] # type: PushCtx
    event_data2 = event_data_list[1] # type: PushCtx
    event_data3 = event_data_list[2] # type: PushCtx

    # First call, set its timestamp and push the event
    event_data1['timestamp'] = utcnow().isoformat()
    events_db.access_state(OpCode.Push, event_data1)

    # Sleep longer than retention time
    sleep(sleep_time)

    # Second call, set its timestamp too and push the event.
    # Note that this push used to be missing even though the comment announced it,
    # which left event_data2 unused by the test.
    event_data2['timestamp'] = utcnow().isoformat()
    events_db.access_state(OpCode.Push, event_data2)

    # Again, longer than retention time
    sleep(sleep_time)

    # The last call - there is no sleep afterwards, only push, which, given that the retention time is big enough,
    # means that it should be the only event left around in the storage.
    # Note that we assume that our max_retention will be enough for this push to succeed.
    event_data3['timestamp'] = utcnow().isoformat()
    events_db.access_state(OpCode.Push, event_data3)

    # Read the state from persistent storage ..
    data = events_db.load_data_from_storage()

    # .. only the last push should be available ..
    self.assertEqual(len(data), 1)

    # .. convert to a form that is easier to test ..
    data = data.transpose()
    data = data[0]

    # .. run all the remaining assertions now.
    self.assertEqual(data['id'], event_data3['id'])
    self.assertEqual(data['cid'], event_data3['cid'])
    self.assertEqual(data['timestamp'], event_data3['timestamp'])
    self.assertEqual(data['event_type'], event_data3['event_type'])
    self.assertEqual(data['object_type'], event_data3['object_type'])
    self.assertEqual(data['object_id'], event_data3['object_id'])
    self.assertEqual(data['total_time_ms'], event_data3['total_time_ms'])

    self.assertIs(data['source_type'], event_data3['source_type'])
    self.assertIs(data['source_id'], event_data3['source_id'])
    self.assertIs(data['recipient_type'], event_data3['recipient_type'])
    self.assertIs(data['recipient_id'], event_data3['recipient_id'])
# ################################################################################################################################
def xtest_aggregate(self):
    """ Aggregating raw events should produce per-(timestamp, object_id) statistics.
    Currently we check only item_max and only its very first and last rows.
    """
    # Pandas
    import pandas as pd

    # Generate test events and turn them into a DataFrame ..
    frame = pd.DataFrame(self.yield_scenario_events())

    # .. create a new DB instance ..
    events_db = self.get_events_db()

    # .. aggregate test events and convert the result to a dict
    # to make it easier to construct assertions ..
    aggregated = events_db.aggregate(frame).to_dict()

    # .. sort each statistic by its (timestamp, object_id) key ..
    item_max = sorted(aggregated['item_max'].items())
    item_min = sorted(aggregated['item_min'].items())
    item_total_time = sorted(aggregated['item_total_time'].items())
    item_mean = sorted(aggregated['item_mean'].items())

    def assert_row(row, expected_minute, expected_object_id, expected_value):
        """ Checks one (timestamp, object_id) -> value aggregation row. """
        timestamp = row[0][0] # type: pd.Timestamp
        object_id = row[0][1] # type: str
        value = row[1]        # type: int

        self.assertEqual(timestamp.year, 2056)
        self.assertEqual(timestamp.month, 1)
        self.assertEqual(timestamp.day, 2)
        self.assertEqual(timestamp.hour, 3)
        self.assertEqual(timestamp.minute, expected_minute)
        self.assertEqual(timestamp.second, 0)

        self.assertEqual(object_id, expected_object_id)
        self.assertEqual(value, expected_value)

    #
    # item_max -> 0
    #
    assert_row(item_max[0], 0, 'service-1', 44)

    #
    # item_max -> 5
    #
    assert_row(item_max[5], 5, 'service-3', 132)
# ################################################################################################################################
def xtest_tabulate(self):
    """ Tabulating events should produce one row of summary statistics per service.
    """
    # .. create a new DB instance ..
    events_db = self.get_events_db()

    # .. push test events ..
    for event_data in self.yield_scenario_events():
        events_db.access_state(OpCode.Push, event_data)

    # .. save to the file system ..
    events_db.sync_state()

    # .. tabulate test events and convert the table to a dict
    # to make the assertions below easier to express ..
    tabulated = events_db.get_table().to_dict()

    # .. this is what each service is expected to contain ..
    expected = {
        'service-1': {'item_min': 11.0, 'item_max': 44.0,  'item_mean': 27.5, 'item_total_time': 13_200, 'item_total_usage': 480.0},
        'service-2': {'item_min': 22.0, 'item_max': 88.0,  'item_mean': 55.0, 'item_total_time': 26_400, 'item_total_usage': 480.0},
        'service-3': {'item_min': 33.0, 'item_max': 132.0, 'item_mean': 82.5, 'item_total_time': 39_600, 'item_total_usage': 480.0},
    }

    # .. and run the assertions now.
    for service_name, expected_stats in expected.items():
        service = tabulated[service_name]
        for stat_name, stat_value in expected_stats.items():
            self.assertEqual(service[stat_name], stat_value)
# ################################################################################################################################
# Invoke the test runner when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
| 35,346
|
Python
|
.py
| 640
| 46.403125
| 130
| 0.576608
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,042
|
test_invoke_dataclasses.py
|
zatosource_zato/code/zato-server/test/zato/rest/test_invoke_dataclasses.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main
# Zato
from zato.common.test.rest_client import RESTClientTestCase
# ################################################################################################################################
# ################################################################################################################################
class InvocationTestCase(RESTClientTestCase):
    """ Invokes built-in helper services over REST and validates their responses.
    """
    needs_bunch = False
    needs_current_app = False
    payload_only_messages = False

# ################################################################################################################################

    def setUp(self) -> None:
        super().setUp()
        self.rest_client.init()

# ################################################################################################################################

    def test_invoke_helpers_api_spec_user(self) -> 'None':
        """ helpers.api-spec.user should echo the input username into each generated user record.
        """
        # Test data
        username = 'my.username'

        # Prepare our request ..
        request = {'username': username}

        # .. invoke the helper service ..
        response = self.get('/zato/api/invoke/helpers.api-spec.user', request)

        # .. this is what the service is expected to generate ..
        expected_user = [
            {'user_id': 222, 'username': 'username.222', 'display_name': 'display_name.222.' + username},
            {'user_id': 111, 'username': 'username.111', 'display_name': 'display_name.111.' + username},
        ]

        # .. and check the response.
        self.assertListEqual(response['user'], expected_user)
        self.assertListEqual(response['parent_user'], [])
        self.assertListEqual(response['previous_user'], [])

# ################################################################################################################################

    def test_invoke_helpers_api_account_list_with_user_id(self) -> 'None':
        """ helpers.api-spec.account-list should return a list of accounts for the given user.
        """
        # Test data
        user_id = 999
        account_id = 5555

        # Prepare our request ..
        request = {
            'user_id': user_id,
            'account_id': account_id,
        }

        # .. invoke the helper service ..
        response = self.get('/zato/api/invoke/helpers.api-spec.account-list', request)

        # .. extract the individual accounts ..
        user_account_list = response['user_account_list']

        # .. and check each one of them.
        self.assertDictEqual(user_account_list[0], {
            'user': {'user_id': 222, 'username': 'username.222', 'display_name': 'display_name.222.999'},
            'account_id': 7575,
            'account_type': 2222
        })

        self.assertDictEqual(user_account_list[1], {
            'user': {'user_id': 111, 'username': 'username.111', 'display_name': 'display_name.111.999'},
            'account_id': 6565,
            'account_type': 1111
        })
# ################################################################################################################################
# ################################################################################################################################
# Invoke the test runner when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 3,614
|
Python
|
.py
| 71
| 43.535211
| 130
| 0.391465
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,043
|
test_invoke_service.py
|
zatosource_zato/code/zato-server/test/zato/rest/test_invoke_service.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from unittest import main
# Zato
from zato.common.api import ZATO_OK
from zato.common.test.rest_client import RESTClientTestCase
# ################################################################################################################################
# ################################################################################################################################
class PingTestCase(RESTClientTestCase):
    """ Confirms that the default ping channel responds correctly.
    """
    needs_bunch = False
    needs_current_app = False
    payload_only_messages = False

    def test_invoke_ping(self) -> 'None':

        # Invoke the default ping service ..
        response = self.get('/zato/ping')
        zato_env = response['zato_env']

        # .. and check all the detail.
        self.assertEqual(response['pong'], 'zato')
        self.assertEqual(zato_env['result'], ZATO_OK)
        self.assertEqual(zato_env['details'], '')

        # We cannot be certain but it should be at least 23 characters of random data
        len_cid = len(zato_env['cid'])
        self.assertTrue(len_cid >= 23)
# ################################################################################################################################
# ################################################################################################################################
class APIInvokeTestCase(RESTClientTestCase):
    """ Confirms that services can be invoked through the API invoker endpoint.
    """
    needs_bunch = False
    needs_current_app = False
    payload_only_messages = False

    def setUp(self) -> None:
        super().setUp()
        self.rest_client.init()

    def test_api_invoke(self):

        # Invoking the built-in ping service should return a pong response
        expected = {'pong':'zato'}
        response = self.rest_client.api_invoke('pub.zato.ping')
        self.assertDictEqual(expected, response)
# ################################################################################################################################
# ################################################################################################################################
# Invoke the test runner when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 2,440
|
Python
|
.py
| 43
| 52.139535
| 130
| 0.368797
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,044
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/hl7/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,045
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/hl7/fhir_/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,046
|
test_hl7_fhir_read.py
|
zatosource_zato/code/zato-server/test/zato/hl7/fhir_/test_hl7_fhir_read.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Monkey-patch first
from gevent import monkey
monkey.patch_all()
# stdlib
import logging
import os
from time import sleep
from unittest import main, TestCase
# Bunch
from bunch import bunchify
# Zato
from zato.common.test import TestCluster, TestParallelServer
from zato.common.typing_ import cast_
from zato.server.generic.api.outconn_hl7_fhir import OutconnHL7FHIRWrapper
# ################################################################################################################################
# ################################################################################################################################
# Type-checking-only imports - the `if 0` guard means this block never runs at runtime,
# it exists purely so that type checkers and IDEs can resolve the names below.
if 0:
    from zato.common.odb.api import ODBManager
    ODBManager = ODBManager
# ################################################################################################################################
# ################################################################################################################################
# Configure module-level logging - DEBUG with a detailed format that includes
# process, thread and source-location information.
log_level = logging.DEBUG
log_format = '%(asctime)s - %(levelname)s - %(process)d:%(threadName)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(level=log_level, format=log_format)
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    """ Constants used by the FHIR read test - mostly names of environment variables
    that supply the test configuration, plus timing settings.
    """

    # Names of environment variables with the expected FHIR resource details
    Env_Key_FHIR_ID = 'Zato_Test_FHIR_ID'
    Env_Key_FHIR_Meta_Last_Updated = 'Zato_Test_FHIR_Meta_Last_Updated'

    # Names of environment variables with FHIR server credentials and location
    Env_Key_FHIR_Username = 'Zato_Test_FHIR_Username'
    Env_Key_FHIR_Password = 'Zato_Test_FHIR_Password'
    Env_Key_FHIR_Address = 'Zato_Test_FHIR_Address'

    # Names of environment variables with the expected patient's names
    Env_Key_FHIR_Person_Given_Name1 = 'Zato_Test_FHIR_Person_Given_Name1'
    Env_Key_FHIR_Person_Given_Name2 = 'Zato_Test_FHIR_Person_Given_Name2'
    Env_Key_FHIR_Person_Family_Name = 'Zato_Test_FHIR_Person_Family_Name'

    # How long (in seconds) the connection wrapper may take to build its queue
    Queue_Build_Cap = 0.5
# ################################################################################################################################
# ################################################################################################################################
class HL7FHIRReadTestCase(TestCase):
    """ Looks up a test Patient resource on a live FHIR server and validates its details.
    The test is skipped unless the Zato_Test_FHIR_Username environment variable is set.
    """

    def test_hl7_fhir_read(self):

        # Try to get the main environment variable ..
        username = os.environ.get(ModuleCtx.Env_Key_FHIR_Username)

        # .. and if it does not exist, do not run the test
        if not username:
            return

        # If we are here, it means that we can proceed with the test

        # Get the rest of the configuration.
        # Note that the fallback for `address` used to be 'Missing_Env_Key_FHIR_Password' -
        # a copy/paste mistake that made the missing-variable message misleading.
        password = os.environ.get(ModuleCtx.Env_Key_FHIR_Password) or 'Missing_Env_Key_FHIR_Password'
        address = os.environ.get(ModuleCtx.Env_Key_FHIR_Address) or 'Missing_Env_Key_FHIR_Address'
        resource_id = os.environ.get(ModuleCtx.Env_Key_FHIR_ID) or 'Missing_Env_Key_FHIR_ID'
        meta_last_updated = os.environ.get(ModuleCtx.Env_Key_FHIR_Meta_Last_Updated) or 'Missing_Env_Key_FHIR_Meta_Last_Updated'
        given_name1 = os.environ.get(ModuleCtx.Env_Key_FHIR_Person_Given_Name1) or 'Missing_Env_Key_FHIR_Person_Given_Name1'
        given_name2 = os.environ.get(ModuleCtx.Env_Key_FHIR_Person_Given_Name2) or 'Missing_Env_Key_FHIR_Person_Given_Name2'
        family_name = os.environ.get(ModuleCtx.Env_Key_FHIR_Person_Family_Name) or 'Missing_Env_Key_FHIR_Person_Family_Name'

        # Build the entire config dictionary that the wrapper expects
        config = bunchify({
            'id': 123,
            'name': 'Wrapper-HL7FHIRReadTestCase',
            'is_active': True,
            'pool_size': 1,
            'queue_build_cap': ModuleCtx.Queue_Build_Cap,
            'address': address,
            'username': username,
            'secret': password,
        })

        # Create a test server
        cluster = TestCluster('Cluster-HL7FHIRReadTestCase')
        odb = cast_('ODBManager', None)
        server_name = 'Server-HL7FHIRReadTestCase'
        server = TestParallelServer(cluster, odb, server_name)

        # Build the wrapper
        wrapper = OutconnHL7FHIRWrapper(config, server)
        wrapper.build_queue()

        # Sleep for a while to ensure that the queue is built
        sleep_time = ModuleCtx.Queue_Build_Cap * 1.1
        sleep(sleep_time)

        # First, ping the connection
        wrapper.ping()

        # Now, obtain a test resource
        with wrapper.client() as client:

            # We are going to look up patients
            patients = client.resources('Patient') # type: ignore

            # Look for this particular one - note that we search by the person's actual
            # given name read from the environment (given_name1), not by the name of
            # the environment variable itself, which is what this line used to do.
            result = patients.search(name__contains=given_name1)

            # Get the patient
            result = result.first()

            # Log information received
            logging.info('FHIR result -> %s', dict(result))

            # Now, check that all the details match
            self.assertFalse(result.active)

            self.assertEqual(result.id, resource_id)
            self.assertEqual(result.resourceType, 'Patient')
            self.assertEqual(result.meta.versionId, '1')
            self.assertEqual(result.meta.lastUpdated, meta_last_updated)

            names = result.name
            self.assertEqual(len(names), 1)

            name = names[0]
            expected_given_names = sorted([given_name1, given_name2])
            received_given_names = sorted(name.given)

            self.assertListEqual(expected_given_names, received_given_names)

            self.assertEqual(name.use, 'official')
            self.assertEqual(name.family, family_name)
# ################################################################################################################################
# ################################################################################################################################
# Invoke the test runner when this module is executed directly
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
# ################################################################################################################################
| 6,349
|
Python
|
.py
| 113
| 49.362832
| 130
| 0.507104
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,047
|
test_ftp.py
|
zatosource_zato/code/zato-server/test/zato/file_transfer/test_ftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
import os
from unittest import main, TestCase
from uuid import uuid4
# pyfilesystem
from fs.ftpfs import FTPFS
# pyftpdlib
from pyftpdlib.log import config_logging as pyftpdlib_config_logging
# Zato
from zato.common.util.api import wait_until_port_taken
from zato.common.test.ftp import config as ftp_config, FTPServer
from zato.server.connection.file_client.api import FTPFileClient
# ################################################################################################################################
# ################################################################################################################################
# Quieten the embedded FTP server's own logging - warnings and above only
pyftpdlib_config_logging(level=logging.WARN)
# ################################################################################################################################
# ################################################################################################################################
class FTPFileTransferTestCase(TestCase):
    """ End-to-end FTP test - starts an embedded FTP server, then creates a directory,
    uploads, downloads and deletes a file through FTPFileClient.
    """

    def test_ftp(self):

        # A unique per-run directory name so that test runs do not interfere with each other
        base_test_dir = 'base_test_dir.{}'.format(uuid4().hex)
        base_test_path = '/{}'.format(base_test_dir)

        # Test file name and randomized binary contents to upload
        test_file = 'zxc.txt'
        test_data = 'test_data.{}'.format(uuid4()).encode()
        test_file_path = os.path.join(base_test_path, test_file)

        def check_directory(client, should_exist):
            # type: (FTPFileClient, bool)
            """ Confirms that the test directory does (or does not) exist on the server,
            raising ValueError on a mismatch.
            """
            result = client.list('/')
            directory_list = result['directory_list']

            # The for/else idiom - the else branch runs only if the directory was not found
            for item in directory_list:
                if item['name'] == base_test_dir:
                    if not should_exist:
                        raise ValueError('Directory `{}` should not exist'.format(
                            os.path.normpath(os.path.join(ftp_config.directory, base_test_dir))))
                    else:
                        self.assertTrue(item['is_dir'])
                    break
            else:
                if should_exist:
                    raise ValueError('Expected for directory `{}` to exist'.format(
                        os.path.normpath(os.path.join(ftp_config.directory, base_test_dir))))

        # Create an embedded FTP server ..
        server = FTPServer()

        # .. start it in a new thread ..
        server.start()

        # .. wait a moment to make sure it is started ..
        wait_until_port_taken(ftp_config.port)

        # .. create an underlying FTP connection object ..
        conn = FTPFS('localhost', ftp_config.username, ftp_config.password, port=ftp_config.port)

        # .. create a higher-level FTP client ..
        client = FTPFileClient(conn, {
            'encoding': 'utf8'
        })

        # .. confirm we are connected ..
        ping_response = client.ping()
        self.assertTrue(ping_response)

        # .. create a new directory ..
        client.create_directory(base_test_dir)

        # .. make sure the directory was created ..
        check_directory(client, True)

        # .. store a new file ..
        client.store(test_file_path, test_data)

        # .. download the uploaded file ..
        received_data = client.get(test_file_path)

        # .. compare the downloaded and uploaded data ..
        self.assertEqual(received_data, test_data)

        # .. delete the test directory ..
        client.delete_directory(base_test_dir)

        # .. make sure the directory was deleted ..
        check_directory(client, False)

        # .. stop the client ..
        client.close()

        # .. finally, stop the embedded server.
        server.stop()
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
_ = main()
# ################################################################################################################################
| 4,174
|
Python
|
.py
| 84
| 41.154762
| 130
| 0.471777
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,048
|
__init__.py
|
zatosource_zato/code/zato-server/test/zato/file_transfer/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 148
|
Python
|
.py
| 5
| 28.2
| 64
| 0.687943
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,049
|
__init__.py
|
zatosource_zato/code/zato-server/src/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,050
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2010 Dariusz Suchojad <dsuch at zato.io>
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
__import__('pkg_resources').declare_namespace(__name__)
| 287
|
Python
|
.py
| 8
| 34.375
| 64
| 0.698182
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,051
|
startup_callable.py
|
zatosource_zato/code/zato-server/src/zato/server/startup_callable.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from importlib import import_module
from logging import getLogger
from traceback import format_exc
# ################################################################################################################################
if 0:
from bunch import Bunch
Bunch = Bunch
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
class PhaseCtx:
""" Describes a particular phase of a server startup process.
"""
def __init__(self, phase, args, kwargs):
# type: (str, list, dict)
self.phase = phase
self.args = args or [] # type: list
self.kwargs = kwargs or {} # type: dict
# ################################################################################################################################
class StartupCallableTool:
""" Handles logic related to server startup callables.
"""
def __init__(self, server_config):
# type: (Bunch)
self._callable_names = server_config.misc.startup_callable # type: list
self._callable_names = self._callable_names if isinstance(self._callable_names, list) else \
[self._callable_names] # type: list
self.callable_list = []
self.init()
def init(self):
for item in self._callable_names:
if item:
if '.' not in item:
logger.warning('Invalid startup callable name `%s`, must be a dotted Python name', item)
continue
try:
module_name, object_name = item.rsplit('.', 1)
mod = import_module(module_name)
obj = getattr(mod, object_name)
self.callable_list.append(obj)
except Exception as e:
logger.warning('Could not import startup callable `%s`, e:`%s`', item, e)
def invoke(self, phase, args=None, kwargs=None):
# type: (PhaseCtx, list, dict)
ctx = PhaseCtx(phase, args, kwargs)
for callable_object in self.callable_list:
try:
callable_object(ctx)
except Exception:
logger.warning('Could not invoke `%s`, e:`%s`', callable_object, format_exc())
# ################################################################################################################################
def default_callable(ctx):
""" Default startup callable added for demonstration purposes.
"""
# type: (PhaseCtx)
logger.info('Default startup callable entering phase `%s`', ctx.phase)
# ################################################################################################################################
| 3,154
|
Python
|
.py
| 64
| 41.28125
| 130
| 0.454723
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,052
|
config.py
|
zatosource_zato/code/zato-server/src/zato/server/config.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from copy import deepcopy
from logging import getLogger
from threading import RLock
# Paste
from paste.util.multidict import MultiDict
# Bunch
from zato.bunch import Bunch
# Python 2/3 compatibility
from zato.common.ext.future.utils import itervalues
from zato.common.py23_.past.builtins import unicode
# Zato
from zato.common.api import ZATO_NONE
from zato.common.const import SECRETS
from zato.common.util.config import resolve_name, resolve_value
from zato.common.util.sql import ElemsWithOpaqueMaker
# ################################################################################################################################
if 0:
from zato.common.typing_ import anylist, stranydict
from zato.server.connection.ftp import FTPStore
FTPStore = FTPStore
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
class ConfigDict:
""" Stores configuration of a particular item of interest, such as an
outgoing HTTP connection. Could've been a dict and we wouldn't have been using
.get and .set but things like connection names aren't necessarily proper
Python attribute names. Also, despite certain dict operations being atomic
in CPython, the class employs a threading.Lock in critical places so the code
doesn't assume anything about CPython's byte code-specific implementation
details.
"""
def __init__(self, name, _bunch=None):
self.name = name # type: unicode
self._impl = _bunch # type: Bunch
self.lock = RLock()
# ################################################################################################################################
def get(self, key, default=None):
with self.lock:
key = key.strip()
return self._impl.get(key, default)
# ################################################################################################################################
def set(self, key, value):
with self.lock:
self._impl[key] = value
__setitem__ = set
# ################################################################################################################################
def __getitem__(self, key):
with self.lock:
key = key.strip()
return self._impl.__getitem__(key)
# ################################################################################################################################
def __delitem__(self, key):
with self.lock:
del self._impl[key]
# ################################################################################################################################
def pop(self, key, default):
with self.lock:
return self._impl.pop(key, default)
# ################################################################################################################################
def update(self, dict_):
# type: (dict_)
with self.lock:
self._impl.update(dict_)
# ################################################################################################################################
def __iter__(self):
with self.lock:
return iter(self._impl)
# ################################################################################################################################
def __repr__(self):
with self.lock:
return '<{} at {} keys:[{}]>'.format(self.__class__.__name__,
hex(id(self)), sorted(self._impl.keys()))
__str__ = __repr__
# ################################################################################################################################
def __nonzero__(self):
with self.lock:
return bool(self._impl)
# ################################################################################################################################
def keys(self):
with self.lock:
return self._impl.keys()
# ################################################################################################################################
def values(self):
with self.lock:
return self._impl.values()
# ################################################################################################################################
def itervalues(self):
with self.lock:
return itervalues(self._impl)
# ################################################################################################################################
def items(self):
with self.lock:
return self._impl.items()
# ################################################################################################################################
def get_by_id(self, key_id, default=None):
with self.lock:
key = self._impl.get('_zato_id_%s' % key_id)
return self._impl.get(key, default)
# ################################################################################################################################
def set_key_id_data(self, config):
with self.lock:
key_id = config['id']
key = config['name']
self._impl['_zato_id_%s' % key_id] = key
# ################################################################################################################################
def copy(self):
""" Returns a new instance of ConfigDict with items copied over from self.
"""
with self.lock:
config_dict = ConfigDict(self.name)
config_dict._impl = Bunch()
config_dict._impl.update(deepcopy(self._impl))
return config_dict
# ################################################################################################################################
def get_config_list(self, predicate=lambda value: value):
""" Returns a list of deepcopied config Bunch objects.
"""
out = []
with self.lock:
for value in self.values():
if isinstance(value, dict):
config = value['config']
if predicate(config):
out.append(deepcopy(config))
return out
# ################################################################################################################################
def copy_keys(self, skip_ids=True):
""" Returns a deepcopy of the underlying Bunch's keys
"""
with self.lock:
keys = self._impl.keys()
if skip_ids:
keys = [elem for elem in keys if not elem.startswith('_zato_id')]
return deepcopy(keys)
# ################################################################################################################################
@staticmethod
def from_query(name, query_data, impl_class=Bunch, item_class=Bunch, list_config=False, decrypt_func=None, drop_opaque=False):
""" Return a new ConfigDict with items taken from an SQL query.
"""
config_dict = ConfigDict(name)
config_dict._impl = impl_class()
if query_data:
query, attrs = query_data
for item in query:
if hasattr(item, 'name'):
item_name = item.name
else:
item_name = item.get_name()
item_name = resolve_name(item_name)
if list_config:
list_dict = Bunch()
if item_name not in config_dict._impl:
config_dict._impl[item_name] = []
config_dict._impl[item_name].append(list_dict)
else:
config_dict._impl[item_name] = item_class()
if list_config:
for attr_name in attrs.keys():
list_dict[attr_name] = getattr(item, attr_name)
else:
config_dict._impl[item_name].config = item_class()
for attr_name in attrs.keys():
config = config_dict._impl[item_name]['config']
original = value = getattr(item, attr_name)
value = resolve_value(attr_name, value, decrypt_func)
config[attr_name] = value
# Temporarily, add a flag to indicate whether the password in ODB was encrypted or not.
if attr_name in SECRETS.PARAMS:
if original is None:
original = ''
config['_encryption_needed'] = True
if not isinstance(original, unicode):
orig_uni = original.decode('utf8')
else:
orig_uni = original
if orig_uni.startswith(SECRETS.PREFIX):
config['_encrypted_in_odb'] = True
else:
config['_encrypted_in_odb'] = False
# Post-process data before it is returned to resolve any opaque attributes
for value in config_dict.values():
value_config = value['config']
if ElemsWithOpaqueMaker.has_opaque_data(value_config):
ElemsWithOpaqueMaker.process_config_dict(value_config, drop_opaque)
return config_dict
# ################################################################################################################################
@staticmethod
def from_generic(config_dict):
return config_dict
# ################################################################################################################################
class ConfigStore:
""" The central place for storing a Zato server's thread configuration.
May /not/ be shared across threads - each thread should get its own copy
using the .copy method.
"""
def __init__(self):
# Outgoing connections
self.out_ftp = None # type: ConfigDict
self.out_sftp = None # type: ConfigDict
self.out_odoo = None # type: ConfigDict
self.out_soap = None # type: ConfigDict
self.out_sql = None # type: ConfigDict
self.out_sap = None # type: ConfigDict
self.out_plain_http = None # type: ConfigDict
self.out_amqp = None # type: ConfigDict
self.cloud_aws_s3 = None # type: ConfigDict
self.email_smtp = None # type: ConfigDict
self.email_imap = None # type: ConfigDict
self.channel_zmq = None # type: ConfigDict
self.out_zmq = None # type: ConfigDict
self.channel_web_socket = None # type: ConfigDict
self.generic_connection = None # type: ConfigDict
self.notif_sql = None # type: ConfigDict
self.service = None # type: ConfigDict
self.sms_twilio = None # type: ConfigDict
self.search_es = None # type: ConfigDict
self.search_solr = None # type: ConfigDict
self.cassandra_conn = None # type: ConfigDict
self.cassandra_query = None # type: ConfigDict
self.cache_builtin = None # type: ConfigDict
self.cache_memcached = None # type: ConfigDict
self.pubsub = None # type: ConfigDict
self.pubsub_endpoint = None # type: ConfigDict
self.pubsub_topic = None # type: ConfigDict
self.pubsub_subscription = None # type: ConfigDict
# Local on-disk configuraion repository
self.repo_location = None # type: str
# Security definitions
self.apikey = None # type: ConfigDict
self.aws = None # type: ConfigDict
self.basic_auth = None # type: ConfigDict
self.jwt = None # type: ConfigDict
self.ntlm = None # type: ConfigDict
self.oauth = None # type: ConfigDict
self.rbac_permission = None # type: ConfigDict
self.rbac_role = None # type: ConfigDict
self.rbac_client_role = None # type: ConfigDict
self.rbac_role_permission = None # type: ConfigDict
self.tls_ca_cert = None # type: ConfigDict
self.tls_channel_sec = None # type: ConfigDict
self.tls_key_cert = None # type: ConfigDict
self.vault_conn_sec = None # type: ConfigDict
# URL security
self.url_sec = None # type: ConfigDict
# HTTP channels
self.http_soap = None # type: anylist
# Configuration for broker clients
self.broker_config = None
# ODB
self.odb_data = Bunch()
# SimpleIO
self.simple_io = None # type: stranydict
# Namespace
self.msg_ns = None # type: ConfigDict
# JSON Pointer
self.json_pointer = None # type: ConfigDict
# Services
self.service = None # type: ConfigDict
# IBM MQ
self.definition_wmq = None # type: ConfigDict
self.out_wmq = None # type: ConfigDict
self.channel_wmq = None # type: ConfigDict
self.channel_amqp = None # type: ConfigDict
self.definition_amqp = None # type: ConfigDict
# ################################################################################################################################
def get_config_by_item_id(self, attr_name, item_id):
# type: (str, object) -> dict
# Imported here to avoid circular references
from zato.server.connection.ftp import FTPStore
item_id = int(item_id)
config = getattr(self, attr_name) # type: dict
if isinstance(config, FTPStore):
needs_inner_config = False
values = config.conn_params.values()
else:
needs_inner_config = True
values = config.values()
for value in values:
if needs_inner_config:
config_dict = value['config']
else:
config_dict = value
if config_dict['id'] == item_id:
return config_dict
# ################################################################################################################################
def __getitem__(self, key):
return getattr(self, key)
# ################################################################################################################################
def outgoing_connections(self):
""" Returns all the outgoing connections.
"""
return self.out_ftp, self.out_sftp, self.out_odoo, self.out_plain_http, self.out_soap, self.out_sap
# ################################################################################################################################
def copy(self):
""" Creates a copy of this ConfigStore. All configuration data is copied
over except for SQL connections.
"""
config_store = ConfigStore()
# Grab all ConfigDicts - even if they're actually ZATO_NONE - and make their copies
for attr_name in dir(self):
attr = getattr(self, attr_name)
if isinstance(attr, ConfigDict):
copy_func = attr.copy
setattr(config_store, attr_name, copy_func())
elif attr is ZATO_NONE:
setattr(config_store, attr_name, ZATO_NONE)
http_soap = MultiDict()
dict_of_lists = self.http_soap.dict_of_lists()
for url_path, lists in dict_of_lists.items():
_info = Bunch()
for elem in lists:
for soap_action, item in elem.items():
_info[soap_action] = Bunch()
_info[soap_action].id = item.id
_info[soap_action].name = item.name
_info[soap_action].is_active = item.is_active
_info[soap_action].is_internal = item.is_internal
_info[soap_action].url_path = item.url_path
_info[soap_action].method = item.method
_info[soap_action].soap_version = item.soap_version
_info[soap_action].service_id = item.service_id
_info[soap_action].service_name = item.service_name
_info[soap_action].impl_name = item.impl_name
_info[soap_action].transport = item.transport
_info[soap_action].connection = item.connection
http_soap.add(url_path, _info)
config_store.http_soap = http_soap
config_store.url_sec = self.url_sec
config_store.broker_config = self.broker_config
config_store.odb_data = deepcopy(self.odb_data)
return config_store
# ################################################################################################################################
| 17,365
|
Python
|
.py
| 339
| 40.784661
| 130
| 0.448883
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,053
|
live_browser.py
|
zatosource_zato/code/zato-server/src/zato/server/live_browser.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
# gevent
from gevent import spawn
# SQLAlchemy
from sqlalchemy import and_
# TextBlob
from textblob import TextBlob
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems
from zato.common.py23_.past.builtins import basestring
# Zato
from zato.common.api import WEB_SOCKET
from zato.common.odb.model import Server, WebSocketClient, WebSocketSubscription
# ################################################################################################################################
_wsgi_ignore = (
'HTTP_ACCEPT', 'zato.oauth.post_data', 'zato.channel_item', 'zato.http.response.headers', 'zato.http.GET', 'zato.http.POST'
)
_wsgi_sub_dicts = ('zato.http.response.headers', 'zato.http.GET', 'zato.http.POST')
# ################################################################################################################################
def match_pattern(text, pattern):
""" Returns True if every element in pattern is contained in words extracted ouf of text,
pattern is assumed to be a set of lower-cased string elements.
"""
return pattern <= {elem.lower() for elem in TextBlob(text).words}
# ################################################################################################################################
def live_browser_patterns(session, cluster_id):
return session.query(WebSocketClient.ext_client_id, WebSocketSubscription.pattern).\
filter(WebSocketSubscription.client_id==WebSocketClient.id).\
filter(Server.cluster_id==cluster_id).\
filter(and_(
WebSocketSubscription.is_by_ext_id.is_(False),
WebSocketSubscription.is_by_channel.is_(False))).\
outerjoin(Server, Server.id==WebSocketClient.server_id).\
all()
# ################################################################################################################################
def notify_msg_browser(service, step):
with closing(service.odb.session()) as session:
subs = {}
for ext_client_id, pattern in live_browser_patterns(session, service.server.cluster_id):
pattern = pattern.replace(WEB_SOCKET.PATTERN.MSG_BROWSER_PREFIX, '', 1)
patterns = subs.setdefault(ext_client_id, set())
patterns.add(pattern)
# All metadata
meta = []
# WSGI keys and values of interest
wsgi = []
for key, value in sorted(service.wsgi_environ.items()):
if key.startswith('SERVER_'):
continue
if key.startswith('wsgi.'):
continue
if key.startswith('gunicorn.'):
continue
if key in _wsgi_ignore:
continue
if not isinstance(key, basestring):
key = str(key)
if not isinstance(value, basestring):
value = str(value)
wsgi.append('{} {}'.format(key.lower(), value.lower()))
for _sub_dict in _wsgi_sub_dicts:
for key, value in iteritems(service.wsgi_environ[_sub_dict]):
wsgi.append('{} {}'.format(key.lower(), value.lower()))
channel_name = service.channel.name or 'invoker'
wsgi_text = ' '.join(wsgi).strip() or ''
meta = ' '.join([step, service.channel.type, channel_name, wsgi_text])
# Concatenation of input data + WSGI + other metadata
text = ('{} {}'.format(meta, service.request.raw_request.lower())).strip()
# Match all data against each subscription
for ext_client_id, pattern in subs.items():
# Ok, something matched, send that client a notification
if match_pattern(text, pattern):
spawn(service.out.websockets.invoke, {'meta':meta, 'request':service.request.raw_request}, id=ext_client_id)
# ################################################################################################################################
| 4,104
|
Python
|
.py
| 81
| 44.641975
| 130
| 0.570211
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,054
|
jwt_.py
|
zatosource_zato/code/zato-server/src/zato/server/jwt_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import uuid
from contextlib import closing
from datetime import datetime
from logging import getLogger
# Bunch
from bunch import bunchify, Bunch
# Cryptography
from cryptography.fernet import Fernet
# JWT
import jwt
# Zato
from zato.common.odb.model import JWT as JWTModel
from zato.server.jwt_cache import JWTCache
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
class AuthInfo:
__slots__ = 'sec_def_id', 'sec_def_username', 'token'
def __init__(self, sec_def_id, sec_def_username, token):
# type: (int, str, str)
self.sec_def_id = sec_def_id
self.sec_def_username = sec_def_username
self.token = token
# ################################################################################################################################
class JWT:
""" JWT authentication backend.
"""
ALGORITHM = 'HS256'
# ################################################################################################################################
def __init__(self, odb, decrypt_func, secret):
self.odb = odb
self.cache = JWTCache(odb)
self.decrypt_func = decrypt_func
self.secret = secret
self.fernet = Fernet(self.secret)
# ################################################################################################################################
def _lookup_jwt(self, username, password):
# type: (str, str) -> JWTModel
with closing(self.odb.session()) as session:
item = session.query(JWTModel).\
filter(JWTModel.username==username).\
first()
if item:
if self.decrypt_func(item.password) == password:
return item
# ################################################################################################################################
def _create_token(self, **data):
token_data = {
'session_id': uuid.uuid4().hex,
'creation_time': datetime.utcnow().isoformat()
}
token_data.update(data)
token = jwt.encode(token_data, self.secret, algorithm=self.ALGORITHM)
if not isinstance(token, bytes):
token = token.encode('utf8')
return self.fernet.encrypt(token).decode('utf8')
# ################################################################################################################################
def authenticate(self, username, password):
""" Validate cretentials and generate a new token if valid.
1. Validate cretentials against ODB
2.a: If not valid, return nothing
2.b: If valid:
3. Create a new token
4. Cache the new token synchronously (we wait for it to be truly stored).
5. Return the token
"""
item = self._lookup_jwt(username, password)
if item:
token = self._create_token(username=username, ttl=item.ttl)
self.cache.put(token, token, item.ttl, is_async=False)
suffix = 's' if item.ttl > 1 else ''
logger.info('New token generated for user `%s` with a TTL of `%i` second{}'.format(suffix), username, item.ttl)
return AuthInfo(item.id, item.username, token)
# ################################################################################################################################
def validate(self, expected_username, token):
""" Check if the given token is (still) valid.
1. Look for the token in Cache without decrypting/decoding it.
2.a If not found, return "Invalid"
2.b If found:
3. decrypt
4. decode
5. renew the cache expiration asynchronously (do not wait for the update confirmation).
5. return "valid" + the token contents
"""
if self.cache.get(token):
decrypted = self.fernet.decrypt(token)
token_data = bunchify(jwt.decode(decrypted, self.secret))
if token_data.username == expected_username:
# Renew the token expiration
self.cache.put(token, token, token_data.ttl, is_async=True)
return Bunch(valid=True, token=token_data, raw_token=token)
else:
return Bunch(valid=False, message='Unexpected user for token found')
else:
return Bunch(valid=False, message='Invalid token')
# ################################################################################################################################
def delete(self, token):
""" Deletes a token in ODB.
"""
self.cache.delete(token)
# ################################################################################################################################
| 5,298
|
Python
|
.py
| 108
| 41.092593
| 130
| 0.465049
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,055
|
rbac_.py
|
zatosource_zato/code/zato-server/src/zato/server/rbac_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# simple-rbac
from rbac.acl import Registry as _Registry
# gevent
from gevent.lock import RLock
# Zato
from zato.common.api import ZATO_NONE
from zato.common.util.api import make_repr, wait_for_dict_key
# ################################################################################################################################
logger = getLogger('zato_rbac')
# ################################################################################################################################
class Registry(_Registry):
def __init__(self, delete_role_callback):
super(Registry, self).__init__()
self.delete_role_callback = delete_role_callback
def delete_role(self, delete_role):
self.delete_role_callback(delete_role)
# Delete the role itself.
del self._roles[delete_role]
# Recursively delete any children along with their own children.
for child_id, child_parents in self._roles.items():
if delete_role in child_parents:
self.delete_role(child_id)
# Remove the role from any permissions it may have been involved in.
self.delete_from_permissions('role', delete_role)
def delete_resource(self, delete_resource):
""" Remove the resource from any grants it may have been involved in but only if we actually had it.
"""
if self._resources.pop(delete_resource, ZATO_NONE) != ZATO_NONE:
self.delete_from_permissions('resource', delete_resource)
def delete_from_permissions(self, compare_name, delete_item):
reg_del = {'_allowed':[], '_denied':[]}
name_to_idx = {'role':0, 'operation':1, 'resource':2}
for name in reg_del:
item = getattr(self, name)
for values in item:
if values[name_to_idx[compare_name]] == delete_item:
reg_del[name].append(values)
for name in reg_del:
item = getattr(self, name)
for value in reg_del[name]:
del item[value]
def delete_allow(self, config):
del self._allowed[config]
def delete_deny(self, config):
del self._denied[config]
# ################################################################################################################################
class RBAC:
    """ A facade over role-based access control data - permissions, roles,
    clients and their mutual associations - with all updates serialized
    through a single lock.
    """
    def __init__(self):
        self.registry = Registry(self._delete_callback)
        self.update_lock = RLock()

        # Permission ID -> permission name
        self.permissions = {}

        # HTTP verb -> permission ID
        self.http_permissions = {}

        # Role mappings, kept in both directions for fast lookups
        self.role_id_to_name = {}
        self.role_name_to_id = {}

        # Client mappings, also kept in both directions
        self.client_def_to_role_id = {}
        self.role_id_to_client_def = {}

# ################################################################################################################################

    def __repr__(self):
        return make_repr(self)

# ################################################################################################################################

    def create_permission(self, id, name):
        """ Registers a new permission under a given ID.
        """
        with self.update_lock:
            self.permissions[id] = name

    def edit_permission(self, id, new_name):
        """ Renames an existing permission, raising ValueError if the ID is unknown.
        """
        with self.update_lock:
            if not id in self.permissions:
                raise ValueError('Permission ID `{}` ({}) not found among `{}`'.format(id, new_name, self.permissions))
            self.permissions[id] = new_name

    def delete_permission(self, id):
        """ Deletes a permission and removes it from all grants it appeared in.
        """
        with self.update_lock:
            del self.permissions[id]
            self.registry.delete_from_permissions('operation', id)

    def set_http_permissions(self):
        """ Maps HTTP verbs to CRUD permissions.
        """
        verb_map = {
            'GET': 'Read',
            'POST': 'Create',
            'PATCH': 'Update',
            'PUT': 'Update',
            'DELETE': 'Delete',
        }

        # A sentinel to tell a missing permission apart from any real ID
        _not_found = object()

        for verb, target_perm_name in verb_map.items():

            # Use the first permission whose name matches the verb's target, if any
            matches = (perm_id for perm_id, perm_name in self.permissions.items() if perm_name == target_perm_name)
            perm_id = next(matches, _not_found)

            if perm_id is not _not_found:
                self.http_permissions[verb] = perm_id

# ################################################################################################################################

    def _rbac_create_role(self, id, name, parent_id):
        """ Low-level role creation. Must be called with self.update_lock held.
        """
        self.role_id_to_name[id] = name
        self.role_name_to_id[name] = id

        # A role must never be its own parent
        parents = [parent_id] if parent_id and id != parent_id else []
        self.registry.add_role(id, parents=parents)

    def _delete_callback(self, id):
        """ Invoked by the registry for each role it deletes, including child ones.
        """
        self._rbac_delete_role(id, self.role_id_to_name[id])

    def _rbac_delete_role(self, id, name):
        """ Low-level role deletion. Must be called with self.update_lock held.
        """
        self.role_id_to_name.pop(id)
        self.role_name_to_id.pop(name)

    def create_role(self, id, name, parent_id):
        with self.update_lock:
            self._rbac_create_role(id, name, parent_id)

    def edit_role(self, id, old_name, name, parent_id):
        with self.update_lock:
            self._rbac_delete_role(id, old_name)
            self.registry._roles[id].clear() # Roles can have one parent only
            self._rbac_create_role(id, name, parent_id)

    def delete_role(self, id, name):
        with self.update_lock:
            self.registry.delete_role(id)

# ################################################################################################################################

    def create_client_role(self, client_def, role_id):
        """ Associates a client with a role, raising ValueError if the role is unknown.
        """
        with self.update_lock:
            if role_id not in self.role_id_to_name:
                raise ValueError('Role `{}` not found among `{}`'.format(role_id, self.role_id_to_name))
            self.client_def_to_role_id.setdefault(client_def, []).append(role_id)
            self.role_id_to_client_def.setdefault(role_id, []).append(client_def)

    def delete_client_role(self, client_def, role_id):
        """ Deletes an association between a client and a role.
        """
        with self.update_lock:
            self.client_def_to_role_id[client_def].remove(role_id)
            self.role_id_to_client_def[role_id].remove(client_def)

    def wait_for_client_role(self, role_id):
        # Blocks until the role appears in the ID-to-name map
        wait_for_dict_key(self.role_id_to_name, role_id)

# ################################################################################################################################

    def create_resource(self, resource):
        with self.update_lock:
            self.registry.add_resource(resource)

    def delete_resource(self, resource):
        with self.update_lock:
            self.registry.delete_resource(resource)

# ################################################################################################################################

    def create_role_permission_allow(self, role_id, perm_id, resource):
        with self.update_lock:
            self.registry.allow(role_id, perm_id, resource)

    def create_role_permission_deny(self, role_id, perm_id, resource):
        with self.update_lock:
            self.registry.deny(role_id, perm_id, resource)

    def delete_role_permission_allow(self, role_id, perm_id, resource):
        with self.update_lock:
            self.registry.delete_allow((role_id, perm_id, resource))

    def delete_role_permission_deny(self, role_id, perm_id, resource):
        with self.update_lock:
            self.registry.delete_deny((role_id, perm_id, resource))

# ################################################################################################################################

    def is_role_allowed(self, role_id, perm_id, resource):
        """ Returns True/False depending on whether a given role is allowed to obtain a selected permission for a resource.
        """
        return self.registry.is_allowed(role_id, perm_id, resource)

    def is_client_allowed(self, client_def, perm_id, resource):
        """ Returns True/False depending on whether a given client is allowed to obtain a selected permission for a resource.
        All of the client's roles are consulted and if any is allowed, True is returned. If none is, False is returned.
        """
        roles = self.client_def_to_role_id.get(client_def, ZATO_NONE)
        if roles == ZATO_NONE:
            return False
        return self.registry.is_any_allowed(roles, perm_id, resource)

    def is_http_client_allowed(self, client_def, http_verb, resource):
        """ Same as is_client_allowed but accepts a HTTP verb rather than a permission ID.
        """
        perm_id = self.http_permissions[http_verb]
        return self.is_client_allowed(client_def, perm_id, resource)
# ################################################################################################################################
| 8,801
|
Python
|
.py
| 165
| 44.806061
| 130
| 0.52804
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,056
|
sso.py
|
zatosource_zato/code/zato-server/src/zato/server/sso.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Zato
from zato.common.api import RATE_LIMIT, SEC_DEF_TYPE
# ################################################################################################################################
# Type checking
if 0:
from bunch import Bunch
from zato.server.base.parallel import ParallelServer
Bunch = Bunch
ParallelServer = ParallelServer
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# Definitions of these security types may be linked to SSO users and their rate limiting definitions
_sec_def_sso_rate_limit = SEC_DEF_TYPE.BASIC_AUTH, SEC_DEF_TYPE.JWT
# ################################################################################################################################
# ################################################################################################################################
class SSOTool:
    """ Server-wide utilities related to SSO.
    """
    def __init__(self, server):
        # type: (ParallelServer)
        self.server = server

    def on_external_auth(self, sec_type, sec_def_id, sec_def_username, cid, wsgi_environ, ext_session_id=None,
        totp_code=None, _rate_limit_type_sso_user=RATE_LIMIT.OBJECT_TYPE.SSO_USER, _basic_auth=SEC_DEF_TYPE.BASIC_AUTH):
        # type: (str, int, str, str, dict, object, object)
        """ Invoked after a request was authenticated against an external security definition
        (only Basic Auth and JWT definitions may be linked to SSO users - see _sec_def_sso_rate_limit).
        If the definition is linked to an SSO user, enforces that user's rate limit and then
        creates or extends the user's SSO session, returning its encrypted UST to the caller
        via the X-Zato-SSO-UST response header.
        """
        if sec_type in _sec_def_sso_rate_limit:

            # Do we have an SSO user related to this sec_def?
            auth_id_link_map = self.server.sso_api.user.auth_id_link_map['zato.{}'.format(sec_type)] # type: dict
            sso_user_id = auth_id_link_map.get(sec_def_id)

            if sso_user_id:

                # At this point we have an SSO user and we know that credentials
                # from the request were valid so we may check rate-limiting
                # first and then create or extend the user's associated SSO session.
                # In other words, we can already act as though the user was already
                # logged in because in fact he or she is logged in, just using
                # a security definition from sec_def.

                # Check rate-limiting - raises an exception if the limit is exceeded,
                # which stops processing before any session is created.
                self.server.rate_limiting.check_limit(cid, _rate_limit_type_sso_user,
                    sso_user_id, wsgi_environ['zato.http.remote_addr'], False)

                # Rate-limiting went fine, we can now create or extend
                # the person's SSO session linked to credentials from the request.

                # The current application is taken from an HTTP header, falling back
                # to the configured default one if the header is missing.
                current_app = wsgi_environ.get(self.server.sso_config.apps.http_header) or \
                    self.server.sso_config.apps.default

                session_info = self.server.sso_api.user.session.on_external_auth_succeeded(
                    cid,
                    sec_type,
                    sec_def_id,
                    sec_def_username,
                    sso_user_id,
                    ext_session_id,
                    totp_code,
                    current_app,
                    wsgi_environ['zato.http.remote_addr'],
                    wsgi_environ.get('HTTP_USER_AGENT'),
                )

                # Hand the session's UST back to the client, encrypted
                if session_info:
                    wsgi_environ['zato.http.response.headers']['X-Zato-SSO-UST'] = self.server.encrypt(
                        session_info.ust, _prefix='')
# ################################################################################################################################
# ################################################################################################################################
| 4,038
|
Python
|
.py
| 68
| 48.779412
| 130
| 0.468695
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,057
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,058
|
query.py
|
zatosource_zato/code/zato-server/src/zato/server/query.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Zato
from zato.server.store import BaseAPI, BaseStore
logger = getLogger(__name__)
class CassandraQueryAPI(BaseAPI):
    """ API to query Cassandra through prepared statements.

    The whole implementation is inherited from BaseAPI - this subclass only
    binds that generic API to the Cassandra query store.
    """
class CassandraQueryStore(BaseStore):
    """ Stores Cassandra prepared statements.
    """
    def create_impl(self, config, config_no_sensitive, **extra):
        """ Prepares the statement from config.value on the underlying definition's
        connection. Returns None - after logging a warning - if the definition
        currently has no connection.
        """
        conn = extra['def_'].conn
        if not conn:
            logger.warning('Could not create a Cassandra query `%s`, conn is None`', config_no_sensitive)
        else:
            return conn.prepare(config.value)

    def update_by_def(self, del_name, new_def):
        """ Invoked when the underlying definition got updated.
        Iterates through all the queries that were using it and updates them accordingly.
        """
        with self.lock:
            # Iterate over a snapshot - self._edit deletes and re-inserts entries
            # in self.items, which would raise a RuntimeError in Python 3 if the
            # dictionary's view were iterated over directly.
            for value in list(self.items.values()):
                if value.config.def_name == del_name:
                    self._edit(value.config.name, value.config, def_=new_def)
| 1,264
|
Python
|
.py
| 31
| 34.580645
| 105
| 0.673203
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,059
|
store.py
|
zatosource_zato/code/zato-server/src/zato/server/store.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import weakref
from copy import deepcopy
from logging import getLogger
from traceback import format_exc
# Bunch
from bunch import Bunch
# Zato
from zato.common.api import SECRET_SHADOW
from zato.common.exception import Inactive
logger = getLogger(__name__)
class BaseAPI:
    """ A base class for connection/query APIs.
    """
    def __init__(self, conn_store):
        self._conn_store = conn_store

    def get(self, name, skip_inactive=False):
        """ Returns the named item from the underlying store.

        Raises KeyError if there is no such item and Inactive if the item
        exists but is not active, unless skip_inactive is True.
        """
        item = self._conn_store.get(name)

        if not item:
            msg = 'No such item `{}` in `{}`'.format(name, sorted(self._conn_store.items))
            logger.warning(msg)
            raise KeyError(msg)

        # De Morgan's form of: not is_active and not skip_inactive
        if not (item.config.is_active or skip_inactive):
            msg = '`{}` is inactive'.format(name)
            logger.warning(msg)
            raise Inactive(msg)

        return item

    def __getitem__(self, name):
        # Dictionary-style access never skips inactive items
        return self.get(name, False)

    def create(self, name, msg, *args, **extra):
        # Note that positional args beyond msg are accepted but not forwarded
        return self._conn_store.create(name, msg, **extra)

    def edit(self, name, msg, **extra):
        return self._conn_store.edit(name, msg, **extra)

    def delete(self, name):
        return self._conn_store.delete(name)

    def change_password(self, config):
        return self._conn_store.change_password(config)
# ################################################################################################################################
class BaseStore:
    """ A base class for connection/query stores.
    """
    def __init__(self):
        # Maps item names to their Bunch-based definitions
        self.items = {}

        # gevent
        from gevent.lock import RLock
        self.lock = RLock()

    def __getitem__(self, name):
        # Raises KeyError if the item does not exist
        return self.items[name]

    def get(self, name):
        # Returns None if the item does not exist, unlike __getitem__
        return self.items.get(name)

    def _create(self, name, config, **extra):
        """ Actually adds a new definition, must be called with self.lock held.
        """
        # A copy of the config with the password masked out, safe for logging
        config_no_sensitive = deepcopy(config)

        if 'password' in config:
            config_no_sensitive['password'] = SECRET_SHADOW

        item = Bunch(config=config, config_no_sensitive=config_no_sensitive, is_created=False, impl=None)

        # It's optional
        conn = extra.get('def_', {'conn':None})['conn']

        try:
            logger.debug('Creating `%s`', config_no_sensitive)
            impl = self.create_impl(config, config_no_sensitive, **extra)

            # A closure over the session and the statement so callers can execute
            # the statement later on without holding either object directly.
            def execute(session, statement):
                def execute_impl(**kwargs):

                    # There is no session if the definition's connection was not created
                    if not session:
                        raise Exception('Cannot execute the query without a session')
                    return session.execute(statement, kwargs)
                return execute_impl
            item.execute = execute(conn, impl)

            logger.debug('Created `%s`', config_no_sensitive)

        except Exception:
            # The item is still stored below, only with is_created left as False
            logger.warning('Could not create `%s`, config:`%s`, e:`%s`', name, config_no_sensitive, format_exc())
        else:
            item.impl = impl
            item.is_created = True

            if conn:
                # NOTE(review): a weak proxy - presumably so this store does not keep
                # the underlying connection alive on its own; confirm with callers.
                item.extra = weakref.proxy(conn)
            else:
                item.conn = item.impl

        # Stored whether or not creation succeeded
        self.items[name] = item

        return item

    def create(self, name, config, **extra):
        """ Adds a new connection definition.
        """
        with self.lock:
            return self._create(name, config, **extra)

    def _delete(self, name):
        """ Actually deletes a definition. Must be called with self.lock held.
        """
        try:
            if not name in self.items:
                raise Exception('No such name `{}` among `{}`'.format(name, self.items.keys()))
            self.delete_impl()
        except Exception:
            logger.warning('Error while deleting `%s`, e:`%s`', name, format_exc())
        finally:
            # Remove the entry regardless of whether delete_impl succeeded
            if name in self.items:
                del self.items[name]

    def delete(self, name):
        """ Deletes an existing connection.
        """
        with self.lock:
            self._delete(name)

    def _edit(self, name, config, **extra):
        # Edit is implemented as delete-then-create; must be called with self.lock held
        self._delete(name)
        return self._create(config.name, config, **extra)

    def edit(self, name, config, **extra):
        """ Updates an existing definition, holding self.lock for the duration.
        """
        with self.lock:
            self._edit(name, config, **extra)

    def change_password(self, password_data):
        """ Re-creates the named definition with a new password.
        Note that self.lock is reentrant, so the nested self.edit call is safe.
        """
        with self.lock:
            # This may not exist if change-password is invoked from enmasse before create finished
            item = self.items.get(password_data.name)
            if item:
                new_config = deepcopy(item.config_no_sensitive)
                new_config.password = password_data.password
                self.edit(password_data.name, new_config)

    def create_impl(self):
        # To be implemented by subclasses - builds the actual underlying object
        raise NotImplementedError('Should be overridden by subclasses (BaseStore.create_impl)')

    def delete_impl(self):
        # Subclasses may override it if deleting their objects requires cleanup
        pass # It's OK - sometimes deleting a connection doesn't have to mean doing anything unusual
| 5,220
|
Python
|
.py
| 127
| 32.015748
| 130
| 0.589109
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,060
|
main.py
|
zatosource_zato/code/zato-server/src/zato/server/main.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Monkey-patching modules individually can be about 20% faster,
# or, in absolute terms, instead of 275 ms it may take 220 ms.
from gevent.monkey import patch_builtins, patch_contextvars, patch_thread, patch_time, patch_os, patch_queue, patch_select, \
patch_selectors, patch_signal, patch_socket, patch_ssl, patch_subprocess, patch_sys
# Note that the order of patching matters, just like in patch_all
patch_os()
patch_time()
patch_thread()
patch_sys()
patch_socket()
patch_select()
patch_selectors()
patch_ssl()
patch_subprocess()
patch_builtins()
patch_signal()
patch_queue()
patch_contextvars()
# stdlib
import locale
import logging
import os
import ssl
import sys
from logging.config import dictConfig
# ConcurrentLogHandler - updates stlidb's logging config on import so this needs to stay
try:
import cloghandler # type: ignore
except ImportError:
pass
else:
cloghandler = cloghandler # For pyflakes
# Update logging.Logger._log to make it a bit faster
from zato.common.microopt import logging_Logger_log
from logging import Logger
Logger._log = logging_Logger_log # type: ignore
# YAML
import yaml
# Zato
from zato.common.api import IPC, OS_Env, SERVER_STARTUP, TRACE1, ZATO_CRYPTO_WELL_KNOWN_DATA
from zato.common.crypto.api import ServerCryptoManager
from zato.common.ext.configobj_ import ConfigObj
from zato.common.ipaddress_ import get_preferred_ip
from zato.common.kvdb.api import KVDB
from zato.common.odb.api import ODBManager, PoolStore
from zato.common.repo import RepoManager
from zato.common.simpleio_ import get_sio_server_config
from zato.common.typing_ import cast_
from zato.common.util.api import absjoin, asbool, get_config, get_kvdb_config_for_log, is_encrypted, parse_cmd_line_options, \
register_diag_handlers, store_pidfile
from zato.common.util.env import populate_environment_from_file
from zato.common.util.platform_ import is_linux, is_mac, is_windows
from zato.common.util.open_ import open_r
from zato.server.base.parallel import ParallelServer
from zato.server.ext import zunicorn
from zato.server.ext.zunicorn.app.base import Application
from zato.server.service.store import ServiceStore
from zato.server.startup_callable import StartupCallableTool
from zato.sso.api import SSOAPI
from zato.sso.util import new_user_id, normalize_sso_config
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.typing_ import any_, callable_, dictnone, strintnone
from zato.server.ext.zunicorn.config import Config as ZunicornConfig
callable_ = callable_
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    """ Module-level constants - server.conf keys and the environment variables that may override them.
    """
    # Config keys as they appear in server.conf
    num_threads = 'num_threads'
    bind_host = 'bind_host'
    bind_port = 'bind_port'

    # Environment variables that take precedence over the config file
    Env_Num_Threads = 'Zato_Config_Num_Threads'
    Env_Bind_Host = 'Zato_Config_Bind_Host'
    Env_Bind_Port = 'Zato_Config_Bind_Port'

    # Maps each config key to its corresponding environment variable
    Env_Map = {
        num_threads: Env_Num_Threads,
        bind_host: Env_Bind_Host,
        bind_port: Env_Bind_Port,
    }
# ################################################################################################################################
# ################################################################################################################################
class ZatoGunicornApplication(Application):
    """ A Gunicorn application that embeds ParallelServer as its WSGI app,
    translating entries from server.conf into Gunicorn settings.
    """

    # Provided by the underlying zunicorn Application class
    cfg: 'ZunicornConfig'

    def __init__(
        self,
        zato_wsgi_app:'ParallelServer',
        repo_location:'str',
        config_main:'Bunch',
        crypto_config:'Bunch',
        *args:'any_',
        **kwargs:'any_'
    ) -> 'None':
        self.zato_wsgi_app = zato_wsgi_app
        self.repo_location = repo_location
        self.config_main = config_main
        self.crypto_config = crypto_config

        # Filled in by self.init based on config_main and environment variables
        self.zato_host = ''
        self.zato_port = -1
        self.zato_config = {}
        super(ZatoGunicornApplication, self).__init__(*args, **kwargs)

# ################################################################################################################################

    def get_config_value(self, config_key:'str') -> 'strintnone':
        """ Returns a configuration value for the key, with environment variables
        taking precedence over entries from the main config. Returns None if neither is set.
        """

        # First, map the config key to its corresponding environment variable
        env_key = ModuleCtx.Env_Map[config_key]

        # First, check if we have such a value among environment variables ..
        if value := os.environ.get(env_key):

            # .. if yes, we can return it now ..
            return value

        # .. we are here if there was no such environment variable ..
        # .. but maybe there is a config key on its own ..
        if value := self.config_main.get(config_key): # type: ignore

            # ..if yes, we can return it ..
            return value # type: ignore

        # .. we are here if we have nothing to return, so let's do it explicitly.
        return None

# ################################################################################################################################

    def init(self, *ignored_args:'any_', **ignored_kwargs:'any_') -> 'None':
        """ Translates Zato's own configuration into Gunicorn settings on self.cfg.
        """
        self.cfg.set('post_fork', self.zato_wsgi_app.post_fork) # Initializes a worker
        self.cfg.set('on_starting', self.zato_wsgi_app.on_starting) # Generates the deployment key
        self.cfg.set('before_pid_kill', self.zato_wsgi_app.before_pid_kill) # Cleans up before the worker exits
        self.cfg.set('worker_exit', self.zato_wsgi_app.worker_exit) # Cleans up after the worker exits

        # Carry gunicorn_* options over to Gunicorn, stripping the prefix;
        # everything else is kept in self.zato_config.
        for k, v in self.config_main.items():
            if k.startswith('gunicorn') and v:
                k = k.replace('gunicorn_', '')
                if k == 'bind':
                    if not ':' in v:
                        raise ValueError('No port found in main.gunicorn_bind')
                    else:
                        host, port = v.split(':')
                        self.zato_host = host
                        self.zato_port = port
                self.cfg.set(k, v)
            else:
                # Deployment lock values are always integers
                if 'deployment_lock' in k:
                    v = int(v)
                self.zato_config[k] = v

        # Override pre-3.2 names with non-gunicorn specific ones ..

        # .. number of processes / threads ..
        if num_threads := self.get_config_value('num_threads'):
            self.cfg.set('workers', num_threads)

        # .. what interface to bind to ..
        if bind_host := self.get_config_value('bind_host'): # type: ignore
            self.zato_host = bind_host

        # .. what is our main TCP port ..
        if bind_port := self.get_config_value('bind_port'): # type: ignore
            self.zato_port = bind_port

        # .. now, set the bind config value once more in self.cfg ..
        # .. because it could have been overwritten via bind_host or bind_port ..
        bind = f'{self.zato_host}:{self.zato_port}'
        self.cfg.set('bind', bind)

        for name in('deployment_lock_expires', 'deployment_lock_timeout'):
            setattr(self.zato_wsgi_app, name, self.zato_config[name])

        # TLS is optional and configured under the crypto part of the config
        if asbool(self.crypto_config.use_tls):
            self.cfg.set('ssl_version', getattr(ssl, 'PROTOCOL_{}'.format(self.crypto_config.tls_protocol)))
            self.cfg.set('ciphers', self.crypto_config.tls_ciphers)
            self.cfg.set('cert_reqs', getattr(ssl, 'CERT_{}'.format(self.crypto_config.tls_client_certs.upper())))
            self.cfg.set('ca_certs', absjoin(self.repo_location, self.crypto_config.ca_certs_location))
            self.cfg.set('keyfile', absjoin(self.repo_location, self.crypto_config.priv_key_location))
            self.cfg.set('certfile', absjoin(self.repo_location, self.crypto_config.cert_location))
            self.cfg.set('do_handshake_on_connect', True)

        self.zato_wsgi_app.has_gevent = 'gevent' in self.cfg.settings['worker_class'].value

    def load(self):
        """ Returns the WSGI callable that Gunicorn workers serve.
        """
        return self.zato_wsgi_app.on_wsgi_request
# ################################################################################################################################
def get_bin_dir() -> 'str':
    """ Returns the directory the Python executable runs from,
    i.e. the one with the py or python.exe command.
    """
    return os.path.dirname(sys.executable)
# ################################################################################################################################
def get_code_dir(bin_dir:'str') -> 'str':
    """ Builds the path to code_dir, the directory with our code,
    as opposed to the directory where the server is.
    """
    # On Linux and Mac the code lives one level above bin_dir,
    # on other systems (Windows) it is three levels above.
    up = ['..'] if (is_linux or is_mac) else ['..', '..', '..']
    return os.path.abspath(os.path.join(bin_dir, *up))
# ################################################################################################################################
def get_util_dir(code_dir:'str') -> 'str':
    """ Returns the path to the 'util' directory under code_dir.
    """
    return os.path.join(code_dir, 'util')
# ################################################################################################################################
def get_env_manager_base_dir(code_dir:'str') -> 'str':
    """ Returns the base directory for the environment manager - on Windows
    it is the bundled Python directory, elsewhere it is code_dir itself.
    """
    if not is_windows:
        return code_dir
    return os.path.join(code_dir, 'bundle-ext', 'python-windows')
# ################################################################################################################################
def run(base_dir:'str', start_gunicorn_app:'bool'=True, options:'dictnone'=None) -> 'ParallelServer | None':
# Zato
from zato.common.util.cli import read_stdin_data
# Type hints
preferred_address: 'str'
options = options or {}
# Store a pidfile before doing anything else
store_pidfile(base_dir)
# Now, import environment variables and store the variable for later use
if env_file := options.get('env_file', ''):
initial_env_variables = populate_environment_from_file(env_file)
else:
initial_env_variables = []
# For dumping stacktraces
if is_linux:
register_diag_handlers()
# Capture warnings to log files
logging.captureWarnings(True)
#
# Look up the standalone zato_environment.py module to import its manager object.
# The module needs to be standalone because it runs when install.sh does,
# that is, before the entire codebase is compiled, which is why we, in runtime,
# need to add its path here explicitly.
#
bin_dir = get_bin_dir()
code_dir = get_code_dir(bin_dir)
util_dir = get_util_dir(code_dir)
env_manager_base_dir = get_env_manager_base_dir(code_dir)
# .. make it importable ..
sys.path.insert(0, util_dir)
# .. now, we can import the environment manager class ..
from zato_environment import EnvironmentManager # type: ignore
# .. build the object that we now have access to ..
env_manager:'any_' = EnvironmentManager(env_manager_base_dir, bin_dir)
# .. and run the initial runtime setup, based on environment variables.
env_manager.runtime_setup_with_env_variables()
# Start initializing the server now
os.chdir(base_dir)
# We know we don't need warnings because users may explicitly configure no certificate validation.
# We don't want for urllib3 to warn us about it.
import requests as _r
_r.packages.urllib3.disable_warnings() # type: ignore
repo_location = os.path.join(base_dir, 'config', 'repo')
# Configure the logging first, before configuring the actual server.
logging.addLevelName('TRACE1', TRACE1) # type: ignore
logging_conf_path = os.path.join(repo_location, 'logging.conf')
with open_r(logging_conf_path) as f:
logging_config = yaml.load(f, yaml.FullLoader)
dictConfig(logging_config)
logger = logging.getLogger(__name__)
kvdb_logger = logging.getLogger('zato_kvdb')
crypto_manager = ServerCryptoManager(repo_location, secret_key=options['secret_key'], stdin_data=read_stdin_data())
secrets_config = ConfigObj(os.path.join(repo_location, 'secrets.conf'), use_zato=False)
server_config = get_config(repo_location, 'server.conf', crypto_manager=crypto_manager, secrets_conf=secrets_config)
pickup_config = get_config(repo_location, 'pickup.conf')
if server_config.main.get('debugger_enabled'):
import debugpy
debugger_host = server_config.main.debugger_host
debugger_port = server_config.main.debugger_port
logger.info('Debugger waiting for connections on %s:%s', debugger_host, debugger_port)
_ = debugpy.listen((debugger_host, debugger_port))
debugpy.wait_for_client()
sio_config = get_config(repo_location, 'simple-io.conf', needs_user_config=False)
sio_config = get_sio_server_config(sio_config)
sso_config = get_config(repo_location, 'sso.conf', needs_user_config=False)
normalize_sso_config(sso_config)
# Now that we have access to server.conf, greenify libraries required to be made greenlet-friendly,
# assuming that there are any - otherwise do not do anything.
to_greenify = []
for key, value in server_config.get('greenify', {}).items():
if asbool(value):
if not os.path.exists(key):
raise ValueError('No such path `{}`'.format(key))
else:
to_greenify.append(key)
# Go ahead only if we actually have anything to greenify
if to_greenify:
import greenify # type: ignore
greenify.greenify()
for name in to_greenify:
result = greenify.patch_lib(name)
if not result:
raise ValueError('Library `{}` could not be greenified'.format(name))
else:
logger.info('Greenified library `%s`', name)
server_config.main.token = server_config.main.token.encode('utf8')
# Do not proceed unless we can be certain our own preferred address or IP can be obtained.
preferred_address = server_config.preferred_address.get('address') or ''
if not preferred_address:
preferred_address = get_preferred_ip(server_config.main.gunicorn_bind, server_config.preferred_address)
if not preferred_address and not server_config.server_to_server.boot_if_preferred_not_found:
msg = 'Unable to start the server. Could not obtain a preferred address, please configure [bind_options] in server.conf'
logger.warning(msg)
raise Exception(msg)
# Create the startup callable tool as soon as practical
startup_callable_tool = StartupCallableTool(server_config)
# Run the hook before there is any server object created
startup_callable_tool.invoke(SERVER_STARTUP.PHASE.FS_CONFIG_ONLY, kwargs={
'server_config': server_config,
'pickup_config': pickup_config,
'sio_config': sio_config,
'sso_config': sso_config,
'base_dir': base_dir,
})
# Start monitoring as soon as possible
if server_config.get('newrelic', {}).get('config'):
import newrelic.agent # type: ignore
newrelic.agent.initialize(
server_config.newrelic.config, server_config.newrelic.environment or None, server_config.newrelic.ignore_errors or None,
server_config.newrelic.log_file or None, server_config.newrelic.log_level or None)
zunicorn.SERVER_SOFTWARE = server_config.misc.get('http_server_header', 'Apache')
# Store KVDB config in logs, possibly replacing its password if told to
kvdb_config = get_kvdb_config_for_log(server_config.kvdb)
kvdb_logger.info('Main process config `%s`', kvdb_config)
user_locale = server_config.misc.get('locale', None)
if user_locale:
_ = locale.setlocale(locale.LC_ALL, user_locale)
value = 12345
logger.info('Locale is `%s`, amount of %s -> `%s`', user_locale, value, locale.currency(
value, grouping=True))
if server_config.misc.http_proxy:
os.environ['http_proxy'] = server_config.misc.http_proxy
# Basic components needed for the server to boot up
kvdb = KVDB()
odb_manager = ODBManager()
odb_manager.well_known_data = ZATO_CRYPTO_WELL_KNOWN_DATA
sql_pool_store = PoolStore()
# Create it upfront here
server = ParallelServer()
service_store = ServiceStore(
services={},
odb=odb_manager,
server=server,
is_testing=False
)
server.odb = odb_manager
server.service_store = service_store
server.service_store.server = server
server.sql_pool_store = sql_pool_store
server.kvdb = kvdb
server.stderr_path = options.get('stderr_path') or ''
# Assigned here because it is a circular dependency
odb_manager.parallel_server = server
stop_after = options.get('stop_after') or os.environ.get('Zato_Stop_After') or os.environ.get('ZATO_STOP_AFTER')
if stop_after:
stop_after = int(stop_after)
zato_gunicorn_app = ZatoGunicornApplication(server, repo_location, server_config.main, server_config.crypto)
server.has_fg = options.get('fg') or False
server.env_file = env_file
server.env_variables_from_files[:] = initial_env_variables
server.deploy_auto_from = options.get('deploy_auto_from') or ''
server.crypto_manager = crypto_manager
server.odb_data = server_config.odb
server.host = zato_gunicorn_app.zato_host
server.port = zato_gunicorn_app.zato_port
server.use_tls = server_config.crypto.use_tls
server.repo_location = repo_location
server.pickup_config = pickup_config
server.base_dir = base_dir
server.user_conf_location = server.set_up_user_config_location()
server.logs_dir = os.path.join(server.base_dir, 'logs')
server.tls_dir = os.path.join(server.base_dir, 'config', 'repo', 'tls')
server.static_dir = os.path.join(server.base_dir, 'config', 'repo', 'static')
server.json_schema_dir = os.path.join(server.base_dir, 'config', 'repo', 'schema', 'json')
server.fs_server_config = server_config
server.fs_sql_config = get_config(repo_location, 'sql.conf', needs_user_config=False)
server.logging_config = logging_config
server.logging_conf_path = logging_conf_path
server.sio_config = sio_config
server.sso_config = sso_config
server.user_config.update(server_config.user_config_items)
server.preferred_address = preferred_address
server.sync_internal = options['sync_internal']
server.env_manager = env_manager
server.jwt_secret = server.fs_server_config.misc.jwt_secret.encode('utf8')
server.startup_callable_tool = startup_callable_tool
server.stop_after = stop_after # type: ignore
server.is_sso_enabled = server.fs_server_config.component_enabled.sso
if server.is_sso_enabled:
server.sso_api = SSOAPI(server, sso_config, cast_('callable_', None), crypto_manager.encrypt, server.decrypt,
crypto_manager.hash_secret, crypto_manager.verify_hash, new_user_id)
if scheduler_api_password := server.fs_server_config.scheduler.get('scheduler_api_password'):
if is_encrypted(scheduler_api_password):
server.fs_server_config.scheduler.scheduler_api_password = crypto_manager.decrypt(scheduler_api_password)
server.return_tracebacks = asbool(server_config.misc.get('return_tracebacks', True))
server.default_error_message = server_config.misc.get('default_error_message', 'An error has occurred')
# Turn the repo dir into an actual repository and commit any new/modified files
RepoManager(repo_location).ensure_repo_consistency()
# For IPC communication
ipc_password = crypto_manager.generate_secret()
ipc_password = ipc_password.decode('utf8')
# .. this is for our own process ..
server.set_ipc_password(ipc_password)
# .. this is for other processes.
ipc_password_encrypted = crypto_manager.encrypt(ipc_password, needs_str=True)
_ipc_password_key = IPC.Credentials.Password_Key
os.environ[_ipc_password_key] = ipc_password_encrypted
profiler_enabled = server_config.get('profiler', {}).get('enabled', False)
sentry_config = server_config.get('sentry') or {}
dsn = sentry_config.pop('dsn', None)
if dsn:
from raven import Client
from raven.handlers.logging import SentryHandler
handler_level = sentry_config.pop('level')
client = Client(dsn, **sentry_config)
handler = SentryHandler(client=client)
handler.setLevel(getattr(logging, handler_level))
logger = logging.getLogger('')
logger.addHandler(handler)
for name in logging.Logger.manager.loggerDict:
if name.startswith('zato'):
logger = logging.getLogger(name)
logger.addHandler(handler)
if asbool(profiler_enabled):
# Repoze
from repoze.profile import ProfileMiddleware
profiler_dir = os.path.abspath(os.path.join(base_dir, server_config.profiler.profiler_dir))
server.on_wsgi_request = ProfileMiddleware(
server.on_wsgi_request,
log_filename = os.path.join(profiler_dir, server_config.profiler.log_filename),
cachegrind_filename = os.path.join(profiler_dir, server_config.profiler.cachegrind_filename),
discard_first_request = server_config.profiler.discard_first_request,
flush_at_shutdown = server_config.profiler.flush_at_shutdown,
path = server_config.profiler.url_path,
unwind = server_config.profiler.unwind)
os_environ = server_config.get('os_environ', {})
for key, value in os_environ.items():
os.environ[key] = value
# Run the hook right before the Gunicorn-level server actually starts
startup_callable_tool.invoke(SERVER_STARTUP.PHASE.IMPL_BEFORE_RUN, kwargs={
'zato_gunicorn_app': zato_gunicorn_app,
})
# This will optionally enable the RAM usage profiler.
memory_profiler_key = OS_Env.Zato_Enable_Memory_Profiler
enable_memory_profiler = os.environ.get(memory_profiler_key)
if enable_memory_profiler:
# stdlib
from tempfile import mkdtemp
# memray
import memray # type: ignore
# Create an empty directory to store the output in ..
dir_name = mkdtemp(prefix='zato-memory-profiler-')
# .. now, the full path to the memory profile file ..
full_path = os.path.join(dir_name, 'zato-memory-profile.bin')
# .. we can start the memray's tracker now ..
with memray.Tracker(full_path):
logger.info('Starting with memory profiler; output in -> %s', full_path)
# .. finally, start the server
start_wsgi_app(zato_gunicorn_app, start_gunicorn_app)
# .. no memory profiler here.
start_wsgi_app(zato_gunicorn_app, start_gunicorn_app)
# ################################################################################################################################
def start_wsgi_app(zato_gunicorn_app:'any_', start_gunicorn_app:'bool') -> 'None':
    """ Either blocks inside the Gunicorn main loop or hands back the raw WSGI callable,
    depending on whether the caller wants a standalone or an embedded server.
    """
    # Embedded mode - do not start Gunicorn, just expose the WSGI application to the caller.
    if not start_gunicorn_app:
        return zato_gunicorn_app.zato_wsgi_app

    # Standalone mode - this call blocks for as long as the server runs.
    zato_gunicorn_app.run()
# ################################################################################################################################
if __name__ == '__main__':

    # The server's base directory may be given through an environment variable first ..
    env_key_name = 'ZATO_SERVER_BASE_DIR'
    env_server_base_dir = os.environ.get(env_key_name)

    # .. if it is, run in the foreground with a default set of command line options ..
    if env_server_base_dir:
        logging.info('Using environment key %s -> %s', env_key_name, env_server_base_dir)
        server_base_dir = env_server_base_dir
        cmd_line_options = {
            'fg': True,
            'sync_internal': True,
            'secret_key': '',
            'stderr_path': None,
            'env_file': '',
            'stop_after': None,
            'deploy_auto_from': ''
        }

    # .. otherwise, both the base directory and the options come from the command line
    # .. (the options arrive as a single string that parse_cmd_line_options turns into a dict).
    else:
        server_base_dir = sys.argv[1]
        cmd_line_options = sys.argv[2]
        cmd_line_options = parse_cmd_line_options(cmd_line_options)

    # Resolve a relative base directory against the current working directory.
    if not os.path.isabs(server_base_dir):
        server_base_dir = os.path.abspath(os.path.join(os.getcwd(), server_base_dir))

    # We are ready to start the server now.
    _ = run(server_base_dir, options=cmd_line_options)
# ################################################################################################################################
# ################################################################################################################################
| 24,879
|
Python
|
.py
| 479
| 45.039666
| 132
| 0.615952
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,061
|
jwt_cache.py
|
zatosource_zato/code/zato-server/src/zato/server/jwt_cache.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import datetime
from contextlib import closing
from logging import getLogger
# gevent
import gevent
# Zato
from zato.common.odb.model import KVData
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
class JWTCache:
    """ A previous-generation, JWT-only, cache that uses ODB.
    """

# ################################################################################################################################

    def __init__(self, odb, miss_fallback=False, cluster_id=None):
        self.odb = odb
        self.miss_fallback = miss_fallback
        self.cluster_id = cluster_id

# ################################################################################################################################

    def _get_odb_key(self, key):
        """ Returns the key under which a value is kept in ODB, qualified by the cluster's ID if one was given.
        """
        if not self.cluster_id:
            return key
        return 'cluster_id:{}/{}'.format(self.cluster_id, key)

# ################################################################################################################################

    def _odb_put(self, key, value, ttl):
        """ Inserts or updates a key/value pair in ODB with an expiry time that is ttl seconds from now.
        """
        key = self._get_odb_key(key)

        # The underlying columns store bytes, so encode any text objects first.
        key = key.encode('utf8') if isinstance(key, unicode) else key
        value = value.encode('utf8') if isinstance(value, unicode) else value

        with closing(self.odb.session()) as session:
            try:
                # Reuse an existing row for this key if there is one, otherwise start a new one.
                row = session.query(KVData).filter_by(key=key).first()
                if not row:
                    row = KVData()

                utc_now = datetime.datetime.utcnow()

                row.key = key
                row.value = value
                row.creation_time = utc_now
                row.expiry_time = utc_now + datetime.timedelta(seconds=ttl)

                session.add(row)
                session.commit()

            except Exception:
                logger.exception('Unable to put key/value `%r` `%r` (%s %s) into ODB', key, value, type(key), type(value))
                session.rollback()
                raise

# ################################################################################################################################

    def _odb_get(self, key):
        """ Returns the KVData row matching the input key, or None if there is no match.
        """
        odb_key = self._get_odb_key(key)
        with closing(self.odb.session()) as session:
            return session.query(KVData).filter_by(key=odb_key).first()

# ################################################################################################################################

    def put(self, key, value, ttl=None, is_async=True):
        """Put key/value into ODB. If is_async is False, we join the greenlets until they are done.
        otherwise, we do not wait for them to finish.
        """
        greenlet = gevent.spawn(self._odb_put, key, value, ttl)
        if not is_async:
            gevent.joinall([greenlet])

# ################################################################################################################################

    def get(self, key):
        """ Returns a previously stored value by its key.
        """
        return self._odb_get(key)

# ################################################################################################################################

    def delete(self, key):
        """ Deletes a key/value pair from ODB, if it exists at all.
        """
        key = self._get_odb_key(key)
        with closing(self.odb.session()) as session:
            row = session.query(KVData).filter_by(key=key).first()
            if row:
                session.delete(row)
                session.commit()
# ################################################################################################################################
| 4,077
|
Python
|
.py
| 80
| 42.725
| 130
| 0.402223
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,062
|
commands.py
|
zatosource_zato/code/zato-server/src/zato/server/commands.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from dataclasses import dataclass
from datetime import datetime
from inspect import isclass
from logging import getLogger
# gevent
from gevent import spawn
from gevent.subprocess import run as subprocess_run, TimeoutExpired
# Humanize
from humanize import naturalsize
# Zato
from zato.common.marshal_.api import Model
from zato.common.util.platform_ import is_windows
from zato.common.typing_ import cast_
from zato.common.util import new_cid
from zato.common.util.api import get_zato_command
# ################################################################################################################################
# ################################################################################################################################
if 0:
from pathlib import Path
from gevent.subprocess import CompletedProcess
from zato.common.typing_ import any_
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class Config:
    """ Default settings for shell commands executed through CommandsFacade.
    """
    # Whether callback results should be published via pub/sub rather than invoked synchronously
    UsePubSub = False

    # How long a command may run for before it is considered timed out
    Timeout = 600.0 # In seconds

    # Encoding used to decode the command's stdout and stderr
    Encoding = 'utf8'

    # Character substituted for bytes that cannot be decoded
    ReplaceChar = '�' # U+FFFD � REPLACEMENT CHARACTER
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class CommandResult(Model):
    """ Input parameters and execution results of a single shell command invoked through CommandsFacade.
    """
    # Correlation ID of this invocation
    cid: 'str'

    # The full command line that was executed
    command: 'str'

    # Optional callable, service class or service/topic name invoked once the command completes
    callback: 'any_' = None

    # Data passed to the command's standard input
    stdin: 'str' = ''

    # Decoded standard output and standard error of the command
    stdout: 'str' = ''
    stderr: 'str' = ''

    # True if the command was started in the background (via invoke_async)
    is_async: 'bool' = False

    # True if the callback is to be notified through publish/subscribe rather than a direct invocation
    use_pubsub: 'bool' = Config.UsePubSub

    # True only if the command completed with exit code 0
    is_ok: 'bool' = False

    # How long the command may run for, in seconds
    timeout: 'float' = Config.Timeout

    # The command's exit code; -1 means it did not complete
    exit_code: 'int' = -1

    # Length of stdout and stderr in bytes, plus the same values in a human-readable form
    len_stdout_bytes: 'int' = 0
    len_stderr_bytes: 'int' = 0
    len_stdout_human: 'str' = ''
    len_stderr_human: 'str' = ''

    # Encoding used to decode the output and the character substituted for undecodable bytes
    encoding: 'str' = Config.Encoding
    replace_char: 'str' = Config.ReplaceChar

    # Timeout metadata - is_timeout is True if the command did not complete in time
    is_timeout: 'bool' = False
    timeout_msg: 'str' = ''

    # When the command started and ended ..
    start_time: 'datetime' = None # type: ignore
    start_time_iso: 'str' = ''
    end_time: 'datetime' = None # type: ignore
    end_time_iso: 'str' = ''

    # .. and how long it took, both as a human-readable string and in seconds.
    total_time: 'str' = ''
    total_time_sec: 'float' = -1.0
# ################################################################################################################################
# ################################################################################################################################
class CommandsFacade:
    """ An accessor object through which shell commands can be invoked.
    """
    # The server whose publish/invoke methods are used to run callbacks
    server: 'ParallelServer'

    def init(self, server:'ParallelServer') -> 'None':
        """ Stores a reference to the server - called once, before the facade is first used.
        """
        self.server = server

    def _append_time_details(self, out:'CommandResult') -> 'None':
        """ Fills in the end time and total duration; assumes out.start_time was already set by _run.
        """
        # .. compute the command's end time ..
        out.end_time = datetime.utcnow()
        out.end_time_iso = out.end_time.isoformat()

        total_time = (out.end_time - out.start_time)
        out.total_time = str(total_time)
        out.total_time_sec = total_time.total_seconds()

# ################################################################################################################################

    def _append_result_details(
        self,
        out: 'CommandResult',
        result: 'CompletedProcess',
        encoding: 'str',
        replace_char:'str',
    ) -> 'None':
        """ Copies exit code and decoded stdout/stderr from a completed subprocess into the result object.
        """
        # .. populate our output object with basic information ..
        out.command = result.args
        out.exit_code = result.returncode

        # For now, we assume that only exit code 0 means success
        out.is_ok = out.exit_code == 0

        # Try to parse out string objects out of bytes. We assume that this will succeed.
        # But, if that fails, we will repeat, asking Python to give us
        # a string with the Unicode default replace character's instances inside. At this point,
        # we may still want to use our own character instead of the default one.

        # First, stdout ..
        try:
            stdout:'str' = result.stdout.decode(encoding)
        except UnicodeDecodeError:
            stdout:'str' = result.stdout.decode(encoding, 'replace') # type: str
            if replace_char != Config.ReplaceChar:
                stdout = stdout.replace(Config.ReplaceChar, replace_char)

        # .. now, stderr ..
        try:
            stderr:'str' = result.stderr.decode(encoding)
        except UnicodeDecodeError:
            stderr:'str' = result.stderr.decode(encoding, 'replace') # type: str
            if replace_char != Config.ReplaceChar:
                stderr = stderr.replace(Config.ReplaceChar, replace_char)

        out.stdout = stdout
        out.stderr = stderr
        out.encoding = encoding
        out.replace_char = replace_char

# ################################################################################################################################

    def _run(
        self,
        *,
        cid: 'str',
        command: 'str',
        callback: 'any_',
        stdin: 'any_',
        timeout: 'float',
        encoding: 'str',
        replace_char: 'str',
        use_pubsub: 'bool'
    ) -> 'CommandResult':
        """ Runs a shell command synchronously, collects its result, and invokes the optional callback.
        Returns a fully populated CommandResult, both on success and on timeout.
        """
        # Our response to produce
        out = CommandResult()

        # Make sure stdin is a bytes object, as expected by the underlying implementation ..
        # .. make a copy because we are returning it on output 1:1 in a moment ..
        orig_stdin = stdin

        # .. now,
        if not isinstance(stdin, bytes):
            stdin = stdin.encode(encoding)

        # This is taken 1:1 from input parameters
        out.cid = cid
        out.command = command
        out.stdin = orig_stdin
        out.timeout = timeout
        out.encoding = encoding

        # Invoke the subprocess ..
        try:
            # Log what we are about to do ..
            logger.info('Invoking command: `%s` (%s)', command, cid)

            # .. start measuring the response time ..
            out.start_time = datetime.utcnow()
            out.start_time_iso = out.start_time.isoformat()

            # .. this needs to be None if we do not want it
            timeout = cast_('float', timeout or None)

            # .. invoke the command ..
            # .. NOTE(review): shell=True with a caller-supplied string - callers must not pass untrusted input here ..
            result:'CompletedProcess' = subprocess_run(
                command, input=stdin, timeout=timeout, shell=True, capture_output=True)

            # .. if we are here, it means that there was no timeout ..

            # .. store the length of stdout and stderr in bytes, before we convert them to string objects ..
            out.len_stdout_bytes = len(result.stdout)
            out.len_stderr_bytes = len(result.stderr)

            out.len_stdout_human = naturalsize(out.len_stdout_bytes)
            out.len_stderr_human = naturalsize(out.len_stderr_bytes)

            # .. first, append end time-related details ..
            self._append_time_details(out)

            # .. now, populate details of the actual command's result ..
            self._append_result_details(out, result, encoding, replace_char)

        # .. we enter here if the command timed out ..
        except TimeoutExpired as e:

            # .. append details about how long the command took ..
            self._append_time_details(out)

            # .. populate timeout metadata ..
            out.is_timeout = True
            out.timeout_msg = str(e)

            # .. replace ' seconds' with ' sec.' to avoid expressions like '1 seconds' ..
            # .. (we assume that there will be only one such instance in the string) ..
            if out.timeout_msg.endswith(' seconds'):
                out.timeout_msg = out.timeout_msg.replace(' seconds', ' sec.')

            # .. issue information about what happened ..
            logger.warning('Timeout: %s (%s)', out.timeout_msg, cid)

        # .. we get here only if there was no timeout ..
        else:
            logger.info('Command `%s` completed in %s, exit_code -> %s; len-out=%s (%s); len-err=%s (%s); cid -> %s',
                command, out.total_time, out.exit_code,
                out.len_stdout_bytes,
                out.len_stdout_human,
                out.len_stderr_bytes,
                out.len_stderr_human,
                cid)

        # .. no matter if there was a timeout or not, we can invoke our callback, if we have any,
        # .. and return our output now ..
        # .. NOTE(review): a return inside finally suppresses any in-flight exception,
        # .. including one raised by the callback itself - this appears deliberate here.
        finally:

            # .. run the callback ..
            if callback:
                self._run_callback(cid, callback, out, use_pubsub)

            # .. return the output, assuming that the callback did not raise an exception.
            return out

# ################################################################################################################################

    def _run_callback(self, cid:'str', callback:'any_', result:'CommandResult', use_pubsub:'bool') -> 'None':
        """ Delivers the command's result to the callback - a plain callable is invoked directly,
        while a Service subclass or a service/topic name is invoked (or published to) through the server.
        """
        # We need to import it here to avoid circular references
        from zato.server.service import Service

        # Local aliases
        is_service = isclass(callback) and issubclass(callback, Service)

        # This is a function or another callable, but not a service, and we can invoke that callable as is
        if callable(callback) and (not is_service):
            _ = callback(result)
        else:

            # We are going to publish a message to the target (service or topic) by its name ..
            if use_pubsub:
                func = self.server.publish
                data_key = 'data'
                target_key = 'name'
                result = result.to_dict() # type: ignore

            # We are going to invoke the target synchronously
            else:
                func = self.server.invoke
                data_key = 'request'
                target_key = 'service'

            # Extract the service's name ..
            if is_service:
                target = callback.get_name() # type: ignore

            # .. or use it directly ..
            else:
                target = callback

            # Now, we are ready to invoke the callable
            func(**{
                data_key: result,
                target_key: target,
                'cid': cid
            }) # type: ignore

# ################################################################################################################################

    def invoke_async(
        self,
        command:'str',
        *,
        cid: 'str' = '',
        timeout: 'float' = Config.Timeout,
        callback: 'any_' = None,
        stdin: 'str' = '',
        encoding: 'str' = Config.Encoding,
        use_pubsub: 'bool' = Config.UsePubSub,
        replace_char:'str' = Config.ReplaceChar,
    ) -> 'CommandResult':
        """ Runs a command in a background greenlet and returns immediately - the returned
        CommandResult carries only the input metadata; actual results go to the callback.
        """
        # Accept input or create a new Correlation ID
        cid = cid or 'zcma' + new_cid()

        # For consistency, we return the same object that self.invoke does
        out = CommandResult()
        out.cid = cid
        out.command = command
        out.stdin = stdin
        out.timeout = timeout
        out.encoding = encoding
        out.replace_char = replace_char
        out.is_async = True
        out.is_ok = True
        out.use_pubsub = use_pubsub

        # .. run in background ..
        _ = spawn(
            self.invoke, cid=cid, command=command, callback=callback, stdin=stdin, timeout=timeout,
            use_pubsub=use_pubsub, encoding=encoding, replace_char=replace_char)

        # .. and return the basic information to our caller ..
        return out

# ################################################################################################################################

    def invoke(
        self,
        command:'str',
        *,
        cid: 'str' = '',
        timeout: 'float' = Config.Timeout,
        callback: 'any_' = None,
        stdin: 'str' = '',
        encoding: 'str' = Config.Encoding,
        use_pubsub: 'bool' = Config.UsePubSub,
        replace_char:'str' = Config.ReplaceChar,
    ) -> 'CommandResult':
        """ Runs a command synchronously and returns its full result once it completes or times out.
        """
        # Accept input or create a new Correlation ID
        cid = cid or 'zcmd' + new_cid()

        return self._run(
            cid=cid, command=command, callback=callback, stdin=stdin, timeout=timeout, encoding=encoding,
            use_pubsub=use_pubsub, replace_char=replace_char)

# ################################################################################################################################

    def run_zato_cli_async(
        self,
        command: 'str',
        callback: 'any_' = None,
    ) -> 'CommandResult':
        """ Runs a Zato CLI command (e.g. 'enmasse ...') in the background, prefixing it
        with the platform-appropriate zato binary.
        """
        # This will differ depending on our current OS
        zato_bin = 'zato.bat' if is_windows else get_zato_command()

        # Build the full command to execute
        command = f'{zato_bin} {command}'

        return self.invoke_async(command, callback=callback)

# ################################################################################################################################

    def run_enmasse_async(self, file_path:'str | Path') -> 'CommandResult':
        """ Imports an enmasse definition file in the background, logging its output once done.
        """
        command = f'enmasse --import --replace --input {file_path} {self.server.base_dir} --verbose'
        result = self.run_zato_cli_async(command, callback=self._on_enmasse_completed)
        return result

# ################################################################################################################################

    def _on_enmasse_completed(self, result:'CommandResult') -> 'None':
        """ Callback for run_enmasse_async - logs the command's output.
        """
        logger.info('Enmasse stdout -> `%s`', result.stdout.strip())
        logger.info('Enmasse stderr -> `%s`', result.stderr.strip())
# ################################################################################################################################
# ################################################################################################################################
| 14,779
|
Python
|
.py
| 302
| 40.178808
| 130
| 0.482617
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,063
|
api.py
|
zatosource_zato/code/zato-server/src/zato/server/pattern/api.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Import all patterns into one place
from zato.server.pattern.base import FanOut, ParallelExec
from zato.server.pattern.invoke_retry import InvokeRetry
# For flake8 - re-assign the imported names so the imports above are not reported as unused
# and the patterns can be imported from this single module.
FanOut = FanOut
InvokeRetry = InvokeRetry
ParallelExec = ParallelExec
| 392
|
Python
|
.py
| 12
| 31.25
| 64
| 0.786667
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,064
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/pattern/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,065
|
base.py
|
zatosource_zato/code/zato-server/src/zato/server/pattern/base.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from logging import getLogger
# Zato
from zato.common import CHANNEL
from zato.common.util import spawn_greenlet
from zato.server.pattern.model import CacheEntry, InvocationResponse, ParallelCtx, Target
# ################################################################################################################################
if 0:
from gevent.lock import RLock
from zato.server.service import Service
Service = Service
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class ParallelBase:
    """ A base class for most parallel integration patterns. An instance of this class is created for each service instance.
    """
    # Channels that subclasses override - they identify, respectively, the invocation of each target,
    # the per-target callback and the final callback once all targets have responded.
    call_channel = '<parallel-base-call-channel-not-set>'
    on_target_channel = '<parallel-base-target-channel-not-set>'
    on_final_channel = '<parallel-base-final-channel-not-set>'

    # Whether on-final callbacks should run once all targets have responded
    needs_on_final = False

    def __init__(self, source, cache, lock):
        # type: (Service, dict, RLock) -> None
        self.source = source
        self.cache = cache
        self.lock = lock
        self.cid = source.cid

# ################################################################################################################################

    def _invoke(self, ctx):
        # type: (ParallelCtx)
        """ Stores execution metadata in the cache and invokes all targets asynchronously.
        Runs in a background greenlet spawned by self.invoke.
        """
        # Store metadata about our invocation ..
        with self.lock:

            # .. create a new entry ..
            entry = CacheEntry()
            entry.cid = ctx.cid
            entry.req_ts_utc = ctx.req_ts_utc
            entry.len_targets = len(ctx.target_list)
            entry.remaining_targets = entry.len_targets
            entry.target_responses = []
            entry.final_responses = {}
            entry.on_target_list = ctx.on_target_list
            entry.on_final_list = ctx.on_final_list

            # .. and add it to the cache.
            self.cache[ctx.cid] = entry

        # Now that metadata is stored, we can actually invoke each of the services from our list of targets.
        for item in ctx.target_list: # type: Target
            self.source.invoke_async(item.name, item.payload, channel=self.call_channel, cid=ctx.cid)

# ################################################################################################################################

    def invoke(self, targets, on_final, on_target=None, cid=None, _utcnow=datetime.utcnow):
        """ Invokes targets collecting their responses, can be both as a whole or individual ones,
        and executes callback(s).
        """
        # type: (dict, list, list, str, object) -> None

        # Establish what our CID is ..
        cid = cid or self.cid

        # .. set up targets to invoke ..
        target_list = []
        for target_name, payload in targets.items():
            target = Target()
            target.name = target_name
            target.payload = payload
            target_list.append(target)

        # .. create an execution context ..
        ctx = ParallelCtx()
        ctx.cid = cid
        ctx.req_ts_utc = _utcnow()
        ctx.source_name = self.source.name
        ctx.target_list = target_list

        # .. on-final is always available ..
        ctx.on_final_list = [on_final] if isinstance(on_final, str) else on_final

        # .. but on-target may be None ..
        if on_target:
            ctx.on_target_list = [on_target] if isinstance(on_target, str) else on_target

        # .. invoke our implementation in background ..
        try:
            spawn_greenlet(self._invoke, ctx)
        except Exception:
            # Explicitly ignore any exception caught - this is because we are catching
            # deeper in the call stack to provide it to callback services so we do not want
            # to raise it here too.
            pass

        # .. and return the CID to the caller.
        return cid

# ################################################################################################################################

    def on_call_finished(self, invoked_service, response, exception, _utcnow=datetime.utcnow):
        # type: (Service, object, Exception, object)
        """ Called once per target's response - records the response, runs per-target callbacks
        and, when the last target has responded, runs the final callbacks and cleans up the cache.
        """
        # Update metadata about the current parallel execution under a server-wide lock ..
        with self.lock:

            # .. find our cache entry ..
            entry = self.cache.get(invoked_service.cid) # type: CacheEntry

            # .. exit early if we cannot find the entry for any reason ..
            if not entry:
                logger.warning('No such parallel cache key `%s`', invoked_service.cid)
                return

            # .. alright, we can proceed ..
            else:

                # .. update the number of targets already invoked ..
                entry.remaining_targets -= 1

                # .. build information about the response that we have ..
                invocation_response = InvocationResponse()
                invocation_response.cid = invoked_service.cid
                invocation_response.req_ts_utc = entry.req_ts_utc
                invocation_response.resp_ts_utc = _utcnow()
                invocation_response.response = response
                invocation_response.exception = exception
                invocation_response.ok = False if exception else True
                invocation_response.source = self.source.name
                invocation_response.target = invoked_service.name

                # For pre-Zato 3.2 compatibility, callbacks expect dicts on input.
                dict_payload = {
                    'source': invocation_response.source,
                    'target': invocation_response.target,
                    'response': invocation_response.response,
                    'req_ts_utc': invocation_response.req_ts_utc.isoformat(),
                    'resp_ts_utc': invocation_response.resp_ts_utc.isoformat(),
                    'ok': invocation_response.ok,
                    'exception': invocation_response.exception,
                    'cid': invocation_response.cid,
                }

                # .. add the received response to the list of what we have so far ..
                entry.target_responses.append(dict_payload)

                # .. invoke any potential on-target callbacks ..
                if entry.on_target_list:

                    # Updates the dictionary in-place
                    dict_payload['phase'] = 'on-target'

                    for on_target_item in entry.on_target_list: # type: str
                        invoked_service.invoke_async(
                            on_target_item, dict_payload, channel=self.on_target_channel, cid=invoked_service.cid)

                # .. check if this was the last service that we were waiting for ..
                if entry.remaining_targets == 0:

                    # .. if so, run the final callback services if it is required in our case ..
                    if self.needs_on_final:
                        if entry.on_final_list:

                            # This message is what all the on-final callbacks
                            # receive in their self.request.payload attribute.
                            on_final_message = {
                                'phase': 'on-final',
                                'source': invocation_response.source,
                                'req_ts_utc': entry.req_ts_utc,
                                'on_target': entry.on_target_list,
                                'on_final': entry.on_final_list,
                                'data': entry.target_responses,
                            }

                            for on_final_item in entry.on_final_list: # type: str
                                invoked_service.invoke_async(
                                    on_final_item, on_final_message,
                                    channel=self.on_final_channel, cid=invoked_service.cid)

                    # .. now, clean up by deleting the current entry from cache.
                    # Note that we use None in the unlikely case it is already deleted,
                    # although this should not happen because we are the only piece of code holding this lock.
                    self.cache.pop(invoked_service.cid, None)
# ################################################################################################################################
# ################################################################################################################################
class ParallelExec(ParallelBase):
    """ Invokes targets in parallel, running an on-target callback for each response received.
    There is no final callback once all targets complete (needs_on_final stays False).
    """
    call_channel = CHANNEL.PARALLEL_EXEC_CALL
    on_target_channel = CHANNEL.PARALLEL_EXEC_ON_TARGET

    def invoke(self, targets, on_target, cid=None):
        # This pattern has no on-final callback, hence None in that position.
        return super().invoke(targets, None, on_target, cid)
# ################################################################################################################################
# ################################################################################################################################
class FanOut(ParallelBase):
    """ Invokes targets in parallel with optional per-target callbacks and a final callback
    that runs once all targets have responded (needs_on_final is True).
    """
    call_channel = CHANNEL.FANOUT_CALL
    on_target_channel = CHANNEL.FANOUT_ON_TARGET
    on_final_channel = CHANNEL.FANOUT_ON_FINAL
    needs_on_final = True
# ################################################################################################################################
# ################################################################################################################################
| 9,988
|
Python
|
.py
| 174
| 44.885057
| 130
| 0.482073
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,066
|
model.py
|
zatosource_zato/code/zato-server/src/zato/server/pattern/model.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from typing import List as list_, Optional as optional
# Zato
from zato.common.ext.dataclasses import dataclass
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class Target:
    """ A single service to be invoked as part of a parallel execution. """
    # Name of the service to invoke
    name: str
    # Request payload handed to the service; may be None
    payload: optional[object] = None
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class ParallelCtx:
    """ Execution context for one parallel invocation - the request's metadata, its targets and callbacks. """
    # Correlation ID of the whole parallel execution
    cid: str
    # UTC timestamp of when the request was made
    req_ts_utc: datetime
    # Name of the service that initiated the execution
    source_name: str
    # Services to invoke in parallel
    target_list: list_[Target]
    # Names of per-target and final callback services, if any
    on_target_list: optional[list] = None
    on_final_list: optional[list] = None
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class CacheEntry:
    """ Per-execution bookkeeping kept in the cache while target responses are being collected. """
    # Correlation ID of the execution this entry belongs to
    cid: str
    # UTC timestamp of the original request
    req_ts_utc: datetime
    # How many targets were invoked in total ..
    len_targets: int
    # .. and how many have not responded yet.
    remaining_targets: int
    # Responses collected so far, one dict per target
    target_responses: list
    # Responses from final callbacks
    final_responses: dict
    # Names of per-target and final callback services, if any
    on_target_list: optional[list] = None
    on_final_list: optional[list] = None
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class InvocationResponse:
    """ Details of a single target's response within a parallel execution. """
    # Correlation ID of the execution
    cid: str
    # When the request was made and when this response arrived, both in UTC
    req_ts_utc: datetime
    resp_ts_utc: datetime
    # The target's response, if any
    response: optional[object]
    # An exception raised by the target, if any
    exception: optional[Exception]
    # True only if no exception was raised
    ok: bool
    # Name of the initiating service ..
    source: str
    # .. and of the target that produced this response.
    target: str
# ################################################################################################################################
# ################################################################################################################################
| 2,416
|
Python
|
.py
| 52
| 43.365385
| 130
| 0.323267
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,067
|
invoke_retry.py
|
zatosource_zato/code/zato-server/src/zato/server/pattern/invoke_retry.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from logging import getLogger
from traceback import format_exc
# gevent
from gevent import sleep
# Zato
from zato.common.exception import ZatoException
from zato.common.json_internal import dumps
from zato.common.util.api import new_cid
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.server.service import Service
Service = Service
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
def retry_failed_msg(so_far, retry_repeats, service_name, retry_seconds, orig_cid, e):
    """ Returns a log message describing one failed retry attempt out of the configured total. """
    return f'({so_far}/{retry_repeats}) Retry failed for:`{service_name}`, retry_seconds:`{retry_seconds}`, ' \
        f'orig_cid:`{orig_cid}`, {e.__class__.__name__}:`{e.args}`'
def retry_limit_reached_msg(retry_repeats, service_name, retry_seconds, orig_cid):
    """ Returns a log message stating that all of the configured retries have been exhausted. """
    return f'({retry_repeats}/{retry_repeats}) Retry limit reached for:`{service_name}`, ' \
        f'retry_seconds:`{retry_seconds}`, orig_cid:`{orig_cid}`'
# ################################################################################################################################
# ################################################################################################################################
class NeedsRetry(ZatoException):
    """ Raised to signal that an invocation failed and needs to be retried.
    """
    def __init__(self, cid, inner_exc):
        # cid - correlation ID of the failed invocation
        # inner_exc - the original exception that triggered the retry
        self.cid = cid
        self.inner_exc = inner_exc

    def __repr__(self):
        # Fix: the previous code called format_exc(self.inner_exc) but traceback.format_exc
        # accepts an int limit, not an exception object, and would raise TypeError here.
        return '<{} at {} cid:`{}` inner_exc:`{}`>'.format(self.__class__.__name__, hex(id(self)), self.cid,
            repr(self.inner_exc) if self.inner_exc else None)
# ################################################################################################################################
# ################################################################################################################################
class RetryFailed(ZatoException):
    """ Raised when a retry attempt failed but more attempts still remain.
    """
    def __init__(self, remaining, inner_exc):
        # remaining - how many retry attempts are still left
        # inner_exc - the exception raised by the failed attempt
        self.remaining = remaining
        self.inner_exc = inner_exc

    def __repr__(self):
        # Fix: the previous code called format_exc(self.inner_exc) but traceback.format_exc
        # accepts an int limit, not an exception object, and would raise TypeError here.
        return '<{} at {} remaining:`{}` inner_exc:`{}`>'.format(
            self.__class__.__name__, hex(id(self)), self.remaining, repr(self.inner_exc) if self.inner_exc else None)
# ################################################################################################################################
# ################################################################################################################################
class InvokeRetry:
    """ Provides the invoke-retry pattern that lets one invoke a service with parametrized retries.
    """
    def __init__(self, invoking_service):
        # type: (Service) -> None
        self.invoking_service = invoking_service

# ################################################################################################################################

    def _get_retry_settings(self, target, **kwargs):
        """ Validates retry-related keyword arguments and returns a tuple of
        (async_fallback, callback, callback_context, retry_repeats, retry_seconds, kwargs),
        with minutes normalized to seconds and the retry-specific keys removed from kwargs.
        Raises ValueError on invalid input.
        """
        async_fallback = kwargs.get('async_fallback')
        callback = kwargs.get('callback')
        callback_context = kwargs.get('context')
        retry_repeats = kwargs.get('repeats')
        retry_seconds = kwargs.get('seconds')
        retry_minutes = kwargs.get('minutes')

        if async_fallback:

            # Both a callback service and a number of repeats are required for background retries
            items = ('callback', 'repeats')
            for item in items:
                value = kwargs.get(item)
                if not value:
                    msg = 'Could not invoke `{}`, `{}` was not provided ({})'.format(target, item, value)
                    logger.error(msg)
                    raise ValueError(msg)

        # Exactly one of seconds or minutes must be given - this applies to both
        # the blocking and the background variants.
        if retry_seconds and retry_minutes:
            msg = 'Could not invoke `{}`, only one of seconds:`{}` and minutes:`{}` can be given'.format(
                target, retry_seconds, retry_minutes)
            logger.error(msg)
            raise ValueError(msg)

        if not(retry_seconds or retry_minutes):
            msg = 'Could not invoke `{}`, exactly one of seconds:`{}` or minutes:`{}` must be given'.format(
                target, retry_seconds, retry_minutes)
            logger.error(msg)
            raise ValueError(msg)

        # The callback service is used only with async_fallback, so it is validated
        # only in that case - in a blocking call there may be no callback at all.
        if async_fallback:
            try:
                self.invoking_service.server.service_store.name_to_impl_name[callback]
            except KeyError:
                msg = 'Service:`{}` does not exist, e:`{}`'.format(callback, format_exc())
                logger.error(msg)
                raise ValueError(msg)

        # Get rid of arguments our superclass doesn't understand
        for item in ('async_fallback', 'callback', 'context', 'repeats', 'seconds', 'minutes'):
            kwargs.pop(item, True)

        # Note that internally we use seconds only.
        return async_fallback, callback, callback_context, retry_repeats, retry_seconds or retry_minutes * 60, kwargs

# ################################################################################################################################

    def _invoke_async_retry(self, target, retry_repeats, retry_seconds, orig_cid, call_cid, callback,
        callback_context, args, kwargs, _utcnow=utcnow):
        """ Hands the retry request over to the background retry service and returns its async-invocation result.
        """
        # Request to invoke the background service with ..
        retry_request = {
            'source': self.invoking_service.name,
            'target': target,
            'retry_repeats': retry_repeats,
            'retry_seconds': retry_seconds,
            'orig_cid': orig_cid,
            'call_cid': call_cid,
            'callback': callback,
            'callback_context': callback_context,
            'args': args,
            'kwargs': kwargs,
            'req_ts_utc': _utcnow()
        }

        return self.invoking_service.invoke_async('zato.pattern.invoke-retry.invoke-retry', dumps(retry_request), cid=call_cid)

# ################################################################################################################################

    def invoke_async(self, target, *args, **kwargs):
        """ Invokes the target service in background with retries, returning immediately.
        """
        _, callback, callback_context, retry_repeats, retry_seconds, kwargs = self._get_retry_settings(
            target, **kwargs)

        # Fix: previously kwargs['cid'] was read without a default and a caller
        # that did not provide an explicit CID would get a KeyError.
        kwargs['cid'] = kwargs.get('cid') or new_cid()

        return self._invoke_async_retry(
            target, retry_repeats, retry_seconds, self.invoking_service.cid, kwargs['cid'], callback,
            callback_context, args, kwargs)

# ################################################################################################################################

    def invoke(self, target, *args, **kwargs):
        """ Invokes the target service, retrying the configured number of times if the first attempt fails.
        Depending on async_fallback, retries either block the caller or run in background.
        """
        async_fallback, callback, callback_context, retry_repeats, retry_seconds, kwargs = self._get_retry_settings(
            target, **kwargs)

        # Let's invoke the service and find out if it works, maybe we don't need
        # to retry anything.

        kwargs['cid'] = kwargs.get('cid', new_cid())

        try:
            result = self.invoking_service.invoke(target, *args, **kwargs)
        except Exception:
            logger.warning('Could not invoke:`%s`, cid:`%s`, e:`%s`', target, self.invoking_service.cid, format_exc())

            # How we handle the exception depends on whether the caller wants us
            # to block or prefers if we retry in background.
            if async_fallback:

                # .. invoke the background service and return CID to the caller.
                return self._invoke_async_retry(
                    target, retry_repeats, retry_seconds, self.invoking_service.cid, kwargs['cid'], callback,
                    callback_context, args, kwargs)

            # We are to block while repeating
            else:

                # Repeat the given number of times sleeping for as many seconds as we are told.
                # Fix: track success explicitly - previously a successful retry did not stop
                # the loop (it only decremented the counter on failure, so it could keep
                # re-invoking the target), and a legitimately falsy response was treated
                # as a failure by the `if not result` check.
                remaining = retry_repeats
                result = None
                succeeded = False

                while remaining > 1 and not succeeded:
                    try:
                        result = self.invoking_service.invoke(target, *args, **kwargs)
                        succeeded = True
                    except Exception as e:
                        msg = retry_failed_msg(
                            (retry_repeats-remaining)+1, retry_repeats, target, retry_seconds, self.invoking_service.cid, e)
                        logger.info(msg)
                        sleep(retry_seconds)
                        remaining -= 1

                # OK, give up now, there's nothing more we can do
                if not succeeded:
                    msg = retry_limit_reached_msg(retry_repeats, target, retry_seconds, self.invoking_service.cid)
                    raise ZatoException(self.invoking_service.cid, msg)

        # All good, simply return the response - either from the first attempt or from a retry
        return result
# ################################################################################################################################
# ################################################################################################################################
| 9,976
|
Python
|
.py
| 163
| 51.165644
| 130
| 0.440496
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,068
|
queue.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/queue.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from datetime import datetime, timedelta
from time import sleep
from traceback import format_exc
# gevent
import gevent
from gevent.lock import RLock
from gevent.queue import Empty, Queue
# Zato
from zato.common.api import GENERIC as COMMON_GENERIC
from zato.common.typing_ import cast_
from zato.common.util.config import resolve_name, replace_query_string_items
from zato.common.util.python_ import get_python_id
# ################################################################################################################################
# ################################################################################################################################
if 0:
from logging import Logger
from bunch import Bunch
from zato.common.typing_ import any_, callable_, intnone, strnone
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
_outconn_wsx = COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_WSX
# ################################################################################################################################
# ################################################################################################################################
class _Connection:
    """ Meant to be used as a part of a 'with' block - returns a connection from its queue each time 'with' is entered
    assuming the queue isn't empty.
    """
    # Fix: this attribute was previously declared as `client_queue` although __init__ assigns `self.queue`
    queue: 'Queue'
    conn_name: 'str'
    should_block: 'bool'
    block_timeout: 'intnone'
    client:'any_' = None

    def __init__(
        self,
        client_queue:'Queue',
        conn_name:'str',
        should_block:'bool'=False,
        block_timeout:'intnone'=None
    ) -> 'None':
        self.queue = client_queue
        self.conn_name = conn_name
        self.should_block = should_block
        self.block_timeout = block_timeout

    def __enter__(self) -> 'any_':
        # Fix: the return annotation used to be 'None' although the method returns the pooled client.
        # Try to obtain a client, optionally blocking for up to block_timeout seconds.
        try:
            self.client = self.queue.get(self.should_block, self.block_timeout)
        except Empty:
            self.client = None
            msg = 'No free connections to `{}`'.format(self.conn_name)
            logger.error(msg)
            raise Exception(msg)
        else:
            return self.client

    def __exit__(self, _type:'any_', _value:'any_', _traceback:'any_') -> 'None':
        # Return the client to the queue so other users may obtain it
        if self.client:
            self.queue.put(self.client)
# ################################################################################################################################
# ################################################################################################################################
class ConnectionQueue:
    """ Holds connections to resources. Each time it's called a connection is fetched from its underlying queue
    assuming any connection is still available.
    """
    is_active: 'bool'
    queue: 'Queue'
    queue_build_cap: 'int'            # How many seconds to wait between queue-building status reports
    queue_max_size: 'int'             # Maximum number of connections the queue may hold
    conn_id: 'int'
    conn_name: 'str'
    conn_type: 'str'
    address: 'str'
    add_client_func: 'callable_'      # Callable that creates one client and puts it in the queue
    needs_spawn: 'bool'               # Whether add_client_func should run in its own greenlet
    max_attempts: 'int'
    keep_connecting: 'bool' = True
    is_building_conn_queue: 'bool' = False
    queue_building_stopped: 'bool' = False
    lock: 'RLock'
    logger: 'Logger'

    # How many add_client_func instances are running currently. This value must be updated with self.lock held.
    in_progress_count:'int' = 0

    def __init__(
        self,
        server: 'ParallelServer',
        is_active: 'bool',
        pool_size:'int',
        queue_build_cap:'int',
        conn_id:'int',
        conn_name:'str',
        conn_type:'str',
        address:'str',
        add_client_func:'callable_',
        needs_spawn:'bool'=True,
        max_attempts:'int' = 1234567890
    ) -> 'None':
        self.is_active = is_active
        self.server = server
        self.queue = Queue(pool_size)
        self.queue_max_size = cast_('int', self.queue.maxsize) # Force static typing as we know that it will not be None
        self.queue_build_cap = queue_build_cap
        self.conn_id = conn_id
        self.conn_name = conn_name
        self.conn_type = conn_type
        self.address = address
        self.add_client_func = add_client_func
        self.needs_spawn = needs_spawn
        self.max_attempts = max_attempts
        self.lock = RLock()

        # Mask out potentially sensitive query-string elements of string addresses for logging
        if isinstance(self.address, str): # type: ignore
            self.address_masked = replace_query_string_items(self.server, self.address)
        else:
            self.address_masked = self.address

        # We are ready now
        self.logger = getLogger(self.__class__.__name__)

# ################################################################################################################################

    def __call__(self, should_block:'bool'=False, block_timeout:'intnone'=None) -> '_Connection':
        """ Returns a context manager that borrows one connection from the queue for the duration of a 'with' block. """
        return _Connection(self.queue, self.conn_name, should_block, block_timeout)

# ################################################################################################################################

    def put_client(self, client:'any_') -> 'bool':
        """ Adds a newly built client to the queue, returning True if it was accepted
        and False if the queue was already full.
        """
        with self.lock:
            if self.queue.full():
                is_accepted = False
                msg = 'Skipped adding a superfluous `%s` client to %s (%s)'
                log_func = self.logger.info
            else:
                self.queue.put(client)
                is_accepted = True
                msg = 'Added `%s` client to `%s` (%s)'
                log_func = self.logger.info

            # Log only if the underlying connection definition still exists
            if self.connection_exists():
                log_func(msg, self.conn_name, self.address_masked, self.conn_type)

            return is_accepted

# ################################################################################################################################

    def connection_exists(self) -> 'bool':
        """ Returns True if the connection definition this queue was built for still exists.
        Right now only outgoing WSX connections are actually checked - all other types
        are assumed to always exist.
        """
        # Right now, we check only whether WSX outgoing connections exist
        # and assume that all the other ones always do.
        if self.conn_type != COMMON_GENERIC.ConnName.OutconnWSX:
            return True

        # This may be None during tests ..
        elif not self.server:
            return True

        # .. same as above ..
        elif not getattr(self.server, 'worker_store', None):
            return True

        else:
            # Look our connection ID up among all the generic connections of the worker
            for _ignored_conn_type, value in self.server.worker_store.generic_conn_api.items():
                for _ignored_conn_name, conn_dict in value.items():
                    if conn_dict['id'] == self.conn_id:
                        return True

        # By default, assume that there is no such WSX outconn
        return False

# ################################################################################################################################

    def should_keep_connecting(self):
        """ Returns True as long as building the queue should continue - i.e. the underlying
        connection still exists, nothing requested a stop and the queue is not full yet.
        """
        _connection_exists = self.connection_exists()
        _keep_connecting_flag_is_set = self.keep_connecting
        _queue_is_not_full = not self.queue.full()

        return _connection_exists and _keep_connecting_flag_is_set and _queue_is_not_full

# ################################################################################################################################

    def _build_queue(self) -> 'None':
        """ Monitors the queue-building process, periodically logging progress and spawning
        additional client-building greenlets until the queue is full or building is stopped.
        """
        start = datetime.utcnow()
        build_until = start + timedelta(seconds=self.queue_build_cap)
        suffix = 's ' if self.queue_max_size > 1 else ' '

        try:

            # We are just starting out
            num_attempts = 0
            self.is_building_conn_queue = True

            # NOTE(review): num_attempts is never incremented anywhere in this method,
            # so the max_attempts guard below can seemingly never trigger - confirm.
            while self.should_keep_connecting():

                # If we have reached the limits of attempts ..
                if num_attempts >= self.max_attempts:

                    # .. store a log message ..
                    self.logger.info('Max. attempts reached (%s/%s); quitting -> %s %s -> %s ',
                        num_attempts,
                        self.max_attempts,
                        self.conn_type,
                        self.address_masked,
                        self.conn_name
                    )

                    # .. and exit the loop.
                    return

                gevent.sleep(1)
                now = datetime.utcnow()

                self.logger.info('%d/%d %s clients obtained to `%s` (%s) after %s (cap: %ss)',
                    self.queue.qsize(), self.queue_max_size,
                    self.conn_type, self.address_masked, self.conn_name, now - start, self.queue_build_cap)

                if now >= build_until:

                    # Log the fact that the queue is not full yet
                    self.logger.info('Built %s/%s %s clients to `%s` within %s seconds, sleeping until %s (UTC)',
                        self.queue.qsize(), self.queue.maxsize, self.conn_type, self.address_masked, self.queue_build_cap,
                        datetime.utcnow() + timedelta(seconds=self.queue_build_cap))

                    # Sleep for a predetermined time
                    gevent.sleep(self.queue_build_cap)

                    # Spawn additional greenlets to fill up the queue but make sure not to spawn
                    # more greenlets than there are slots in the queue still available.
                    with self.lock:
                        if self.in_progress_count < self.queue_max_size:
                            self._spawn_add_client_func(self.queue_max_size - self.in_progress_count)

                    start = datetime.utcnow()
                    build_until = start + timedelta(seconds=self.queue_build_cap)

            if self.should_keep_connecting():
                self.logger.info('Obtained %d %s client%sto `%s` for `%s`', self.queue.maxsize, self.conn_type, suffix,
                    self.address_masked, self.conn_name)
            else:
                # What we log will depend on whether we have already built a queue of connections or not ..
                if self.queue.full():
                    msg = 'Built a connection queue to `%s` for `%s`'
                else:
                    msg = 'Skipped building a queue to `%s` for `%s`'

                # .. do log it now ..
                self.logger.info(msg, self.address_masked, self.conn_name)

                # .. indicate that we are not going to continue ..
                self.is_building_conn_queue = False
                self.queue_building_stopped = True

            # If we are here, we are no longer going to build the queue, e.g. if it already fully built.
            self.is_building_conn_queue = False
            return

        except KeyboardInterrupt:
            self.keep_connecting = False
            self.queue_building_stopped = True

# ################################################################################################################################

    def _spawn_add_client_func_no_lock(self, count:'int') -> 'None':
        """ Runs add_client_func `count` times, either in new greenlets or inline,
        updating in_progress_count along the way. Caller must hold self.lock.
        """
        for _x in range(count):
            if self.needs_spawn:
                _ = gevent.spawn(self.add_client_func)
            else:
                self.add_client_func()
            self.in_progress_count += 1

# ################################################################################################################################

    def _spawn_add_client_func(self, count:'int'=1) -> 'None':
        """ Spawns as many greenlets to populate the connection queue as there are free slots in the queue available.
        """
        with self.lock:
            if self.queue.full():
                logger.info('Queue fully prepared -> c:%d (%s %s)', count, self.address_masked, self.conn_name)
                return
            self._spawn_add_client_func_no_lock(count)

# ################################################################################################################################

    def decr_in_progress_count(self) -> 'None':
        """ Signals that one add_client_func invocation has finished. """
        with self.lock:
            self.in_progress_count -= 1

# ################################################################################################################################

    def is_in_progress(self) -> 'bool':
        """ Returns True if any add_client_func invocations are still running. """
        return self.in_progress_count > 0

# ################################################################################################################################

    def build_queue(self) -> 'None':
        """ Spawns greenlets to populate the queue and waits up to self.queue_build_cap seconds until the queue is full.
        If it never is, raises an exception stating so.
        """
        # This call spawns greenlet that populate the queue ..
        self._spawn_add_client_func(self.queue_max_size)

        # .. whereas this call spawns a different greenlet ..
        # .. that waits until all the greenlets above build their connections.
        _ = gevent.spawn(self._build_queue)
# ################################################################################################################################
# ################################################################################################################################
class Wrapper:
    """ Base class for queue-based connections wrappers.
    """
    has_delete_reasons = False
    supports_reconnections = False

    def __init__(self, config:'Bunch', conn_type:'str', server:'ParallelServer') -> 'None':
        self.conn_type = conn_type
        self.config = config
        self.config['name'] = resolve_name(self.config['name'])
        self.config['username_pretty'] = self.config['username'] or '(None)'
        self.server = server
        self.python_id = get_python_id(self)
        self.should_reconnect = True

        # An optional list of all the connections that are currently trying to connect
        # but which are not connected yet, e.g. this will apply to WebSockets.
        self.conn_in_progress_list = []

        # Fix: a dead local assignment `conn_type = self.config.get('type_') or ''` used to be here -
        # it was never read because the queue below receives self.conn_type, so it was removed.
        address = self.config['auth_url']

        self.client = ConnectionQueue(
            server,
            self.config['is_active'],
            self.config['pool_size'],
            self.config['queue_build_cap'],
            self.config['id'],
            self.config['name'],
            self.conn_type,
            address,
            self.add_client,
            self.config.get('needs_spawn', True),
            self.config.get('max_connect_attempts', 1234567890)
        )

        self.delete_requested = False
        self.update_lock = RLock()
        self.logger = getLogger(self.__class__.__name__)

# ################################################################################################################################

    def add_client(self):
        """ Builds one client and adds it to the queue - subclasses are expected to overload this method. """
        logger.warning('Calling Wrapper.add_client which has not been overloaded in a subclass -> %s', self.__class__)

# ################################################################################################################################

    def build_queue(self) -> 'None':
        """ Builds the underlying queue of connections unless the connection definition is inactive. """
        with self.update_lock:
            if self.config['is_active']:
                try:
                    self.client.build_queue()
                except Exception:
                    logger.warning('Could not build client queue `%s`', format_exc())
            else:
                logger.info('Skipped building an inactive connection queue for `%s` (%s)',
                    self.client.conn_name, self.client.conn_type)

    # Not all connection types will be queue-based
    build_wrapper = build_queue

# ################################################################################################################################

    def _get_item_name(self, item:'any_') -> 'str':
        """ Best-effort extraction of a human-readable name from a queue item, used for logging. """
        if hasattr(item, 'get_name'):
            item_name = item.get_name()
            return item_name
        else:
            if config := getattr(item, 'config', None):
                if item_name := config.get('name'):
                    return item_name

        # If we are here, it means that we have no way to extract the name
        # so we simply return a string representation of this item.
        return str(item)

# ################################################################################################################################

    def delete_in_progress_connections(self, reason:'strnone'=None) -> 'None':
        """ Deletes all the connections that are still trying to connect (e.g. WSXClient objects). """
        # These connections are trying to connect (e.g. WSXClient objects)
        if self.conn_in_progress_list:
            for item in self.conn_in_progress_list:
                try:
                    item.delete(reason)
                except Exception as e:
                    item_name = self._get_item_name(item)
                    logger.info('Exception while deleting queue item `%s` -> `%s` -> %s', item_name, e, format_exc())
            self.conn_in_progress_list.clear()

# ################################################################################################################################

    def delete_queue_connections(self, reason:'strnone'=None) -> 'None':
        """ Deletes all the connections that are already connected and sitting in the queue. """
        # These are connections that are already connected
        items = self.client.queue.queue

        for item in items:
            try:
                logger.info('Deleting connection from queue for `%s`', self.config['name'])

                # Some connections (e.g. LDAP) want to expose .delete to user API which conflicts with our own needs.
                delete_func = getattr(item, 'zato_delete_impl', None)

                # A delete function is optional which is why we need this series of checks
                if delete_func:
                    delete_func = cast_('callable_', delete_func)
                else:
                    delete_func = getattr(item, 'delete', None)

                if delete_func:
                    delete_func(reason) if reason else delete_func()
            except Exception:
                logger.warning('Could not delete connection from queue for `%s`, e:`%s`', self.config['name'], format_exc())

# ################################################################################################################################

    def delete(self, reason:'strnone'=None) -> 'None':
        """ Deletes all connections from queue and sets a flag that disallows this client to connect again.
        """
        with self.update_lock:

            # Tell the client that it is to stop connecting and that it will be deleted in a moment
            self.delete_requested = True
            self.client.keep_connecting = False

            # Delete connections that are still connecting
            self.delete_in_progress_connections(reason)

            # Delete connections that are already established
            self.delete_queue_connections(reason)

            # In case the client was in the process of building a queue of connections,
            # wait until it has stopped doing it.
            if self.client.is_building_conn_queue:
                while not self.client.queue_building_stopped:
                    sleep(1)

                    # If the underlying connection definition no longer exists, there is nothing to wait for
                    if not self.client.connection_exists():
                        return
                    else:
                        self.logger.info('Waiting for queue building stopped flag `%s` (%s %s)',
                            self.client.address, self.client.conn_type, self.client.conn_name)

            # Reset flags that will allow this client to reconnect in the future
            self.delete_requested = False
            self.client.keep_connecting = True
            self.client.queue_building_stopped = False
            self.client.is_building_conn_queue = False
# ################################################################################################################################
| 20,562
|
Python
|
.py
| 384
| 42.820313
| 130
| 0.472716
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,069
|
sftp.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/sftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# pylint: disable=attribute-defined-outside-init
# stdlib
from datetime import date, datetime
from logging import getLogger
from tempfile import NamedTemporaryFile
from time import strptime
from traceback import format_exc
# gevent
from gevent.fileobject import FileObjectThread
# humanize
from humanize import naturalsize
# Zato
from zato.common.api import SFTP
from zato.common.broker_message import OUTGOING
from zato.common.json_internal import loads
from zato.common.sftp import SFTPOutput
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
from typing import List
from zato.server.base.parallel import ParallelServer
# For pyflakes
List = List
ParallelServer = ParallelServer
# ################################################################################################################################
# ################################################################################################################################
class EntryType:
    """ Symbolic names of entry types found in SFTP directory listings. """
    file = 'file'
    directory = 'directory'
    symlink = 'symlink'
    other = 'other'

    @staticmethod
    def get_entry_type(prefix):
        """ Maps the first character of an 'ls -l'-style line to a symbolic entry type,
        falling back to 'other' for anything unrecognized.
        """
        if prefix == '-':
            return EntryType.file
        elif prefix == 'd':
            return EntryType.directory
        elif prefix == 'l':
            return EntryType.symlink
        else:
            return EntryType.other
# ################################################################################################################################
# Tuple of the well-known entry types - an entry matching none of them is considered 'other'
_other_types_check = EntryType.file, EntryType.directory, EntryType.symlink
# ################################################################################################################################
# ################################################################################################################################
class SFTPInfo:
    """ A single entry from an SFTP directory listing along with convenience accessors. """
    __slots__ = 'type', 'name', 'owner', 'group', 'size', 'permissions', 'permissions_oct', 'last_modified'

    def __init__(self):
        # All attributes start out empty and are filled in by the parsing code
        self.type = None            # type: str
        self.name = None            # type: str
        self.size = None            # type: int
        self.owner = None           # type: str
        self.group = None           # type: str
        self.permissions = None     # type: int
        self.permissions_oct = None # type: str
        self.last_modified = None   # type: date

# ################################################################################################################################

    def to_dict(self, skip_last_modified=True):
        # type: (bool) -> dict
        """ Returns a dict representation of this entry. The raw last_modified object
        is included only on request because JSON serializers cannot handle it.
        """
        out = {}
        out['type'] = self.type
        out['name'] = self.name
        out['size'] = self.size
        out['size_human'] = self.size_human
        out['owner'] = self.owner
        out['group'] = self.group
        out['permissions'] = self.permissions
        out['permissions_oct'] = self.permissions_oct
        out['is_file'] = self.is_file
        out['is_directory'] = self.is_directory
        out['is_symlink'] = self.is_symlink
        out['is_other'] = self.is_other
        out['last_modified_iso'] = self.last_modified_iso

        # We do not return it by default so as not to make JSON serializers wonder what to do with a Python object
        if not skip_last_modified:
            out['last_modified'] = self.last_modified

        return out

# ################################################################################################################################

    @property
    def is_file(self):
        # type: () -> bool
        """ True if this entry describes a regular file. """
        return self.type.startswith(EntryType.file)

# ################################################################################################################################

    @property
    def is_directory(self):
        # type: () -> bool
        """ True if this entry describes a directory. """
        return self.type.startswith(EntryType.directory)

# ################################################################################################################################

    @property
    def is_symlink(self):
        # type: () -> bool
        """ True if this entry describes a symbolic link. """
        return self.type.startswith(EntryType.symlink)

# ################################################################################################################################

    @property
    def is_other(self):
        # type: () -> bool
        """ True if this entry is neither a file, a directory nor a symlink. """
        return not self.type.startswith(_other_types_check)

# ################################################################################################################################

    @property
    def last_modified_iso(self):
        # type: () -> str
        """ The last-modified timestamp in ISO-8601 format. """
        return self.last_modified.isoformat()

# ################################################################################################################################

    @property
    def size_human(self):
        # type: () -> str
        """ The entry's size in a human-readable form. """
        return naturalsize(self.size)
# ################################################################################################################################
# ################################################################################################################################
class SFTPIPCFacade:
    """ Provides servers and services with access to SFTP resources.

    Each public method builds a command line for the underlying SFTP tool
    and relays it, via IPC, to the SFTP connector process owned by self.server.
    """
    def __init__(self, server, config):
        # type: (ParallelServer, dict) -> None
        self.cid = None # Not used for now
        self.server = server
        self.config = config

# ################################################################################################################################

    def ping(self):
        # type: () -> None
        """ Pings the remote SFTP end through the connector process.
        """
        # Fixed: the previous type comment claimed a dict parameter although
        # this method takes no arguments.
        self.server.connector_sftp.ping_sftp(**{
            'id': self.config['id']
        })

# ################################################################################################################################

    def _execute(self, data, log_level, raise_on_error=True, _execute_value=OUTGOING.SFTP_EXECUTE.value):
        # type: (str, int, bool, int) -> SFTPOutput
        """ Sends a raw SFTP command to the connector process and returns its parsed output.

        Raises ValueError if the connector reports an error and raise_on_error is True.
        """
        if log_level > 0:
            logger.info('Executing cid:`%s` `%s`', self.cid, data)

        # Note: a previous version built an unused `msg` dict here and also
        # assigned the request to `self.data`, mutating shared facade state
        # for no reason - both removed as dead code.

        # Invoke the connector
        response = self.server.connector_sftp.invoke_sftp_connector({
            'id': self.config['id'],
            'action': _execute_value,
            'cid': self.cid,
            'data': data,
            'log_level': str(log_level)
        })

        # Read in the JSON response - this will always succeed
        response = loads(response.text)

        if log_level > 0:
            logger.info('Response received, cid:`%s`, data:`%s`', self.cid, response)

        # Perhaps we are to raise an exception on an error encountered
        if not response['is_ok']:
            if raise_on_error:
                raise ValueError(response)

        # Return the business response
        out = SFTPOutput.from_dict(response)
        out.strip_stdout_prefix()
        return out

# ################################################################################################################################

    def execute(self, data, log_level=SFTP.LOG_LEVEL.LEVEL0.id, raise_on_error=True):
        # type: (str, int, bool) -> SFTPOutput
        """ Public entry point for executing raw SFTP commands.
        """
        return self._execute(data, log_level, raise_on_error)

# ################################################################################################################################

    def _parse_permissions(self, permissions):
        # type: (str) -> int
        """ Converts a three-character rwx triplet into its octal digit (0-7).
        """
        out = 0

        # Read
        if permissions[0] == 'r':
            out += 4

        # Write
        if permissions[1] == 'w':
            out += 2

        # Execute
        if permissions[2] == 'x':
            out += 1

        return out

# ################################################################################################################################

    def _build_last_modified(self, month, day, year_hour_info):
        # type: (str, str, str) -> datetime
        """ Builds a modification timestamp out of the date tokens of an `ls -l` line.
        """
        # If there is a colon, it means that it is an hour in the format of HH:mm,
        # otherwise it will be actually a year.
        has_year = ':' not in year_hour_info

        # We have a year already so we can immediately build the full date to be parsed
        if has_year:
            mod_date = '{}-{}-{}'.format(year_hour_info, month, day)
            mod_date = strptime(mod_date, '%Y-%b-%d')
            return date(mod_date.tm_year, mod_date.tm_mon, mod_date.tm_mday)

        # .. otherwise, we must find that year ourselves
        else:

            #
            # Given a particular month and day, we need to find in which year it was
            # in relation to current month.
            #
            # For instance (simplified, without taking days into account):
            #
            # If now is November 2183 and month is March    -> year was 2183
            # If now is February 2183 and month is December -> year was 2182
            #
            today = datetime.today()
            mod_date = strptime('{}-{}'.format(month, day), '%b-%d')
            hour, minute = year_hour_info.split(':')
            hour = int(hour)
            minute = int(minute)

            # If modification month is bigger than current one, it means that it must have been already
            # in the previous year. Otherwise, it was in the same year we have today.
            mod_year = today.year - 1 if mod_date.tm_mon > today.month else today.year

            return datetime(mod_year, mod_date.tm_mon, mod_date.tm_mday, hour, minute)

# ################################################################################################################################

    def _parse_ls_line(self, line):
        # type: (str) -> SFTPInfo
        """ Parses a single line of `ls -l` output into an SFTPInfo object.
        """
        # Output to return
        out = SFTPInfo()

        # Map prefix found to entry type
        prefix = line[0] # type: str
        entry_type = EntryType.get_entry_type(prefix)

        permissions = line[1:10] # type: str

        user_permissions = self._parse_permissions(permissions[:3])   # type: int
        group_permissions = self._parse_permissions(permissions[3:6]) # type: int
        other_permissions = self._parse_permissions(permissions[6:])  # type: int

        permissions_oct = '{}{}{}'.format(user_permissions, group_permissions, other_permissions)
        permissions = int(permissions_oct, 8)

        # Move to other entries now
        line = line[10:].strip()

        # Ignore hardlinks / directory entries
        line = line.split(' ', 1)

        # line[0] are the very ignored hardlinks and directory entries
        line = line[1]

        # The next entry is owner
        line = line.split(' ', 1)
        owner = line[0]

        # The next entry is group name
        line = line[1].strip()
        line = line.split(' ', 1)
        group = line[0]

        # Next is the entry size
        line = line[1].strip()
        line = line.split(' ', 1)
        size = line[0]

        # Split by whitespace into individual elements
        line = line[1].strip()
        line = line.split(' ', 4)
        line = [elem for elem in line if elem.strip()]

        # Next two tokens are modification date, but only its month and day will be known here
        month = line[0]
        day = line[1]
        line = line[2:]

        # Next token is either year or hour:minute
        year_time_info = line[0]

        # We can now combine all date elements to build a full modification time
        last_modified = self._build_last_modified(month, day, year_time_info)

        # Anything left must be our entry name
        name = line[1].strip()

        # Populate everything before returning ..
        out.type = entry_type
        out.name = name

        out.permissions = permissions
        out.permissions_oct = permissions_oct

        out.owner = owner
        out.group = group

        out.size = size
        out.last_modified = last_modified

        # .. now we can return
        return out

# ################################################################################################################################

    def _parse_ls_output(self, ls_output):
        # type: (str) -> List[SFTPInfo]
        """ Parses multi-line `ls -l` output, one SFTPInfo per line.
        """
        # Output to produce
        out = []

        #
        # Sample lines to parse - the output is standardized by The Open Group
        #
        # https://pubs.opengroup.org/onlinepubs/9699919799/utilities/ls.html
        #
        # -rw------- 1 user1 group1  336 Mar  3 11:50 filename.txt
        # drwx------ 2 user1 group1 4096 Mar  3 10:02 directony_name
        # lrwxrwxrwx 1 user1 group1    5 Mar  3 13:36 symlink.txt
        # srwxrwxr-x 1 user1 group1    0 Mar  3 11:50 ipc_queue_name
        #
        #
        # Or, if timestamps include the year:
        #
        # -rw-rw-r-- 1 user1 group1 1822 Oct 24  2017 publish1.py
        # drwxr-x---+
        #
        #
        # The format that we expect and parse is thus:
        #
        # 1) Item type             - one character
        # 2) Permissions           - nine characters (SFTP does not return getfacl flags)
        # 3) Hardlinks/dir entries - we do not parse it
        # 4) User name             - N characters
        # 5) Group name            - N characters
        # 6) Size                  - N characters
        # 7) Modification date     - Assuming a file system maintains it
        # 8) Either time or year   - Ditto
        # 9) File name
        #
        for line in ls_output.splitlines():
            parsed = self._parse_ls_line(line)
            out.append(parsed)

        return out

# ################################################################################################################################

    def _get_info(self, remote_path, log_level=0, needs_dot_entries=True, raise_on_error=True):
        # type: (str, int, bool, bool) -> SFTPInfo
        """ Runs `ls` against remote_path and parses the output, if any.
        """
        options = '-la' if needs_dot_entries else '-l'
        out = self.execute('ls {} {}'.format(options, remote_path), log_level, raise_on_error)

        if out.stdout:
            out = self._parse_ls_output(out.stdout) # type: List[SFTPInfo]
            return out

# ################################################################################################################################

    def get_info(self, remote_path, log_level=0, raise_on_error=True):
        """ Returns information about a single remote path or None if it does not exist.
        """
        out = self._get_info(remote_path, log_level, raise_on_error=raise_on_error)
        if out:

            # Replace resolved '.' directory names with what was given on input
            # because they must point to the same thing.
            out = out[0]
            if out.type == EntryType.directory and out.name == '.':
                out.name = remote_path
            return out

# ################################################################################################################################

    def exists(self, remote_path, log_level=0):
        # type: (str, int) -> bool
        # The is_ok flag will be True only if the remote path points to an existing file or directory
        return self.execute('ls {}'.format(remote_path), log_level, raise_on_error=False).is_ok

# ################################################################################################################################

    def is_file(self, remote_path, log_level=0):
        """ True if remote_path is a regular file.
        """
        return self.get_info(remote_path, log_level).is_file

# ################################################################################################################################

    def is_directory(self, remote_path, log_level=0):
        """ True if remote_path is a directory.
        """
        return self.get_info(remote_path, log_level).is_directory

# ################################################################################################################################

    def is_symlink(self, remote_path, log_level=0):
        """ True if remote_path is a symbolic link.
        """
        return self.get_info(remote_path, log_level).is_symlink

# ################################################################################################################################

    def delete(self, remote_path, log_level=0, _info=None):
        """ Deletes remote_path using whichever delete method its type requires.
        """
        info = _info or self.get_info(remote_path, log_level)

        if info.is_directory:
            return self.delete_directory(remote_path, log_level, False)
        elif info.is_file:
            return self.delete_file(remote_path, log_level, False)
        elif info.is_symlink:
            return self.delete_symlink(remote_path, log_level, False)
        else:
            raise ValueError('Unexpected entry type (delete) `{}`'.format(info.to_dict()))

# ################################################################################################################################

    def delete_by_type(self, remote_path, type, log_level):
        """ Deletes remote_path given its already-known entry type.
        """
        # Note: the `type` parameter shadows the builtin but is kept
        # for backward compatibility with keyword callers.
        delete_func_map = {
            EntryType.file: self.delete_file,
            EntryType.directory: self.delete_directory,
            EntryType.symlink: self.delete_symlink,
        }
        func = delete_func_map[type]
        return func(remote_path, log_level, False)

# ################################################################################################################################

    def _remove(self, is_dir, remote_path, log_level):
        """ Issues either `rmdir` (directories) or `rm` (everything else).
        """
        command = 'rmdir' if is_dir else 'rm'
        return self.execute('{} {}'.format(command, remote_path), log_level)

# ################################################################################################################################

    def _ensure_entry_type(self, remote_path, expected, log_level):
        """ Raises ValueError unless remote_path is of the expected entry type.
        """
        info = self.get_info(remote_path, log_level)
        if not info.type == expected:
            raise ValueError('Expected for `{}` to be `{}` instead of `{}` ({})'.format(
                remote_path, expected, info.type, info.to_dict()))

# ################################################################################################################################

    def delete_file(self, remote_path, log_level=0, needs_check=True):
        """ Deletes a remote file, optionally verifying its type first.
        """
        if needs_check:
            self._ensure_entry_type(remote_path, EntryType.file, log_level)
        return self._remove(False, remote_path, log_level)

# ################################################################################################################################

    def delete_directory(self, remote_path, log_level=0, needs_check=True):
        """ Deletes a remote directory, optionally verifying its type first.
        """
        if needs_check:
            self._ensure_entry_type(remote_path, EntryType.directory, log_level)
        return self._remove(True, remote_path, log_level)

# ################################################################################################################################

    def delete_symlink(self, remote_path, log_level=0, needs_check=True):
        """ Deletes a remote symlink, optionally verifying its type first.
        """
        if needs_check:
            self._ensure_entry_type(remote_path, EntryType.symlink, log_level)
        return self._remove(False, remote_path, log_level)

# ################################################################################################################################

    def chmod(self, mode, remote_path, log_level=0):
        """ Changes permissions of a remote path.
        """
        return self.execute('chmod {} {}'.format(mode, remote_path), log_level)

# ################################################################################################################################

    def chown(self, owner, remote_path, log_level=0):
        """ Changes the owner of a remote path.
        """
        return self.execute('chown {} {}'.format(owner, remote_path), log_level)

# ################################################################################################################################

    def chgrp(self, group, remote_path, log_level=0):
        """ Changes the group of a remote path.
        """
        return self.execute('chgrp {} {}'.format(group, remote_path), log_level)

# ################################################################################################################################

    def create_symlink(self, from_path, to_path, log_level=0):
        """ Creates a symbolic link pointing from from_path to to_path.
        """
        return self.execute('ln -s {} {}'.format(from_path, to_path), log_level)

# ################################################################################################################################

    def create_hardlink(self, from_path, to_path, log_level=0):
        """ Creates a hard link pointing from from_path to to_path.
        """
        return self.execute('ln {} {}'.format(from_path, to_path), log_level)

# ################################################################################################################################

    def create_directory(self, remote_path, log_level=0):
        """ Creates a remote directory.
        """
        return self.execute('mkdir {}'.format(remote_path), log_level)

# ################################################################################################################################

    def list(self, remote_path, log_level=0):
        """ Lists the contents of remote_path, without '.' and '..' entries.
        """
        return self._get_info(remote_path, log_level, needs_dot_entries=False)

# ################################################################################################################################

    def move(self, from_path, to_path, log_level=0):
        """ Renames (moves) a remote path.
        """
        return self.execute('rename {} {}'.format(from_path, to_path), log_level)

    rename = move

# ################################################################################################################################

    def download(self, remote_path, local_path, recursive=True, require_file=False, log_level=0):
        """ Downloads remote_path to local_path, optionally recursively.
        """
        # Make sure this is indeed a file
        if require_file:
            self._ensure_entry_type(remote_path, EntryType.file, log_level)

        options = ' -r' if recursive else ''
        return self.execute('get{} {} {}'.format(options, remote_path, local_path), log_level)

# ################################################################################################################################

    def download_file(self, remote_path, local_path, log_level=0):
        """ Downloads a single remote file, verifying first that it is a file.
        """
        return self.download(remote_path, local_path, require_file=True, log_level=log_level)

# ################################################################################################################################

    def read(self, remote_path, mode='r+b', log_level=0):
        """ Returns the contents of a remote file, read via a local temporary file.
        """
        # Download the file to a temporary location ..
        with NamedTemporaryFile(mode, suffix='zato-sftp-read.txt') as local_path:
            self.download_file(remote_path, local_path.name)

            # .. and read it in using a separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path)
            data = thread_file.read()
            thread_file.close()

        return data

# ################################################################################################################################

    def _overwrite_if_needed(self, remote_path, overwrite, log_level):
        """ Deletes remote_path if it exists and overwrite is allowed, otherwise raises ValueError.
        """
        info = self.get_info(remote_path, log_level, raise_on_error=False)

        # The remote location exists so we either need to delete it (overwrite=True) or raise an error (overwrite=False)
        if info:
            if overwrite:
                self.delete_by_type(remote_path, info.type, log_level)
            else:
                raise ValueError('Cannot upload, location `{}` already exists ({})'.format(remote_path, info.to_dict()))

# ################################################################################################################################

    def upload(self, local_path, remote_path, recursive=True, overwrite=False, log_level=0, _needs_overwrite_check=True):
        """ Uploads local_path to remote_path, optionally recursively and/or overwriting the target.
        """
        # Will raise an exception or delete the remote location, depending on what is needed
        if _needs_overwrite_check:
            self._overwrite_if_needed(remote_path, overwrite, log_level)

        options = ' -r' if recursive else ''
        return self.execute('put{} {} {}'.format(options, local_path, remote_path), log_level)

# ################################################################################################################################

    def write(self, data, remote_path, mode='w+b', overwrite=False, log_level=0, encoding='utf8'):
        """ Writes data out to remote_path via a local temporary file.
        """
        # Will raise an exception or delete the remote location, depending on what is needed
        self._overwrite_if_needed(remote_path, overwrite, log_level)

        # Data to be written must be always bytes
        data = data if isinstance(data, bytes) else data.encode(encoding)

        # A temporary file to write data to ..
        with NamedTemporaryFile(mode, suffix='zato-sftp-write.txt') as local_path:

            # .. wrap the file in separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path, mode=mode)
            thread_file.write(data)
            thread_file.flush()

            try:
                # Data written out, we can now upload it to the remote location
                self.upload(local_path.name, remote_path, False, overwrite, log_level, False)
            except Exception:
                logger.warning('Exception in SFTP write method `%s`', format_exc())
            finally:
                # Now we can close the file too
                thread_file.close()
# ################################################################################################################################
# ################################################################################################################################
| 25,909
|
Python
|
.py
| 464
| 47.476293
| 130
| 0.446055
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,070
|
cassandra.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/cassandra.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Cassandra
from cassandra.auth import PlainTextAuthProvider
from cassandra.cluster import Cluster
from cassandra.io.geventreactor import GeventConnection
from cassandra.query import dict_factory
# Zato
from zato.common.broker_message import DEFINITION
from zato.server.connection import BaseConnPoolStore, BasePoolAPI
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
msg_to_stdlib = {
'tls_ca_certs': 'ca_certs',
'tls_client_cert': 'certfile',
'tls_client_priv_key': 'keyfile',
}
# ################################################################################################################################
class CassandraAPI(BasePoolAPI):
    """ API through which connections to Cassandra can be obtained.

    All behaviour is inherited from BasePoolAPI - this subclass only binds
    the generic pool API to Cassandra connections.
    """
# ################################################################################################################################
class CassandraConnStore(BaseConnPoolStore):
    """ Stores connections to Cassandra.
    """
    # Human-readable connection type name, used by the base class
    conn_name = 'Cassandra'

    # Broker events this store subscribes to
    dispatcher_events = [DEFINITION.CASSANDRA_DELETE, DEFINITION.CASSANDRA_EDIT]

    # The event that means a definition is being deleted
    delete_event = DEFINITION.CASSANDRA_DELETE

    # Message namespace the dispatcher listens for
    dispatcher_listen_for = DEFINITION

# ################################################################################################################################

    def create_session(self, name, config, config_no_sensitive):
        """ Builds a new Cassandra session from a connection definition.

        Credentials are optional - PlainTextAuthProvider is used only if
        a username is configured. TLS-related config keys are translated
        to the names the driver expects via the module-level msg_to_stdlib map.
        """
        auth_provider = PlainTextAuthProvider(config.username, config.password) if config.get('username') else None

        # Translate Zato TLS config names to the driver's ssl_options names,
        # including only the keys that are actually set.
        tls_options = {}
        for msg_name, stdlib_name in msg_to_stdlib.items():
            if config.get(msg_name):
                tls_options[stdlib_name] = config[msg_name]

        # GeventConnection is used so the driver cooperates with the gevent event loop
        cluster = Cluster(
            config.contact_points.splitlines(), int(config.port), cql_version=config.cql_version,
            protocol_version=int(config.proto_version), executor_threads=int(config.exec_size),
            auth_provider=auth_provider, ssl_options=tls_options, control_connection_timeout=3,
            connection_class=GeventConnection)

        session = cluster.connect()
        # Rows are returned as dicts rather than the driver's default row type
        session.row_factory = dict_factory
        session.set_keyspace(config.default_keyspace)

        return session

# ################################################################################################################################

    def delete_session_hook(self, session):
        """ Closes our underlying session.
        """
        session.conn.shutdown()
# ################################################################################################################################
| 3,099
|
Python
|
.py
| 58
| 48.310345
| 130
| 0.503313
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,071
|
sap.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/sap.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.util.api import ping_sap
from zato.common.const import SECRETS
from zato.server.connection.queue import Wrapper
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
class SAPWrapper(Wrapper):
    """ Wraps a queue of connections to SAP RFC.
    """
    def __init__(self, config, server):
        # Imported here because not everyone will be using SAP
        import pyrfc
        self.pyrfc = pyrfc

        config.username = config.user # Make Wrapper happy.

        if not hasattr(config, 'is_active'): # On update passwd, we get AttributeError on is_active
            config.is_active = False

        # Display-only URL identifying this connection in logs and queues
        config.auth_url = 'rfc://{user}@{host}:{sysnr}/{client}'.format(**config)
        super(SAPWrapper, self).__init__(config, 'SAP', server)

        # NOTE(review): this logs the entire config object, which may include
        # credentials - confirm this is intended.
        self.logger.info('config: %r', config)

    def add_client(self):
        """ Creates a new SAP RFC connection, pings it (best-effort only)
        and adds it to the underlying connection queue.
        """
        # Decrypt the password if it is encrypted.
        if self.config.password.startswith(SECRETS.PREFIX):
            self.config.password = self.server.decrypt(self.config.password)

        conn = self.pyrfc.Connection(user=self.config.user, passwd=self.config.password,
                ashost=self.config.host, sysnr=self.config.sysnr, client=self.config.client)
        try:
            ping_sap(conn)
        except Exception:
            # A failed ping is logged but the connection is still queued below
            self.logger.warning('Could not ping SAP (%s), e:`%s`', self.config.name, format_exc())

        self.client.put_client(conn)
# ################################################################################################################################
| 2,102
|
Python
|
.py
| 41
| 45.292683
| 130
| 0.550611
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,072
|
pool_wrapper.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/pool_wrapper.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Zato
from zato.distlock import PassThrough as PassThroughLock
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anylist, callable_
from zato.distlock import Lock
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
class ConnectionPoolWrapper:
    """ Holds information about all the connection pools of a given type currently running or being built.
    """
    # Connection type string this wrapper is responsible for
    type_:'str'

    # The pool items currently held
    items:'anylist'

    # Server through which distributed locks are acquired
    server:'ParallelServer'

    def __init__(
        self,
        server:'ParallelServer',
        type_:'str',
    ) -> 'None':
        self.server = server
        self.type_ = type_
        self.items = []

# ################################################################################################################################

    def _lock(self, config_id:'any_') -> 'Lock':
        """ Returns a distributed lock scoped to this wrapper's type and the given config ID.
        """
        # A lock to use when we want to ensure that only one connection pool will be built at a time
        # for a given connection definition. Note that we are using its ID
        # instead of name to let the connection be renamed at any time.
        lock_name = f'ConnectionPoolWrapper.{self.type_}.{config_id}'

        # Acquire a lock that will be held across all the connection pools ..
        return self.server.zato_lock_manager(lock_name, block=1200)

# ################################################################################################################################

    def get_update_lock(self, *, is_zato:'bool') -> 'callable_':
        """ Returns the lock to hold while updating a pool - the real,
        distributed lock for Zato connections, a pass-through otherwise.
        """
        if is_zato:
            return self._lock
        else:
            return PassThroughLock

# ################################################################################################################################

    def get_func_call_lock(self, *, is_zato:'bool') -> 'callable_':
        """ Returns the lock for per-call item operations - the inverse of
        get_update_lock: pass-through for Zato connections, real lock otherwise.
        Presumably this avoids re-acquiring the update lock already held
        for Zato connections - confirm with callers.
        """
        if is_zato:
            return PassThroughLock
        else:
            return self._lock

# ################################################################################################################################

    def add_item(self, *, config_id:'any_', is_zato:'bool', item:'any_') -> 'None':
        """ Appends a new pool item under the appropriate lock.
        """
        _lock = self.get_func_call_lock(is_zato=is_zato)
        with _lock(config_id):
            self.items.append(item)

# ################################################################################################################################

    def delete_all(self, *, config_id:'any_', is_zato:'bool') -> 'None':
        """ Stops and removes all pool items under the appropriate lock.
        """
        _lock = self.get_func_call_lock(is_zato=is_zato)
        with _lock(config_id):

            # First, stop all the items ..
            for item in self.items:
                item.delete()

            # .. now, clear the list.
            self.items.clear()

# ################################################################################################################################

    def has_item(self, *, is_zato:'bool', config_id:'any_', item:'any_') -> 'bool':
        """ Returns True if the given item is currently held by this wrapper.
        """
        _lock = self.get_func_call_lock(is_zato=is_zato)
        with _lock(config_id):
            return item in self.items

# ################################################################################################################################

    def delete_item(self, *, config_id:'any_', is_zato:'bool', item_to_delete:'any_') -> 'None':
        """ Stops the given item if it is held by this wrapper.

        NOTE(review): the item is stopped via .delete() but is not removed
        from self.items here - confirm this is intended.
        """
        _lock = self.get_func_call_lock(is_zato=is_zato)
        with _lock(config_id):
            for item in self.items:
                if item is item_to_delete:
                    item.delete()
# ################################################################################################################################
# ################################################################################################################################
| 4,734
|
Python
|
.py
| 82
| 50.878049
| 130
| 0.35247
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,073
|
confluence_.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/confluence_.py
|
# -*- coding: utf-8 -*-
# atlassian-python-api
from atlassian import Confluence as AtlassianConfluenceClient
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import stranydict
# ################################################################################################################################
# ################################################################################################################################
class ConfluenceClient(AtlassianConfluenceClient):
    """ A thin subclass of the Atlassian Confluence client that is built
    out of Zato-style configuration parameters.
    """
    # Confluence REST API version, always stored as a string
    zato_api_version: 'str'

    # Base address of the Confluence instance
    zato_address: 'str'

    # Credentials - the token is used as the password
    zato_username: 'str'
    zato_token: 'str'

    # Whether this is a Confluence Cloud (as opposed to self-hosted) instance
    zato_is_cloud: 'bool'

    def __init__(
        self,
        *,
        zato_api_version, # type: str
        zato_address,     # type: str
        zato_username,    # type: str
        zato_token,       # type: str
        zato_is_cloud,    # type: bool
    ) -> 'None':

        # We need to make sure that the API version is a string
        # because this is what the underlying Confluence API requires.
        self.zato_api_version = str(zato_api_version)

        self.zato_address = zato_address
        self.zato_username = zato_username
        self.zato_token = zato_token
        self.zato_is_cloud = zato_is_cloud

        # Map our parameters to what the Atlassian client expects
        super().__init__(
            url = self.zato_address,
            username = self.zato_username,
            password = self.zato_token,
            api_version = self.zato_api_version,
            cloud = self.zato_is_cloud,
        )

# ################################################################################################################################

    @staticmethod
    def from_config(config:'stranydict') -> 'ConfluenceClient':
        """ Alternate constructor - builds a client out of a configuration dict
        with keys api_version, address, username, secret and is_cloud.
        """
        return ConfluenceClient(
            zato_api_version = config['api_version'],
            zato_address = config['address'],
            zato_username = config['username'],
            zato_token = config['secret'],
            zato_is_cloud = config['is_cloud'],
        )
# ################################################################################################################################
# ################################################################################################################################
| 2,465
|
Python
|
.py
| 50
| 41.96
| 130
| 0.376353
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,074
|
vault.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/vault.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import basicConfig, getLogger, INFO
from traceback import format_exc
# Bunch
from bunch import Bunch, bunchify
# gevent
from gevent import spawn
from gevent.lock import RLock
# Vault
from hvac import Client
# Zato
from zato.common.api import UNITTEST
from zato.common.vault_ import VAULT
# ################################################################################################################################
basicConfig(level=INFO, format='%(asctime)s - %(message)s')
logger = getLogger(__name__)
# ################################################################################################################################
class VaultResponse:
    """ A convenience class to hold individual attributes of responses from Vault.
    """
    __slots__ = ('action', 'client_token', 'lease_duration', 'accessor', 'policies')

    def __init__(self, action=None, client_token=None, lease_duration=None, accessor=None, policies=None):
        # Simply keep each attribute as given on input
        self.action = action
        self.client_token = client_token
        self.lease_duration = lease_duration
        self.accessor = accessor
        self.policies = policies

    def __str__(self):
        # Render all slots, sorted by name, as name:value pairs
        details = ', '.join('{}:{}'.format(name, getattr(self, name)) for name in sorted(self.__slots__))
        return '<{} at {}, {}>'.format(self.__class__.__name__, hex(id(self)), details)

    @staticmethod
    def from_vault(action, response, main_key='auth', token_key='client_token', has_lease_duration=True):
        """ Builds a VaultResponse out of a dictionary returned from Vault.
        """
        payload = response[main_key]

        out = VaultResponse(action)
        out.client_token = payload[token_key]
        out.accessor = payload['accessor']
        out.policies = payload['policies']

        # Some responses, e.g. token lookups, carry no lease duration
        if has_lease_duration:
            out.lease_duration = payload['lease_duration']

        return out
# ################################################################################################################################
class _Client(Client):
    """ A thin wrapper around hvac.Client providing connectivity to Vault.
    """
    def __init__(self, *args, **kwargs):
        super(_Client, self).__init__(*args, **kwargs)
        # Dispatch table mapping a Zato auth-method ID to its handler
        self._auth_func = {
            VAULT.AUTH_METHOD.TOKEN.id: self._auth_token,
            VAULT.AUTH_METHOD.USERNAME_PASSWORD.id: self._auth_username_password,
            VAULT.AUTH_METHOD.GITHUB.id: self._auth_github,
        }

    def ping(self):
        # NOTE(review): this returns the seal status, i.e. True when the Vault
        # is sealed - confirm callers treat any non-exception response as a
        # successful ping rather than expecting a liveness boolean.
        return self.is_sealed

    def _auth_token(self, client_token, _from_vault=VaultResponse.from_vault):
        """ Authenticates by looking up an existing client token.
        """
        if not client_token:
            raise ValueError('Client token missing on input')
        response = self.lookup_token(client_token)
        # Token lookups nest their data under 'data'/'id' and carry no lease duration
        return _from_vault('auth_token', response, 'data', 'id', False)

    def _auth_username_password(self, username, password, mount_point='userpass', _from_vault=VaultResponse.from_vault):
        """ Authenticates with a username and password against the userpass backend.
        """
        login_response = self.auth_userpass(username, password, mount_point, use_token=False)
        return _from_vault('auth_userpass', login_response)

    def _auth_github(self, gh_token, _from_vault=VaultResponse.from_vault):
        """ Authenticates with a GitHub token.
        """
        login_response = self.auth_github(gh_token, use_token=False)
        return _from_vault('auth_github', login_response)

    def renew(self, client_token, _from_vault=VaultResponse.from_vault):
        """ Renews a client token.
        """
        login_response = self.renew_token(client_token)
        return _from_vault('renew', login_response)

    def authenticate(self, auth_method, *credentials):
        """ Dispatches to the handler registered for auth_method,
        passing through whatever credentials it expects.
        Raises KeyError for unknown auth methods.
        """
        return self._auth_func[auth_method](*credentials)
# ################################################################################################################################
class _VaultConn:
    """ Holds the configuration of a single Vault connection along with
    the client built out of that configuration.
    """
    def __init__(self, name, url, token, service_name, tls_verify, timeout, allow_redirects, client_class=_Client,
            requests_adapter=None):
        self.name = name
        self.url = url
        self.token = token
        self.service_name = service_name
        self.tls_verify = tls_verify
        self.timeout = timeout
        self.allow_redirects = allow_redirects
        # client_class/requests_adapter are parameterized so tests can inject their own
        self.client = client_class(self.url, self.token, verify=self.tls_verify, timeout=self.timeout,
            allow_redirects=self.allow_redirects, adapter=requests_adapter)
# ################################################################################################################################
class VaultConnAPI:
    """ An API through which connections to Vault are established and managed.
    """
    def __init__(self, config_list=None, requests_adapter=None):
        self.config = Bunch()
        self.lock = RLock()
        self.requests_adapter = requests_adapter

        # Build one connection per configuration object given on input, if any.
        for item in (config_list or []):
            self.create(item)

# ################################################################################################################################

    def __getitem__(self, name):
        return self.config[name]

# ################################################################################################################################

    def get(self, name):
        """ Returns a connection by name or None if it does not exist.
        """
        return self.config.get(name)

# ################################################################################################################################

    def get_client(self, name):
        """ Returns the client of a named connection - raises KeyError if there is no such connection.
        """
        return self.config[name].client

# ################################################################################################################################

    def _ping(self, name):
        try:
            self.config[name].client.ping()
        except Exception:
            logger.warning('Could not ping Vault connection `%s`, e:`%s`', name, format_exc())
        else:
            logger.info('Ping OK, Vault connection `%s`', name)

    def ping(self, name):
        # Pings are fire-and-forget, run in a separate greenlet.
        spawn(self._ping, name)

# ################################################################################################################################

    def _create(self, config):
        conn = _VaultConn(
            config.name, config.url, config.token, config.get('service_name'), config.tls_verify, config.timeout,
            config.allow_redirects, requests_adapter=self.requests_adapter)
        self.config[config.name] = conn

        # Unit tests use a sentinel URL that must never be pinged.
        if config.url != UNITTEST.VAULT_URL:
            self.ping(config.name)

# ################################################################################################################################

    def create(self, config):
        """ Thread-safe wrapper around self._create.
        """
        with self.lock:
            self._create(config)

# ################################################################################################################################

    def _delete(self, name):
        try:
            self.config[name].client.close()
        except Exception:
            logger.warning(format_exc())
        finally:
            del self.config[name]

# ################################################################################################################################

    def delete(self, name):
        """ Thread-safe wrapper around self._delete.
        """
        with self.lock:
            self._delete(name)

# ################################################################################################################################

    def edit(self, new_config):
        """ Replaces an existing connection, possibly under a new name, as one locked operation.
        """
        with self.lock:
            self._delete(new_config.old_name)
            self._create(new_config)
# ################################################################################################################################
if __name__ == '__main__':

    # Ad-hoc, manual integration test - requires a live Vault instance
    # listening at the URL configured below, with matching credentials.
    name = 'abc'
    client_token = '5f763fa3-2872-71ab-4e5d-f1398aca6637'
    username = 'user1'
    password = 'secret1'
    gh_token = ''

    config = Bunch()
    config.name = name
    config.url = 'https://localhost:49517'
    config.token = client_token
    config.service_name = 'my.service'
    config.tls_verify = True
    config.timeout = 20
    config.allow_redirects = True

    api = VaultConnAPI([config])

    # Give the background ping greenlet a moment to run.
    import time
    time.sleep(0.1)

    # Token-based authentication
    response1 = api[name].client.authenticate(VAULT.AUTH_METHOD.TOKEN.id, client_token)
    logger.info('Response1 %s', response1)

    # Username/password authentication, then re-authenticate with the resulting token
    response2 = api[name].client.authenticate(VAULT.AUTH_METHOD.USERNAME_PASSWORD.id, username, password)
    logger.info('Response2 %s', response2)

    api[name].client.authenticate(VAULT.AUTH_METHOD.TOKEN.id, response2.client_token)
    api[name].client.renew_token(response2.client_token)

    # GitHub authentication runs only if a GitHub token was provided above.
    if gh_token:
        token3 = api[name].client.authenticate(VAULT.AUTH_METHOD.GITHUB.id, gh_token)
        api[name].client.authenticate(VAULT.AUTH_METHOD.TOKEN.id, token3)
        api[name].client.renew_token(token3)
        logger.info('Token3 %s', token3)

    # Lease renewal
    response = api[name].client.renew(response2.client_token)
    logger.info('Renew 11 %s', bunchify(response))
# ################################################################################################################################
| 9,255
|
Python
|
.py
| 183
| 43.737705
| 130
| 0.504329
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,075
|
salesforce.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/salesforce.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from json import dumps
# requests
from requests import get as request_get, post as requests_post
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anydict, dictnone, stranydict, strnone
# ################################################################################################################################
# ################################################################################################################################
class ModuleCtx:
    # URL paths and HTTP method names used when talking to Salesforce.
    PathLogin = '/services/oauth2/token'        # OAuth2 token endpoint
    PathBase = '/services/data/v{api_version}'  # Versioned base path for data API calls
    MethodGet = 'get'
    MethodPost = 'post'
# ################################################################################################################################
# ################################################################################################################################
# Maps an HTTP method name from ModuleCtx to the corresponding requests function.
method_map = {
    ModuleCtx.MethodGet: request_get,
    ModuleCtx.MethodPost: requests_post,
}
# ################################################################################################################################
# ################################################################################################################################
class SalesforceClient:
    """ A REST client for Salesforce, authenticating through the OAuth2
    username-password flow. A fresh access token is obtained before each
    outgoing request (see _send_request).
    """

    # Connection and credential details, assigned in __init__.
    api_version: 'str'
    address: 'str'
    username: 'str'
    password: 'str'
    consumer_key: 'str'
    consumer_secret: 'str'

    # Assigned once a token has been obtained from Salesforce.
    access_token: 'str'
    http_bearer: 'str'

    def __init__(
        self,
        *,
        api_version, # type: str
        address, # type: str
        username, # type: str
        password, # type: str
        consumer_key, # type: str
        consumer_secret, # type: str
    ) -> 'None':
        self.api_version = api_version
        self.address = address
        self.username = username
        self.password = password
        self.consumer_key = consumer_key
        self.consumer_secret = consumer_secret

# ################################################################################################################################

    @staticmethod
    def from_config(config:'stranydict') -> 'SalesforceClient':
        """ An alternate constructor - builds a client out of a configuration dictionary.
        """
        return SalesforceClient(
            api_version = config['api_version'],
            address = config['address'],
            username = config['username'],
            password = config['password'],
            consumer_key = config['consumer_key'],
            consumer_secret = config['consumer_secret'],
        )

# ################################################################################################################################

    def _invoke_http(
        self,
        *,
        path, # type: str
        data, # type: strnone
        headers, # type: dictnone
        params, # type: dictnone
        method=ModuleCtx.MethodGet, # type: str
    ) -> 'anydict':
        """ Low-level HTTP invoker - builds the full URL, issues the request
        and returns the response deserialized from JSON.
        """
        # Build a full URL now for the incoming request - everything except
        # the login endpoint lives under the versioned base path.
        if path != ModuleCtx.PathLogin:
            path_prefix = ModuleCtx.PathBase.format(api_version=self.api_version)
        else:
            path_prefix = ''
        url = self.address + path_prefix + path

        # Invoke Salesforce now ..
        func = method_map[method]
        response = func(url, data=data, headers=headers, params=params)

        # .. convert the response to JSON ..
        response_json = response.json()

        # .. and return it to our caller.
        return response_json

# ################################################################################################################################

    def ensure_access_token_is_assigned(self):
        """ Logs in to Salesforce and stores the resulting access token plus
        a ready-to-use Bearer header value. Raises an exception if no token
        can be found in the login response.
        """
        # This information is sent in headers ..
        headers = {
            'X-PrettyPrint': '1',
            'Content-Type': 'application/x-www-form-urlencoded',
        }

        # .. while this goes to POST parameters.
        params = {
            'grant_type': 'password',
            'username': self.username,
            'password': self.password,
            'client_id': self.consumer_key,
            'client_secret': self.consumer_secret,
        }

        # .. obtain a JSON response ..
        response_json = self._invoke_http(
            path=ModuleCtx.PathLogin,
            data=None,
            headers=headers,
            params=params,
            method=ModuleCtx.MethodPost
        )

        # .. and try extract the access token now for later use.
        access_token = response_json.get('access_token')

        if not access_token:
            # Fixed: the message previously contained a stray closing
            # parenthesis after the quoted response.
            raise Exception('No Salesforce access token found in response `{}`'.format(response_json))
        else:
            self.access_token = access_token
            self.http_bearer = 'Bearer ' + self.access_token

# ################################################################################################################################

    def _send_request(
        self,
        *,
        path, # type: str
        method, # type: str
        data=None, # type: strnone
        headers=None, # type: dictnone
    ) -> 'any_':
        """ Sends a single authenticated request to Salesforce.
        Note that a new access token is obtained before every request.
        """
        # Before sending the request, make sure we have an access token to authenticate with.
        self.ensure_access_token_is_assigned()

        # Headers required for the request - callers may add to or override them.
        _headers = {
            'X-PrettyPrint': '1',
            'Authorization': self.http_bearer,
            'Content-Type':'application/json'
        }
        if headers:
            _headers.update(headers)

        return self._invoke_http(
            path=path,
            data=data,
            headers=_headers,
            params=None,
            method=method
        )

# ################################################################################################################################

    def get(
        self,
        path, # type: str
    ) -> 'any_':
        """ Issues a GET request to the given data API path.
        """
        return self._send_request(
            path=path,
            method=ModuleCtx.MethodGet,
        )

# ################################################################################################################################

    def post(
        self,
        path, # type: str
        data=None, # type: dictnone
    ) -> 'any_':
        """ Issues a POST request with a JSON-serialized body.
        """
        # NOTE: if data is None, this serializes to the JSON literal "null" -
        # presumably callers always pass a dict; confirm before changing.
        _data = dumps(data)
        return self._send_request(
            path=path,
            data=_data,
            method=ModuleCtx.MethodPost
        )

# ################################################################################################################################

    def ping(self):
        """ Sends a ping-like request to Salesforce by querying the versioned base endpoint.
        """
        # An empty path - _invoke_http prepends the versioned base path itself.
        # Previously the base path was also passed in here, which made it appear
        # twice in the final URL (address + /services/data/vX + /services/data/vX).
        return self._send_request(
            path='',
            method=ModuleCtx.MethodGet
        )
# ################################################################################################################################
# ################################################################################################################################
| 7,368
|
Python
|
.py
| 174
| 34.258621
| 130
| 0.395801
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,076
|
odoo.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/odoo.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from traceback import format_exc
# gevent
from gevent.lock import RLock
# Zato
from zato.common.const import SECRETS
from zato.common.util.api import ping_odoo
from zato.server.connection.queue import ConnectionQueue
# Python 2/3 compatibility
from six import PY2
if PY2:
import openerplib as client_lib
else:
import odoolib as client_lib
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
class OdooWrapper:
    """ Wraps a queue of connections to Odoo.
    """
    def __init__(self, config, server):
        self.config = config
        self.server = server

        # Decrypt the password if it is encrypted. It will be in clear text when the server is starting up
        # but otherwise for connections created in run-time, it will be decrypted.
        password = self.config.password
        if password.startswith(SECRETS.PREFIX):
            self.config.password = self.server.decrypt(password)

        # A display URL with the password masked out.
        self.url = '{protocol}://{user}:******@{host}:{port}/{database}'.format(**self.config)

        # The queue of underlying Odoo connections.
        self.client = ConnectionQueue(
            self.server,
            self.config.is_active,
            self.config.pool_size,
            self.config.queue_build_cap,
            self.config.id,
            self.config.name,
            'Odoo',
            self.url,
            self.add_client
        )

        self.update_lock = RLock()
        self.logger = getLogger(self.__class__.__name__)

    def build_queue(self):
        """ (Re-)builds the underlying connection queue, serialized by the update lock.
        """
        with self.update_lock:
            self.client.build_queue()

    def add_client(self):
        """ Creates a new Odoo connection, pings it and adds it to the queue.
        """
        connection = client_lib.get_connection(hostname=self.config.host, protocol=self.config.protocol, port=self.config.port,
            database=self.config.database, login=self.config.user, password=self.config.password)

        # A failed ping is only logged - the connection is queued up regardless.
        try:
            ping_odoo(connection)
        except Exception:
            logger.warning('Could not ping Odoo (%s), e:`%s`', self.config.name, format_exc())

        self.client.put_client(connection)
# ################################################################################################################################
| 2,603
|
Python
|
.py
| 60
| 36.766667
| 130
| 0.561459
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,077
|
wrapper.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/wrapper.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from traceback import format_exc
# gevent
from gevent import spawn
from gevent.lock import RLock
# Zato
from zato.common.util.api import spawn_greenlet
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# Type hints
import typing
if typing.TYPE_CHECKING:
# Bunch
from bunch import Bunch
# Zato
from zato.server.base.parallel import ParallelServer
# For pyflakes
Bunch = Bunch
ParallelServer = ParallelServer
# ################################################################################################################################
# ################################################################################################################################
class Wrapper:
    """ Base class for non-queue based connections wrappers.
    """
    needs_self_client = False
    wrapper_type = '<undefined-Wrapper>'
    required_secret_attr = None
    required_secret_label = None
    build_if_not_active = False

    def __init__(self, config, server=None):
        # type: (Bunch, ParallelServer)
        self.config = config
        self.config.username_pretty = self.config.username or '(None)'
        self.server = server
        self._impl = None
        self.delete_requested = False
        self.is_connected = False
        self.update_lock = RLock()

    @property
    def client(self):
        # Lazily builds the wrapper, in a blocking manner, on first access.
        if not self._impl:
            self.build_wrapper(False)
        return self._impl

# ################################################################################################################################

    def build_wrapper(self, should_spawn=True):
        """ Builds the underlying connection object unless configuration rules it out.
        """
        # An inactive connection is built only if a subclass explicitly opts in.
        if not self.config.is_active and not self.build_if_not_active:
            logger.info('Skipped building an inactive %s (%s)', self.wrapper_type, self.config.name)
            return

        # A missing required secret also prevents the build.
        if self.required_secret_attr and not self.config[self.required_secret_attr]:
            logger.warning('Skipped building a %s without %s defined (%s)', self.wrapper_type,
                self.required_secret_label, self.config.name)
            return

        # If we are to build the wrapper, it means that we are not connected at this time
        self.is_connected = False

        # Connection is active (config.is_active) so we can try to build it,
        # either in background or in a blocking manner.
        if should_spawn:
            spawn(self._init)
        else:
            self._init_impl()

# ################################################################################################################################

    def _init(self):
        # We use this double spawn method to be able to catch NotImplementedError immediately
        # in case subclasses do not implement self._init_impl.
        try:
            spawn_greenlet(self._init_impl, timeout=45)
        except Exception:
            logger.warning('Could not initialize %s `%s`, e:`%s`', self.wrapper_type, self.config.name, format_exc())

# ################################################################################################################################

    def _init_impl(self):
        raise NotImplementedError('Must be implemented in subclasses (_init; {!r})'.format(self.config))

    def _delete(self):
        raise NotImplementedError('Must be implemented in subclasses (_delete; {!r})'.format(self.config))

    def _ping(self):
        # This is optional - not all connections can be pinged
        raise NotImplementedError('Can be implemented in subclasses (_ping; {!r})'.format(self.config))

# ################################################################################################################################

    def delete(self):
        """ Deletes the underlying connection, unless one was never built in the first place.
        """
        if self.needs_self_client and not self._impl:
            return
        self._delete()

    def ping(self):
        # Accessing self.client builds the wrapper first if needed.
        if self.client:
            self._ping()
# ################################################################################################################################
| 5,177
|
Python
|
.py
| 104
| 42.788462
| 130
| 0.425109
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,078
|
cache.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/cache.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from base64 import b64encode
from logging import getLogger
from traceback import format_exc
# gevent
from gevent import sleep, spawn
from gevent.lock import RLock
# python-memcached
from memcache import Client as _MemcachedClient
# Paste
from paste.util.converters import asbool
# Zato
from zato.cache import Cache as _CyCache
from zato.common.api import CACHE, ZATO_NOT_GIVEN
from zato.common.broker_message import CACHE as CACHE_BROKER_MSG
from zato.common.typing_ import cast_
from zato.common.util.api import parse_extra_into_dict
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems, itervalues
from zato.common.py23_.past.builtins import basestring
from zato.common.py23_ import pickle_dumps
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# Names of all the built-in cache operations that may trigger state synchronization.
builtin_ops = [
    'CLEAR',
    'DELETE',
    'DELETE_BY_PREFIX', 'DELETE_BY_SUFFIX',
    'DELETE_BY_REGEX',
    'DELETE_CONTAINS', 'DELETE_NOT_CONTAINS',
    'DELETE_CONTAINS_ALL', 'DELETE_CONTAINS_ANY',
    'EXPIRE',
    'EXPIRE_BY_PREFIX', 'EXPIRE_BY_SUFFIX',
    'EXPIRE_BY_REGEX',
    'EXPIRE_CONTAINS', 'EXPIRE_NOT_CONTAINS',
    'EXPIRE_CONTAINS_ALL', 'EXPIRE_CONTAINS_ANY',
    'SET',
    'SET_BY_PREFIX', 'SET_BY_SUFFIX',
    'SET_BY_REGEX',
    'SET_CONTAINS', 'SET_NOT_CONTAINS',
    'SET_CONTAINS_ALL', 'SET_CONTAINS_ANY',
]

# Maps each CACHE.STATE_CHANGED constant to the broker message code
# published when that operation changes a cache's state.
builtin_op_to_broker_msg = {}

for builtin_op in builtin_ops:
    common_key = getattr(CACHE.STATE_CHANGED, builtin_op)
    broker_msg_value = getattr(CACHE_BROKER_MSG, 'BUILTIN_STATE_CHANGED_{}'.format(builtin_op)).value
    builtin_op_to_broker_msg[common_key] = broker_msg_value
# ################################################################################################################################
default_get = ZATO_NOT_GIVEN # A singleton to indicate that no default for Cache.get was given on input

# Sentinel values used when no actual key or value is available.
_no_key = 'zato-no-key'
_no_value = 'zato-no-value'
# ################################################################################################################################
class Cache:
""" The cache API through which services access the built-in self.cache objects.
Attribute self.impl is the actual Cython-based cache implementation.
"""
def __init__(self, config):
    self.config = config

    # Callback invoked (in a separate greenlet) after each state-changing
    # operation, used for cross-server cache synchronization.
    self.after_state_changed_callback = self.config.after_state_changed_callback

    # Whether state changes need to be propagated to other servers at all.
    self.needs_sync = self.config.sync_method != CACHE.SYNC_METHOD.NO_SYNC.id

    # The Cython-based cache implementation holding the actual data.
    self.impl = _CyCache(self.config.max_size, self.config.max_item_size, self.config.extend_expiry_on_get,
        self.config.extend_expiry_on_set)

    # Background greenlet - presumably evicts expired entries periodically;
    # _delete_expired is defined further down in this class.
    spawn(self._delete_expired)
# ################################################################################################################################
def __getitem__(self, key):
    """ Dictionary-style access - a slice returns a range of entries,
    any other key returns a single value.
    """
    if not isinstance(key, slice):
        return self.get(key)
    return self.impl.get_slice(key.start, key.stop, key.step)
# ################################################################################################################################
def __setitem__(self, key, value):
    # Dictionary-style assignment - delegates to self.set with its default expiry.
    return self.set(key, value)
# ################################################################################################################################
def __delitem__(self, key):
    # Dictionary-style deletion - self.delete raises KeyError for missing keys by default.
    return self.delete(key)
# ################################################################################################################################
def __contains__(self, key):
    # Membership test, delegated to the underlying implementation.
    return key in self.impl
# ################################################################################################################################
def __len__(self):
    # Number of entries currently stored in the cache.
    return len(self.impl)
# ################################################################################################################################
def get(self, key, default=default_get, details=False) -> 'any_':
    """ Returns a value stored under a given key. If details is True, return metadata about the key as well.
    """
    # If no explicit default was given, fall back to the implementation's own default.
    # NOTE(review): the sentinel is compared with `!=` rather than `is not` - this works
    # as long as ZATO_NOT_GIVEN compares unequal to every genuine default; confirm before changing.
    return self.impl.get(key, default if default != default_get else self.impl.default_get, details)
# ################################################################################################################################
def has_key(self, key, default=default_get, details=False) -> 'bool':
    """ Returns True or False, depending on whether such a key exists in the cache or not.
    """
    value = self.get(key, default=default, details=details)
    # NOTE(review): this assumes a missing key resolves to ZATO_NOT_GIVEN, which holds
    # only if self.impl.default_get (used when no default is given) is ZATO_NOT_GIVEN - confirm.
    return value != ZATO_NOT_GIVEN
# ################################################################################################################################
def get_by_prefix(self, key, details=False, limit=0):
    """ Returns a dictionary of key:value items for keys matching the prefix given on input.
    """
    # Thin delegation to the Cython-based implementation.
    return self.impl.get_by_prefix(key, details, limit)
# ################################################################################################################################
def get_by_suffix(self, key, details=False, limit=0):
    """ Returns a dictionary of key:value items for keys matching the suffix given on input.
    """
    # Thin delegation to the Cython-based implementation.
    return self.impl.get_by_suffix(key, details, limit)
# ################################################################################################################################
def get_by_regex(self, key, details=False, limit=0):
    """ Returns a dictionary of key:value items for keys matching the regular expression given on input.
    """
    # Thin delegation to the Cython-based implementation.
    return self.impl.get_by_regex(key, details, limit)
# ################################################################################################################################
def get_contains(self, key, details=False, limit=0):
    """ Returns a dictionary of key:value items for keys containing the string given on input.
    """
    # Thin delegation to the Cython-based implementation.
    return self.impl.get_contains(key, details, limit)
# ################################################################################################################################
def get_not_contains(self, key, details=False, limit=0):
    """ Returns a dictionary of key:value items for keys that don't contain the string given on input.
    """
    # Thin delegation to the Cython-based implementation.
    return self.impl.get_not_contains(key, details, limit)
# ################################################################################################################################
def get_contains_all(self, key, details=False, limit=0):
    """ Returns a dictionary of key:value items for keys that contain all of elements in the input list of strings.
    """
    # Thin delegation to the Cython-based implementation.
    return self.impl.get_contains_all(key, details, limit)
# ################################################################################################################################
def get_contains_any(self, key, details=False, limit=0):
    """ Returns a dictionary of key:value items for keys that contain at least one of elements in the input list of strings.
    """
    # Thin delegation to the Cython-based implementation.
    return self.impl.get_contains_any(key, details, limit)
# ################################################################################################################################
def set(self, key, value, expiry=0.0, details=False, _OP=CACHE.STATE_CHANGED.SET):
    """ Sets key to a given value. Key must be string/unicode. Value must be an integer or string/unicode.
    Expiry is in seconds (or a fraction of).
    """
    # meta_ref is filled in by the implementation and later published to other servers.
    meta_ref = {'key':key, 'value':value, 'expiry':expiry} if self.needs_sync else None
    value = self.impl.set(key, value, expiry, details, meta_ref)

    # Propagate the state change asynchronously if synchronization is enabled.
    if self.needs_sync:
        spawn(self.after_state_changed_callback, _OP, self.config.name, meta_ref)

    return value
# ################################################################################################################################
def set_by_prefix(self, key, value, expiry=0.0, return_found=False, details=False, limit=0,
    _OP=CACHE.STATE_CHANGED.SET_BY_PREFIX):
    """ Sets keys matching the prefix of a given value - non-string-like keys are ignored. Prefix must be string/unicode.
    Value must be an integer or string/unicode. Expiry is in seconds (or a fraction of). Optionally,
    returns all matched keys and their previous values.
    """
    # _now and _any_found are out-parameters filled in by the implementation.
    meta_ref = {'_now': None, '_any_found': False}
    out = self.impl.set_by_prefix(key, value, expiry, False, meta_ref, return_found, limit)

    # NOTE(review): this method gates synchronization on meta_ref['_any_found'] whereas
    # the sibling set_by_*/set_*contains* methods check the returned value instead - confirm which is intended.
    if meta_ref['_any_found'] and self.needs_sync:
        spawn(self.after_state_changed_callback, _OP, self.config.name, {
            'key':key,
            'value':value,
            'expiry':expiry,
            'limit':limit,
            'orig_now':meta_ref['_now']
        })
    return out
# ################################################################################################################################
def set_by_suffix(self, key, value, expiry=0.0, return_found=False, details=False, limit=0,
    _OP=CACHE.STATE_CHANGED.SET_BY_SUFFIX):
    """ Sets keys matching the suffix to a given value - non-string-like keys are ignored. Suffix must be string/unicode.
    Value must be an integer or string/unicode. Expiry is in seconds (or a fraction of). Optionally,
    returns all matched keys and their previous values.
    """
    # _now and _any_found are out-parameters filled in by the implementation.
    meta_ref = {'_now': None, '_any_found': False}
    out = self.impl.set_by_suffix(key, value, expiry, False, meta_ref, return_found, limit)

    # Propagate the change to other servers only if anything was matched.
    if out and self.needs_sync:
        spawn(self.after_state_changed_callback, _OP, self.config.name, {
            'key':key,
            'value':value,
            'expiry':expiry,
            'limit':limit,
            'orig_now':meta_ref['_now']
        })
    return out
# ################################################################################################################################
def set_by_regex(self, key, value, expiry=0.0, return_found=False, details=False, limit=0,
    _OP=CACHE.STATE_CHANGED.SET_BY_REGEX):
    """ Sets value for keys matching the input regular expression - non-string-like keys are ignored.
    Value must be an integer or string/unicode. Expiry is in seconds (or a fraction of). Optionally,
    returns all matched keys and their previous values.
    """
    # _now and _any_found are out-parameters filled in by the implementation.
    meta_ref = {'_now': None, '_any_found': False}
    out = self.impl.set_by_regex(key, value, expiry, False, meta_ref, return_found, limit)

    # Propagate the change to other servers only if anything was matched.
    if out and self.needs_sync:
        spawn(self.after_state_changed_callback, _OP, self.config.name, {
            'key':key,
            'value':value,
            'expiry':expiry,
            'limit':limit,
            'orig_now':meta_ref['_now']
        })
    return out
# ################################################################################################################################
def set_contains(self, key, value, expiry=0.0, return_found=False, details=False, limit=0,
    _OP=CACHE.STATE_CHANGED.SET_CONTAINS):
    """ Sets value for keys containing the input string - non-string-like keys are ignored.
    Value must be an integer or string/unicode. Expiry is in seconds (or a fraction of). Optionally,
    returns all matched keys and their previous values.
    """
    # _now and _any_found are out-parameters filled in by the implementation.
    meta_ref = {'_now': None, '_any_found': False}
    out = self.impl.set_contains(key, value, expiry, False, meta_ref, return_found, limit)

    # Propagate the change to other servers only if anything was matched.
    if out and self.needs_sync:
        spawn(self.after_state_changed_callback, _OP, self.config.name, {
            'key':key,
            'value':value,
            'expiry':expiry,
            'limit':limit,
            'orig_now':meta_ref['_now']
        })
    return out
# ################################################################################################################################
def set_not_contains(self, key, value, expiry=0.0, return_found=False, details=False, limit=0,
    _OP=CACHE.STATE_CHANGED.SET_NOT_CONTAINS):
    """ Sets value for keys that don't contain the input string - non-string-like keys are ignored.
    Value must be an integer or string/unicode. Expiry is in seconds (or a fraction of). Optionally,
    returns all matched keys and their previous values.
    """
    # _now and _any_found are out-parameters filled in by the implementation.
    meta_ref = {'_now': None, '_any_found': False}
    out = self.impl.set_not_contains(key, value, expiry, False, meta_ref, return_found, limit)

    # Propagate the change to other servers only if anything was matched.
    if out and self.needs_sync:
        spawn(self.after_state_changed_callback, _OP, self.config.name, {
            'key':key,
            'value':value,
            'expiry':expiry,
            'limit':limit,
            'orig_now':meta_ref['_now']
        })
    return out
# ################################################################################################################################
def set_contains_all(self, key, value, expiry=0.0, return_found=False, details=False, limit=0,
    _OP=CACHE.STATE_CHANGED.SET_CONTAINS_ALL):
    """ Sets value for keys that contain all elements from the input list - non-string-like keys are ignored.
    Value must be an integer or string/unicode. Expiry is in seconds (or a fraction of). Optionally,
    returns all matched keys and their previous values.
    """
    # _now and _any_found are out-parameters filled in by the implementation.
    meta_ref = {'_now': None, '_any_found': False}
    out = self.impl.set_contains_all(key, value, expiry, False, meta_ref, return_found, limit)

    # Propagate the change to other servers only if anything was matched.
    if out and self.needs_sync:
        spawn(self.after_state_changed_callback, _OP, self.config.name, {
            'key':key,
            'value':value,
            'expiry':expiry,
            'limit':limit,
            'orig_now':meta_ref['_now']
        })
    return out
# ################################################################################################################################
def set_contains_any(self, key, value, expiry=0.0, return_found=False, details=False, limit=0,
    _OP=CACHE.STATE_CHANGED.SET_CONTAINS_ANY):
    """ Sets value for keys that contain at least one of elements from the input list - non-string-like keys are ignored.
    Value must be an integer or string/unicode. Expiry is in seconds (or a fraction of). Optionally,
    returns all matched keys and their previous values.
    """
    # _now and _any_found are out-parameters filled in by the implementation.
    meta_ref = {'_now': None, '_any_found': False}
    out = self.impl.set_contains_any(key, value, expiry, False, meta_ref, return_found, limit)

    # Propagate the change to other servers only if anything was matched.
    if out and self.needs_sync:
        spawn(self.after_state_changed_callback, _OP, self.config.name, {
            'key':key,
            'value':value,
            'expiry':expiry,
            'limit':limit,
            'orig_now':meta_ref['_now']
        })
    return out
# ################################################################################################################################
def delete(self, key, raise_key_error=True, _OP=CACHE.STATE_CHANGED.DELETE):
""" Deletes a cache entry by its key.
"""
try:
value = self.impl.delete(key)
except KeyError:
if raise_key_error:
raise
else:
if self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {'key':key})
return value
# ################################################################################################################################
def delete_by_prefix(self, key, return_found=False, limit=0, _OP=CACHE.STATE_CHANGED.DELETE_BY_PREFIX):
""" Deletes cache entries by their key prefixes - non-string-like keys are ignored.
Optionally, returns all matched keys and their previous values.
"""
out = self.impl.delete_by_prefix(key, return_found, limit)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'limit':limit
})
return out
# ################################################################################################################################
def delete_by_suffix(self, key, return_found=False, limit=0, _OP=CACHE.STATE_CHANGED.DELETE_BY_SUFFIX):
""" Deletes cache entries by their key suffixes - non-string-like keys are ignored.
Optionally, returns all matched keys and their previous values.
"""
out = self.impl.delete_by_suffix(key, return_found, limit)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'limit':limit
})
return out
# ################################################################################################################################
def delete_by_regex(self, key, return_found=False, limit=0, _OP=CACHE.STATE_CHANGED.DELETE_BY_REGEX):
""" Deletes cache entries with keys matching the input regular expression - non-string-like keys are ignored.
Optionally, returns all matched keys and their previous values.
"""
out = self.impl.delete_by_regex(key, return_found, limit)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'limit':limit
})
return out
# ################################################################################################################################
def delete_contains(self, key, return_found=False, limit=0, _OP=CACHE.STATE_CHANGED.DELETE_CONTAINS):
""" Deletes cache entries with keys containing the input string - non-string-like keys are ignored.
Optionally, returns all matched keys and their previous values.
"""
out = self.impl.delete_contains(key, return_found, limit)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'limit':limit
})
return out
# ################################################################################################################################
def delete_not_contains(self, key, return_found=False, limit=0, _OP=CACHE.STATE_CHANGED.DELETE_NOT_CONTAINS):
""" Deletes cache entries with keys that don't contain the input string - non-string-like keys are ignored.
Optionally, returns all matched keys and their previous values.
"""
out = self.impl.delete_not_contains(key, return_found, limit)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'limit':limit
})
return out
# ################################################################################################################################
def delete_contains_all(self, key, return_found=False, limit=0, _OP=CACHE.STATE_CHANGED.DELETE_CONTAINS_ALL):
""" Deletes cache entries with keys containing all of elements in the input string - non-string-like keys are ignored.
Optionally, returns all matched keys and their previous values.
"""
out = self.impl.delete_contains_all(key, return_found, limit)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'limit':limit
})
return out
# ################################################################################################################################
def delete_contains_any(self, key, return_found=False, limit=0, _OP=CACHE.STATE_CHANGED.DELETE_CONTAINS_ANY):
""" Deletes cache entries with keys containing at least one of elements in the input string -
non-string-like keys are ignored. Optionally, returns all matched keys and their previous values.
"""
out = self.impl.delete_contains_any(key, return_found, limit)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'limit':limit
})
return out
# ################################################################################################################################
def expire(self, key, expiry=0.0, _OP=CACHE.STATE_CHANGED.EXPIRE):
""" Sets expiry in seconds (or a fraction of) for a given key.
"""
meta_ref = {'key':key, 'expiry':expiry} if self.needs_sync else None
found_key = self.impl.expire(key, expiry, meta_ref)
if self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, meta_ref)
return found_key
# ################################################################################################################################
def expire_by_prefix(self, key, expiry=0.0, limit=0, _OP=CACHE.STATE_CHANGED.EXPIRE_BY_PREFIX):
""" Sets expiry in seconds (or a fraction of) for all keys matching the input prefix.
"""
out = self.impl.expire_by_prefix(key, expiry)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'expiry':expiry,
'limit':limit
})
return out
# ################################################################################################################################
def expire_by_suffix(self, key, expiry=0.0, limit=0, _OP=CACHE.STATE_CHANGED.EXPIRE_BY_SUFFIX):
""" Sets expiry in seconds (or a fraction of) for all keys matching the input suffix.
"""
out = self.impl.expire_by_suffix(key, expiry)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'expiry':expiry,
'limit':limit
})
return out
# ################################################################################################################################
def expire_by_regex(self, key, expiry=0.0, limit=0, _OP=CACHE.STATE_CHANGED.EXPIRE_BY_REGEX):
""" Sets expiry in seconds (or a fraction of) for all keys matching the input regular expression.
"""
out = self.impl.expire_by_regex(key, expiry)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'expiry':expiry,
'limit':limit
})
return out
# ################################################################################################################################
def expire_contains(self, key, expiry=0.0, limit=0, _OP=CACHE.STATE_CHANGED.EXPIRE_CONTAINS):
""" Sets expiry in seconds (or a fraction of) for all keys containing the input string.
"""
out = self.impl.expire_contains(key, expiry)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'expiry':expiry,
'limit':limit
})
return out
# ################################################################################################################################
def expire_not_contains(self, key, expiry=0.0, limit=0, _OP=CACHE.STATE_CHANGED.EXPIRE_NOT_CONTAINS):
""" Sets expiry in seconds (or a fraction of) for all keys that don't contain the input string.
"""
out = self.impl.expire_not_contains(key, expiry)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'expiry':expiry,
'limit':limit
})
return out
# ################################################################################################################################
def expire_contains_all(self, key, expiry=0.0, limit=0, _OP=CACHE.STATE_CHANGED.EXPIRE_CONTAINS_ALL):
""" Sets expiry in seconds (or a fraction of) for keys that contain all of input elements.
"""
out = self.impl.expire_contains_all(key, expiry)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'expiry':expiry,
'limit':limit
})
return out
# ################################################################################################################################
def expire_contains_any(self, key, expiry=0.0, limit=0, _OP=CACHE.STATE_CHANGED.EXPIRE_CONTAINS_ALL):
""" Sets expiry in seconds (or a fraction of) for keys that contain at least one of input elements.
"""
out = self.impl.expire_contains_any(key, expiry)
if out and self.needs_sync:
spawn(self.after_state_changed_callback, _OP, self.config.name, {
'key':key,
'expiry':expiry,
'limit':limit
})
return out
# ################################################################################################################################
    # Dict-like read-only accessors - all of them delegate to the underlying
    # cache implementation object without any cross-worker synchronization.

    def keys(self):
        """ Returns all keys in the cache - like dict.keys().
        """
        return self.impl.keys()

# ################################################################################################################################

    def iterkeys(self):
        """ Returns an iterator over all keys in the cache - like dict.iterkeys().
        """
        return self.impl.iterkeys()

# ################################################################################################################################

    def values(self):
        """ Returns all values in the cache - like dict.values().
        """
        return self.impl.values()

# ################################################################################################################################

    def itervalues(self):
        """ Returns an iterator over all values in the cache - like dict.itervalues().
        """
        # itervalues here is a module-level helper, presumably the Py2/3 compat
        # function applied to the impl object - confirm at the top of the module.
        return itervalues(self.impl)

# ################################################################################################################################

    def items(self):
        """ Returns all items in the cache - like dict.items().
        """
        return self.impl.items()

# ################################################################################################################################

    def iteritems(self):
        """ Returns an iterator over all items in the cache - like dict.iteritems().
        """
        # iteritems here is a module-level helper, presumably the Py2/3 compat
        # function applied to the impl object - confirm at the top of the module.
        return iteritems(self.impl)
# ################################################################################################################################
def clear(self, _CLEAR=CACHE.STATE_CHANGED.CLEAR):
""" Clears the cache - removes all entries.
"""
self.impl.clear()
if self.needs_sync:
spawn(self.after_state_changed_callback, _CLEAR, self.config.name, {})
# ################################################################################################################################
def update_config(self, config):
self.needs_sync = self.config.sync_method != CACHE.SYNC_METHOD.NO_SYNC.id
self.impl.update_config(config)
# ################################################################################################################################
    def _delete_expired(self, interval=5, _sleep=sleep):
        """ Invoked in its own greenlet in background to delete expired cache entries.
        Runs forever, waking up every `interval` seconds; errors in a single pass are
        logged and followed by a short pause rather than terminating the loop.
        """
        try:
            while True:
                try:
                    _sleep(interval)
                    deleted = self.impl.delete_expired()
                except Exception:
                    # A single failed pass must not kill the loop - log and back off briefly
                    logger.warning('Exception while deleting expired keys %s', format_exc())
                    _sleep(2)
                else:
                    # Only log when something was actually deleted to keep logs quiet
                    if deleted:
                        logger.info('Cache `%s` deleted keys expired in the last %ss - %s', self.config.name, interval, deleted)
        except Exception:
            # Catch-all so a failure outside the inner try (e.g. during logging) is still recorded
            logger.warning('Exception in _delete_expired loop %s', format_exc())
# ################################################################################################################################
    # Handlers below apply .set* operations performed by other worker processes to this
    # worker's in-memory cache. The literal False/None arguments disable detail collection
    # and the meta_ref payload respectively, so that applying a remote change never
    # triggers another broadcast - confirm against the impl.set* signatures.

    def sync_after_set(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .set operation in another worker process.
        """
        self.impl.set(data.key, data.value, data.expiry, False, None, data.orig_now)

    def sync_after_set_by_prefix(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .set_by_prefix operation in another worker process.
        """
        self.impl.set_by_prefix(data.key, data.value, data.expiry, False, None, data.limit, data.orig_now)

    def sync_after_set_by_suffix(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .set_by_suffix operation in another worker process.
        """
        self.impl.set_by_suffix(data.key, data.value, data.expiry, False, None, data.limit, data.orig_now)

    def sync_after_set_by_regex(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .set_by_regex operation in another worker process.
        """
        self.impl.set_by_regex(data.key, data.value, data.expiry, False, None, data.limit, data.orig_now)

    def sync_after_set_contains(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .set_contains operation in another worker process.
        """
        self.impl.set_contains(data.key, data.value, data.expiry, False, None, data.limit, data.orig_now)

    def sync_after_set_not_contains(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .set_not_contains operation
        in another worker process.
        """
        self.impl.set_not_contains(data.key, data.value, data.expiry, False, None, data.limit, data.orig_now)

    def sync_after_set_contains_all(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .set_contains_all operation
        in another worker process.
        """
        self.impl.set_contains_all(data.key, data.value, data.expiry, False, None, data.limit, data.orig_now)

    def sync_after_set_contains_any(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .set_contains_any operation
        in another worker process.
        """
        self.impl.set_contains_any(data.key, data.value, data.expiry, False, None, data.limit, data.orig_now)
# ################################################################################################################################
    # Handlers below apply .delete* operations performed by other worker processes to this
    # worker's in-memory cache. The literal False is return_found - matched entries are
    # never needed back when replaying a remote deletion.

    def sync_after_delete(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .delete operation in another worker process.
        """
        self.impl.delete(data.key)

    def sync_after_delete_by_prefix(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .delete_by_prefix operation
        in another worker process.
        """
        self.impl.delete_by_prefix(data.key, False, data.limit)

    def sync_after_delete_by_suffix(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .delete_by_suffix operation
        in another worker process.
        """
        self.impl.delete_by_suffix(data.key, False, data.limit)

    def sync_after_delete_by_regex(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .delete_by_regex operation
        in another worker process.
        """
        self.impl.delete_by_regex(data.key, False, data.limit)

    def sync_after_delete_contains(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .delete_contains operation
        in another worker process.
        """
        self.impl.delete_contains(data.key, False, data.limit)

    def sync_after_delete_not_contains(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .delete_not_contains operation
        in another worker process.
        """
        self.impl.delete_not_contains(data.key, False, data.limit)

    def sync_after_delete_contains_all(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .delete_contains_all operation
        in another worker process.
        """
        self.impl.delete_contains_all(data.key, False, data.limit)

    def sync_after_delete_contains_any(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .delete_contains_any operation
        in another worker process.
        """
        self.impl.delete_contains_any(data.key, False, data.limit)
# ################################################################################################################################
    # Handlers below apply .expire* and .clear operations performed by other worker
    # processes to this worker's in-memory cache.

    def sync_after_expire(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after an .expire operation in another worker process.
        """
        # data.expires_at is presumably the absolute expiration time computed by the worker
        # that handled the original .expire call - confirm against impl.expire.
        self.impl.set_expiration_data(data.key, data.expiry, data.expires_at)

    def sync_after_expire_by_prefix(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .expire_by_prefix operation
        in another worker process.
        """
        self.impl.expire_by_prefix(data.key, data.expiry, data.limit)

    def sync_after_expire_by_suffix(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .expire_by_suffix operation
        in another worker process.
        """
        self.impl.expire_by_suffix(data.key, data.expiry, data.limit)

    def sync_after_expire_by_regex(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .expire_by_regex operation
        in another worker process.
        """
        self.impl.expire_by_regex(data.key, data.expiry, data.limit)

    def sync_after_expire_contains(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .expire_contains operation
        in another worker process.
        """
        self.impl.expire_contains(data.key, data.expiry, data.limit)

    def sync_after_expire_not_contains(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .expire_not_contains operation
        in another worker process.
        """
        self.impl.expire_not_contains(data.key, data.expiry, data.limit)

    def sync_after_expire_contains_all(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .expire_contains_all operation
        in another worker process.
        """
        self.impl.expire_contains_all(data.key, data.expiry, data.limit)

    def sync_after_expire_contains_any(self, data):
        """ Invoked by Cache API to synchronize this worker's cache after a .expire_contains_any operation
        in another worker process.
        """
        self.impl.expire_contains_any(data.key, data.expiry, data.limit)

# ################################################################################################################################

    def sync_after_clear(self):
        """ Invoked by Cache API to synchronize this worker's cache after a .clear operation in another worker process.
        """
        self.impl.clear()
# ################################################################################################################################
class _NotConfiguredAPI:
    """ A placeholder standing in for the default cache until one is actually configured -
    both .set and .get only log a warning instead of performing any operation.
    """
    def set(self, *args, **kwargs):
        # Fix: logger.warn is a deprecated alias of logger.warning - use .warning,
        # which is also what the rest of this module uses.
        logger.warning('Default cache is not configured')
    get = set
# ################################################################################################################################
class CacheAPI:
""" Base class for all cache objects.
"""
def __init__(self, server):
self.server = server
self.lock = RLock()
self.default = cast_('Cache', _NotConfiguredAPI())
self.caches = {
CACHE.TYPE.BUILTIN:{},
CACHE.TYPE.MEMCACHED:{},
}
self.builtin = self.caches[CACHE.TYPE.BUILTIN]
self.memcached = self.caches[CACHE.TYPE.MEMCACHED]
def _maybe_set_default(self, config, cache):
if config.is_default:
self.default = cache
# ################################################################################################################################
def after_state_changed(self, op, cache_name, data, _broker_msg=builtin_op_to_broker_msg, _pickle_dumps=pickle_dumps):
""" Callback method invoked by each cache if it requires synchronization with other worker processes.
"""
try:
data['action'] = _broker_msg[op]
data['cache_name'] = cache_name
data['source_worker_id'] = self.server.worker_id
key = data.get('key', _no_key)
value = data.get('value', _no_value)
if isinstance(key, basestring):
data['is_key_pickled'] = False
else:
data['is_key_pickled'] = True
data['key'] = _pickle_dumps(key)
if value:
if isinstance(value, basestring):
data['is_value_pickled'] = False
else:
data['is_value_pickled'] = True
value = _pickle_dumps(value)
value = b64encode(value)
value = value.decode('utf8')
data['value'] = value
else:
data['is_value_pickled'] = False
self.server.broker_client.publish(data)
except Exception:
logger.warning('Could not run `%s` after_state_changed in cache `%s`, data:`%s`, e:`%s`',
op, cache_name, data, format_exc())
# ################################################################################################################################
def _create_builtin(self, config):
""" A low-level method building a bCache object for built-in caches. Must be called with self.lock held.
"""
config.after_state_changed_callback = self.after_state_changed
return Cache(config)
# ################################################################################################################################
def _create_memcached(self, config):
""" A low-level method building a Memcached-based cache connections.
"""
def impl():
try:
servers = [elem.strip() for elem in config.servers.splitlines()]
cache = _MemcachedClient(servers, asbool(config.is_debug), **parse_extra_into_dict(config.extra))
self._add_cache(config, cache)
except Exception:
logger.warning(format_exc())
spawn(impl)
# ################################################################################################################################
def _add_cache(self, config, cache):
# Add it to caches
self.caches[config.cache_type][config.name] = cache
# If told to be configuration, make this cache the default one
self._maybe_set_default(config, cache)
# ################################################################################################################################
def _create(self, config):
""" A low-level method building caches. Must be called with self.lock held.
"""
# Create a cache object out of configuration
cache = getattr(self, '_create_{}'.format(config.cache_type))(config)
# Only built-in caches can be added directly because they do not establish
# any external connections, any other cache will be built in a background greenlet
if config.cache_type == CACHE.TYPE.BUILTIN:
self._add_cache(config, cache)
# ################################################################################################################################
def create(self, config):
""" Public method for building caches out of configuration.
"""
with self.lock:
self._create(config)
# ################################################################################################################################
def _edit(self, config):
""" A low-level method for updating configuration of a given cache. Must be called with self.lock held.
"""
if config.cache_type == CACHE.TYPE.BUILTIN:
cache = self.caches[config.cache_type].pop(config.old_name)
cache.update_config(config)
self._add_cache(config, cache)
else:
cache = self.caches[config.cache_type][config.old_name]
cache.disconnect_all()
self._delete(config.cache_type, config.old_name)
self._create_memcached(config)
# ################################################################################################################################
def edit(self, config):
""" Public method for updating configuration of a given cache.
"""
with self.lock:
self._edit(config)
# ################################################################################################################################
def _delete(self, cache_type, name):
""" A low-level method for deleting a given cache. Must be called with self.lock held.
"""
cache = self.caches[cache_type][name]
if cache_type == CACHE.TYPE.BUILTIN:
self._clear(cache_type, name)
else:
cache.disconnect_all()
del self.caches[cache_type][name]
# ################################################################################################################################
def delete(self, config):
""" Public method for updating configuration of a given cache.
"""
with self.lock:
self._delete(config.cache_type, config.name)
# ################################################################################################################################
def _clear(self, cache_type, name):
""" A low-level method for clearing out contents of a given cache. Must be called with self.lock held.
"""
self.caches[cache_type][name].clear()
# ################################################################################################################################
def clear(self, cache_type, name):
""" Public method for clearing out a given cache.
"""
with self.lock:
self._clear(cache_type, name)
#
# ################################################################################################################################
def _get_cache(self, cache_type:'str', name:'str') -> 'Cache':
""" Actually returns a cache. Must be called with self.lock held.
"""
return self.caches[cache_type][name]
# ################################################################################################################################
def get_cache(self, cache_type:'str', name:'str') -> 'Cache':
""" Returns the lower-level cache implementation object by its type and name.
"""
with self.lock:
return self.caches[cache_type][name]
# ################################################################################################################################
def get_builtin_cache(self, name):
""" Returns a built-in cache by its name.
"""
with self.lock:
return self._get_cache(CACHE.TYPE.BUILTIN, name)
# ################################################################################################################################
def get_memcached_cache(self, name):
""" Returns a Memcached cache by its name.
"""
with self.lock:
return self._get_cache(CACHE.TYPE.MEMCACHED, name)
# ################################################################################################################################
def get_size(self, cache_type, name):
""" Returns current size, the number of entries, in a given cache.
"""
return len(self.caches[cache_type][name])
# ################################################################################################################################
def sync_after_set(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .set operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_set(data)
def sync_after_set_by_prefix(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .set_by_prefix operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_set_by_prefix(data)
def sync_after_set_by_suffix(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .set_by_suffix operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_set_by_suffix(data)
def sync_after_set_by_regex(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .set_by_regex operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_set_by_regex(data)
def sync_after_set_contains(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .set_contains operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_set_contains(data)
def sync_after_set_not_contains(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .set_not_contains operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_set_not_contains(data)
def sync_after_set_contains_all(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .set_contains_all operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_set_contains_all(data)
def sync_after_set_contains_any(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .set_contains_any operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_set_contains_any(data)
# ################################################################################################################################
def sync_after_delete(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .delete operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_delete(data)
def sync_after_delete_by_prefix(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .delete_by_prefix operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_delete_by_prefix(data)
def sync_after_delete_by_suffix(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .delete_by_suffix operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_delete_by_suffix(data)
def sync_after_delete_by_regex(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .delete_by_regex operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_delete_by_regex(data)
def sync_after_delete_contains(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .delete_contains operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_delete_contains(data)
def sync_after_delete_not_contains(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .delete_not_contains operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_delete_not_contains(data)
def sync_after_delete_contains_all(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .delete_contains_all operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_delete_contains_all(data)
def sync_after_delete_contains_any(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .delete_contains_any operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_delete_contains_any(data)
# ################################################################################################################################
def sync_after_expire(self, cache_type, data):
""" Synchronizes the state of this worker's cache after an .expire operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_expire(data)
def sync_after_expire_by_prefix(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .expire_by_prefix operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_expire_by_prefix(data)
def sync_after_expire_by_suffix(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .expire_by_suffix operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_expire_by_suffix(data)
def sync_after_expire_by_regex(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .expire_by_regex operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_expire_by_regex(data)
def sync_after_expire_contains(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .expire_contains operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_expire_contains(data)
def sync_after_expire_not_contains(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .expire_not_contains operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_expire_not_contains(data)
def sync_after_expire_contains_all(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .expire_contains_all operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_expire_contains_all(data)
def sync_after_expire_contains_any(self, cache_type, data):
""" Synchronizes the state of this worker's cache after a .expire_contains_any operation in another worker process.
"""
self.caches[cache_type][data.cache_name].sync_after_expire_contains_any(data)
# ################################################################################################################################
def sync_after_clear(self, cache_type, data):
    """ Synchronizes the state of this worker's cache after a .clear operation in another worker process.
    """
    # Unlike the expire-style events, .clear takes no payload - the event
    # only tells us which cache to clear.
    target_cache = self.caches[cache_type][data.cache_name]
    target_cache.sync_after_clear()
# ################################################################################################################################
| 53,829
|
Python
|
.py
| 915
| 49.949727
| 130
| 0.506685
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,079
|
jira_.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/jira_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# atlassian-python-api
from atlassian import Jira as AtlassianJiraClient
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, stranydict, strlist, strset
# ################################################################################################################################
# ################################################################################################################################
class JiraClient(AtlassianJiraClient):
    """ A Zato-aware wrapper around the Atlassian Jira client, adding helpers
    for maintaining list-valued custom fields and issue transitions.
    """
    zato_api_version: 'str'
    zato_address: 'str'
    zato_username: 'str'
    zato_token: 'str'
    zato_is_cloud: 'bool'

    def __init__(
        self,
        *,
        zato_api_version, # type: str
        zato_address,     # type: str
        zato_username,    # type: str
        zato_token,       # type: str
        zato_is_cloud,    # type: bool
    ) -> 'None':

        # The underlying Jira API requires the version to be a string,
        # no matter what type we were given on input.
        self.zato_api_version = str(zato_api_version)

        self.zato_address = zato_address
        self.zato_username = zato_username
        self.zato_token = zato_token
        self.zato_is_cloud = zato_is_cloud

        super().__init__(
            url = self.zato_address,
            username = self.zato_username,
            password = self.zato_token,
            api_version = self.zato_api_version,
            cloud = self.zato_is_cloud,
        )

    @staticmethod
    def from_config(config:'stranydict') -> 'JiraClient':
        """ Builds a new client out of a Zato connection configuration dict.
        """
        return JiraClient(
            zato_api_version = config['api_version'],
            zato_address = config['address'],
            zato_username = config['username'],
            zato_token = config['secret'],
            zato_is_cloud = config['is_cloud'],
        )

    def append_to_field(
        self,
        *,
        key,      # type: str
        field_id, # type: str
        value,    # type: str
    ) -> 'strlist':
        """ Appends a value to a list-valued issue field, deduplicating and sorting
        the result, and returns the list the issue now holds.
        """
        # Read what the field currently contains - this may be None,
        # hence the `or []` fallback below.
        current = self.issue_field_value(key, field_id)

        # Deduplicate while adding the new value ..
        unique = set(current or [])
        unique.add(value)

        # .. keep the list sorted so it is easier to browse in Jira ..
        out = sorted(unique)

        # .. write the new list back to the ticket ..
        _:'any_' = self.update_issue_field(key, {
            field_id: out
        })

        # .. and let the caller see the final state of the field.
        return out

    def append_and_transition_if_field_complete(
        self,
        *,
        key,           # type: str
        field_id,      # type: str
        value,         # type: str
        transition_to, # type: str
        complete_list  # type: strlist
    ) -> 'None':
        """ Appends a value to a list-valued field and, once the field holds exactly
        the expected complete set of values, transitions the issue to a new status.
        """
        # Modify the ticket and obtain the field's current value list
        current_list = self.append_to_field(
            key=key,
            field_id=field_id,
            value=value
        )

        # Compare it with the expected complete list - both sides are sorted
        # so that ordering differences do not matter.
        if sorted(current_list) == sorted(complete_list):

            # If we are here, it means that we must have appended the final item
            # in the list above, in which case we can make the transition.
            _:'any_' = self.set_issue_status(key, transition_to)
# ################################################################################################################################
# ################################################################################################################################
| 4,584
|
Python
|
.py
| 100
| 37.87
| 130
| 0.429854
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,080
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
from copy import deepcopy
from traceback import format_exc
# Bunch
from bunch import Bunch, bunchify
# ConcurrentLogHandler - updates stlidb's logging config on import so this needs to stay
try:
import cloghandler
except ImportError:
pass
else:
cloghandler = cloghandler # For pyflakes
# Zato
from zato.common.api import SECRET_SHADOW
from zato.common.dispatch import dispatcher
from zato.common.exception import Inactive
from zato.common.util.api import new_cid
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
class BasePoolAPI:
    """ Lets services access pool-based outgoing connections by name.
    """
    def __init__(self, conn_store):
        self._conn_store = conn_store

    def __getitem__(self, name):
        """ Returns an active connection item or raises KeyError / Inactive.
        """
        item = self._conn_store.get(name)

        if item:
            if item.config.is_active:
                return item

            # Known but disabled - report it as inactive
            msg = 'Connection `{}` is not active'.format(name)
            logger.warning(msg)
            raise Inactive(msg)

        # Unknown connection - include the list of known ones in the message
        msg = 'No such connection `{}` in `{}`'.format(name, sorted(self._conn_store.sessions))
        logger.warning(msg)
        raise KeyError(msg)

    get = __getitem__

    def create_def(self, name, msg, on_connection_established_callback=None, *args, **kwargs):
        """ Creates a new connection definition in the underlying store.
        """
        self._conn_store.create(name, msg, on_connection_established_callback, *args, **kwargs)

    create = create_def

    def edit_def(self, name, msg, on_connection_established_callback=None, *args, **kwargs):
        """ Updates an existing connection definition.
        """
        return self._conn_store.edit(name, msg, on_connection_established_callback, *args, **kwargs)

    def delete_def(self, name):
        """ Deletes a connection definition.
        """
        return self._conn_store.delete(name)

    def change_password_def(self, config):
        """ Changes the password of a connection definition.
        """
        return self._conn_store.change_password(config)
# ################################################################################################################################
class BaseConnPoolStore:
    """ Base connection store for pool-based outgoing connections.

    Subclasses are expected to set ``conn_name``, ``dispatcher_events``,
    ``dispatcher_listen_for`` and a ``delete_event`` attribute, and to implement
    ``create_session`` and ``delete_session_hook``.
    """
    # Human-readable name of the connection type, used in log messages
    conn_name = None

    # NOTE(review): mutable class-level default - shared by every subclass
    # that does not assign its own list; confirm subclasses override it.
    dispatcher_events = []

    # The dispatcher event source this store subscribes to
    dispatcher_listen_for = None

    def __init__(self):

        # Import gevent here because connectors may not want to use it
        import gevent
        from gevent.lock import RLock

        self._gevent = gevent
        self._RLock = RLock

        # Maps connection names to Bunch items describing each connection
        self.sessions = {}

        # Guards shared state - sessions, keep_connecting and the backlog
        self.lock = self._RLock()

        self.keep_connecting = set() # IDs of connections to keep connecting for

        # Connects us to interesting events the to-be-established connections need to consult
        dispatcher.listen_for_updates(self.dispatcher_listen_for, self.dispatcher_callback)

        # Maps broker message IDs to their accompanying config
        self.dispatcher_backlog = []

    def __getitem__(self, name):
        # Raises KeyError if the connection is unknown
        return self.sessions[name]

    def get(self, name):
        # Returns None if the connection is unknown
        return self.sessions.get(name)

    def create_session(self, name, config):
        """ Actually adds a new definition, must be called with self.lock held.
        """
        raise NotImplementedError('Must be overridden in subclasses')

    def _log_connection_error(self, name, config_no_sensitive, e, additional=''):
        # NOTE(review): in Python 3, format_exc takes a traceback limit, not an
        # exception object - `format_exc(e)` is presumably meant to be `format_exc()`;
        # confirm before changing.
        logger.warning('Could not connect to %s `%s`, config:`%s`, e:`%s`%s', self.conn_name, name, config_no_sensitive,
            format_exc(e), additional)

    def get_config_no_sensitive(self, config):
        """ Returns a deep copy of the config with the password masked out,
        safe to include in log messages.
        """
        config_no_sensitive = deepcopy(config)
        config_no_sensitive['password'] = SECRET_SHADOW

        return config_no_sensitive

    def _create(self, name, config, on_connection_established_callback=None, *args, **kwargs):
        """ Actually establishes a new connection - the method is called in a new greenlet.
        Retries every 20 seconds until connected or until a dispatcher event deletes us.
        """
        self.keep_connecting.add(config.id)
        session = None
        config_no_sensitive = self.get_config_no_sensitive(config)
        item = Bunch(config=config, config_no_sensitive=config_no_sensitive, is_connected=False, conn=None)

        try:
            logger.debug('Connecting to `%s`', item.config_no_sensitive)

            # Keep trying for as long as our ID stays in keep_connecting -
            # a successful connect or a delete event removes it.
            while item.config.id in self.keep_connecting:

                # It's possible our configuration has been already updated by users
                # even before we first time established any connection. For instance,
                # connection parameters were invalid and the user updated them.
                # We need to learn of the new config or possibly stop connecting
                # at all if we have been deleted.
                with self.lock:
                    keep_connecting, new_config = self.check_dispatcher_backlog(item)

                if keep_connecting:

                    if new_config:
                        item.config = new_config
                        item.config_no_sensitive = self.get_config_no_sensitive(item.config)

                    try:
                        # Will be overridden in a subclass
                        session = self.create_session(name, item.config, item.config_no_sensitive)

                        # Success - removing our ID also terminates the while loop above
                        self.keep_connecting.remove(item.config.id)

                        logger.info('Connected to %s `%r`', self.conn_name, item.config_no_sensitive)

                    except KeyboardInterrupt:
                        return

                    except Exception as e:
                        self._log_connection_error(name, item.config_no_sensitive, e, ', sleeping for 20 s')
                        self._gevent.sleep(20) # TODO: Should be configurable

        except Exception as e:
            self._log_connection_error(name, item.config_no_sensitive, e)
        else:
            # No session, we give up and quit. This may happen if we have been deleted
            # through a dispatcher event before the session could have been established at all.
            if not session:
                return

            item.conn = session
            item.is_connected = True

            if on_connection_established_callback:
                on_connection_established_callback(item, *args, **kwargs)

        self.sessions[name] = item

        return item

    def create(self, name, config, on_connection_established_callback=None, *args, **kwargs):
        """ Adds a new connection definition. The actual connecting happens
        asynchronously in a newly spawned greenlet.
        """
        with self.lock:
            self._gevent.spawn(self._create, name, config, on_connection_established_callback, *args, **kwargs)

    def delete_session(self, name):
        """ Actually deletes a definition. Must be called with self.lock held.
        """
        item = self.sessions.get(name)
        if item:
            try:
                self.keep_connecting.remove(item.config.id)
            except KeyError:
                pass # It's OK, no ongoing connection attempt at the moment

            self.delete_session_hook(item)

        # NOTE(review): this logs unconditionally, even when the session was found
        # and deleted above - it reads as if it was meant for an `else` branch; confirm.
        logger.debug('Could not delete session `%s` - not among `%s`', name, self.sessions)

    def delete_session_hook(self, session):
        """ A hook for concrete subclasses to delete their sessions.
        """
        raise NotImplementedError('Must be overridden in subclasses')

    def delete(self, name):
        """ Deletes an existing connection.
        """
        with self.lock:
            try:
                session = self.sessions.get(name)
                if session and session.is_connected:
                    self.delete_session(name)
            except Exception:
                logger.warning('Error while shutting down session `%s`, e:`%s`', name, format_exc())
            finally:
                # Always drop the item from the store, connected or not
                self.sessions.pop(name, None)

    def edit(self, name, config, on_connection_established_callback=None, *args, **kwargs):
        """ Updates an existing connection - deletes the old session and recreates it
        under the (possibly new) name taken from the config.
        """
        with self.lock:
            try:
                self.delete_session(name)
            except Exception:
                logger.warning('Could not delete session `%s`, config:`%s`, e:`%s`', name, config, format_exc())
            else:
                return self._create(config.name, config, on_connection_established_callback, *args, **kwargs)

    def change_password(self, password_data):
        """ Replaces the connection's password and re-establishes the session.
        """
        with self.lock:
            # Start from the masked config copy and put the new password in
            new_config = deepcopy(self.sessions[password_data.name].config_no_sensitive)
            new_config.password = password_data.password
            return self.edit(password_data.name, new_config)

    def check_dispatcher_backlog(self, item):
        """ Collects backlogged dispatcher events relevant to this item and returns
        a (keep_connecting, new_config) pair describing what to do next.
        """
        events = []

        for event_info in self.dispatcher_backlog:
            if event_info.ctx.id == item.config.id and event_info.event in self.dispatcher_events:
                events.append(bunchify({
                    'item': item,
                    'event_info': event_info
                }))

        if events:
            return self.on_dispatcher_events(events)
        else:
            # Nothing relevant queued up - keep connecting with the current config
            return True, None

    def dispatcher_callback(self, event, ctx, **opaque):
        # Invoked by the dispatcher - queue the event up for the connecting greenlet
        self.dispatcher_backlog.append(bunchify({
            'event_id': new_cid(),
            'event': event,
            'ctx': ctx,
            'opaque': opaque
        }))

    def on_dispatcher_events(self, events):
        """ Handles in-process dispatcher events. If it's a DELETE, the connection is removed
        from a list of connections to be established. If an EDIT, the connection's config is updated.
        In either case all subsequent dispatcher events are discarded.
        """
        # Only check the latest event
        event = events[-1]

        # `delete_event` is presumably provided by subclasses - TODO confirm
        is_delete = event.event_info.event == self.delete_event

        if is_delete:
            self.keep_connecting.remove(event.item.config.id)
        else:
            new_config = event.event_info.ctx

        # We always delete all the events because we processed the last one anyway
        for event in events:
            self.dispatcher_backlog.remove(event.event_info)

        # Stop connecting if we have just been deleted
        return (False, None) if is_delete else (True, new_config)
# ################################################################################################################################
| 10,901
|
Python
|
.py
| 217
| 40.119816
| 130
| 0.573394
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,081
|
google.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/google.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from logging import getLogger
from pathlib import Path
# google-api-client
from googleapiclient import discovery
from googleapiclient.http import MediaFileUpload
# httplib2
import httplib2
# oauth2client
from oauth2client.service_account import ServiceAccountCredentials
# stroll
from stroll import stroll
# Zato
from zato.common.typing_ import cast_, list_field
from zato.common.util.file_system import fs_safe_now
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anylist, stranydict, strnone
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class GoogleClient:
    """ A client class that builds connections to Google APIs and can mirror
    a local directory tree to Google Drive.
    """
    conn:'any_'
    # NOTE(review): declared as `files` but the code below assigns `self._files`;
    # likewise `dir_map` below vs the `self._dir_map` actually used - confirm.
    files:'any_'
    api_name: 'str'
    api_version: 'str'
    user: 'str'
    scopes: 'anylist' = list_field()
    service_file_dict: 'stranydict'
    dir_map: 'stranydict'

    def __init__(self, api_name: 'str', api_version: 'str', user:'str', scopes: 'anylist', service_file_dict:'stranydict') -> 'None':
        self.api_name = api_name
        self.api_version = api_version
        self.user = user
        self.scopes = scopes
        self.service_file_dict = service_file_dict

        # This is a mapping of remote directories already created to their Google Drive IDs
        self._dir_map = {}

    def connect(self) -> 'any_':
        """ Builds service-account credentials delegated to self.user and opens
        the underlying Google API connection. Must be called before any sync.
        """
        credentials = ServiceAccountCredentials.from_json_keyfile_dict(self.service_file_dict, scopes=self.scopes)
        credentials = credentials.create_delegated(self.user)
        http = credentials.authorize(httplib2.Http())
        self.conn = discovery.build(self.api_name, self.api_version, http=http)
        self._files = self.conn.files()

    def reset(self) -> 'None':
        """ Clears the cache of already-created remote directories.
        """
        self._dir_map.clear()

    def _create_remote_resource(
        self,
        name:'str',                  # Name of the resource to create
        resource_type:'str',         # 'directory' or 'file' - used in log messages only
        mime_type:'strnone',         # Set for directories, None for files
        *,
        parent_id:'strnone',         # Google Drive ID of the parent, or None for top level
        request_wrapper:'any_'=None, # Truthy for files - triggers a MediaFileUpload body
        full_path:'strnone'=None     # Local path of the file being uploaded
    ) -> 'str':
        """ Creates a single remote resource (directory or file) and returns its Drive ID.
        """
        # A base request to create a remote resource ..
        request = {
            'name': name,
        }

        # This will exist with directories only
        if mime_type:
            request['mimeType'] = 'application/vnd.google-apps.folder'

        # Parent will be None in case this is the top-level directory,
        # and in such a case, we cannot send a list with a None object inside,
        # it needs to be None directly.
        request['parents'] = [parent_id] if parent_id else None # type: ignore

        # Log what we are about to do ..
        logger.info('Creating %s `%s` with a parent of `%s`', resource_type, name, parent_id)

        # .. files will use a wrapper class for their contents ..
        if request_wrapper:
            media_body = MediaFileUpload(full_path, resumable=True)
        else:
            media_body = None

        # .. create the resource ..
        resource = self._files.create(body=request, media_body=media_body, fields='id').execute()

        # .. log the metadata received ..
        logger.info('Created %s `%s` with an ID of `%s`', resource_type, name, resource['id'])

        # .. and return the newly created resource's ID to our caller.
        return resource['id']

    def create_remote_directory(self, name:'str', *, parent_id:'strnone') -> 'str':
        """ Creates a remote directory under parent_id and returns its Drive ID.
        """
        return self._create_remote_resource(
            name, 'directory', 'application/vnd.google-apps.folder', parent_id=parent_id
        )

    def create_remote_file(self, name:'str', full_path:'str', *, parent_id:'str') -> 'str':
        """ Uploads a local file under parent_id and returns its Drive ID.
        """
        return self._create_remote_resource(
            name, 'file', None, parent_id=parent_id, request_wrapper=MediaFileUpload, full_path=full_path
        )

    def add_directory(self, new_dir_root:'Path', new_dir_root_str:'str', item_relative:'Path') -> 'None':
        """ Ensures that the remote directory for item_relative exists, creating
        any missing intermediate directories and caching their IDs in self._dir_map.
        """
        full_remote_path = new_dir_root.joinpath(item_relative)

        # We need a list of components to visit and create consisting of all our parents
        # as well as our own item (directory) that we are currently visiting.
        components = list(reversed(full_remote_path.parents))
        components.append(item_relative)

        for component in components:
            component_str = str(component)

            # Ignore current directory markers
            if component_str == '.':
                continue

            # This is a real directory that we may possibly want to create out of its parts
            else:
                # We start to build all the parts at the top-level directory
                current_parent = new_dir_root_str
                current_path = [current_parent]

                for part in component.parts:

                    # Ignore the case where part is the same as current parent (i.e. top-level directory)
                    if current_parent == part:
                        continue

                    # Append our own part to the full list of parts visited so far ..
                    current_path.append(part)
                    current_path_str = '/'.join(current_path)

                    # .. which means that our parent's path is the same as above except for what we have just added.
                    # .. We build the parent in this way in order to have two separate Python lists that we can
                    # .. manipulate separately instead of having a single mutuable list only.
                    current_parent_path = current_path[:-1]
                    current_parent_path_str = '/'.join(current_parent_path)

                    # If we do not have such a directory cached yet, it means that we need
                    # to create it and assign it to our current part. There will be always
                    # a parent to assign the directory to because we started with the top-level element.
                    if current_path_str not in self._dir_map:
                        parent_id = self._dir_map[current_parent_path_str]
                        dir_id = self.create_remote_directory(part, parent_id=parent_id)
                        self._dir_map[current_path_str] = dir_id
                        logger.info('Caching directory %s -> %s', current_path_str, dir_id)
                        logger.info('Current cache: %s', self._dir_map)

                    # Iterate down the list of parts
                    current_parent = part

    def sync_to_google_drive(
        self,
        local_path:'str',          # Local directory to mirror
        new_root_name:'str',       # Base name of the remote root (a timestamp is appended)
        parent_directory_id:'str'  # Drive ID to anchor the remote root under
    ) -> 'str':
        """ Mirrors local_path to a new, timestamped directory in Google Drive
        and returns the new remote root directory's Drive ID.
        """
        # Log information about what we are about to do
        logger.info('About to sync `%s` to Google Drive (%s)', local_path, self.user)

        # Each directory contains a timestamp in case we need to recreate it
        root_suffix = fs_safe_now()

        # This is the root directory in Google Drive that we are going to store the local directory under
        new_dir_root = new_root_name + '-' + root_suffix
        new_dir_root = Path(new_dir_root)

        # This is reusable
        new_dir_root_str = str(new_dir_root)

        # This is our own local root directory that needs to be synced to Google Drive
        local_path_root = Path(local_path)

        # First, create the remote root directory under which all local directories will be anchored.
        # Note that the remote root itself is attached to a directory whose ID we were given on input.
        root_dir_id = self.create_remote_directory(new_dir_root_str, parent_id=parent_directory_id)

        # Assign it to the mapping of directories to their IDs for later use
        self._dir_map[new_dir_root_str] = root_dir_id

        # Walk down a tree of directories and files. Note that directories will always be visited first
        # and that all the names are sorted alphabetically.
        for item in stroll(local_path, directories=True, sort=True):

            # Add static typing
            item = cast_('Path', item)

            # This object is relative to our local root that we are synchronizing from,
            # i.e. we are removing any local path part leading to it. In this way,
            # we can in next steps build full remote paths based on that.
            item_relative = item.relative_to(local_path_root)

            # This is a directory and, unless we have already seen it,
            # we need to create its remote reflection ..
            if item.is_dir():
                logger.info('Syncing local directory `%s`', item)
                self.add_directory(new_dir_root, new_dir_root_str, item_relative)

            # .. this is a file and we know that its parent directory
            # .. must have been already created by now.
            else:

                # Build a string path to our parent, relative to the local root dir,
                # which will allow us to look up the parent in the cache of directories already synced.
                file_parent_str = item_relative.parent.as_posix()
                file_parent_str = os.path.join(new_dir_root_str, file_parent_str)
                file_parent_str = os.path.normpath(file_parent_str)

                # We can be certain that it exists because, again, directories are visited first.
                parent_id = self._dir_map[file_parent_str]

                # Now we can upload the file
                item_full_path = item.absolute().as_posix()
                self.create_remote_file(item.name, item_full_path, parent_id=parent_id)

        return root_dir_id
# ################################################################################################################################
# ################################################################################################################################
| 11,678
|
Python
|
.py
| 199
| 48.733668
| 130
| 0.516564
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,082
|
ftp.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/ftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
from copy import deepcopy
from threading import RLock
from traceback import format_exc
# pyfilesystem
from fs.ftpfs import FTPFS
# Zato
from zato.common.api import SECRET_SHADOW, TRACE1
from zato.common.exception import Inactive
# Python2/3 compatibility
from zato.common.ext.future.utils import PY2
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class FTPFacade(FTPFS):
    """ A thin wrapper around fs's FTPFS so it looks like the other Zato connection objects.
    """
    def conn(self):
        # Returns self so callers can uniformly ask any Zato connection wrapper
        # for its underlying connection object.
        return self
# ################################################################################################################################
# ################################################################################################################################
class FTPStore:
    """ An object through which services access FTP connections.
    """
    def __init__(self):
        # Maps connection names to their configuration parameters
        self.conn_params = {}

        # Guards access to self.conn_params
        self._lock = RLock()

    def _add(self, params):
        """ Adds one set of params to the list of connection parameters.
        Must not be called without holding onto self._lock
        """
        self.conn_params[params.name] = params

        msg = 'FTP params added:`{!r}`'

        if logger.isEnabledFor(TRACE1):
            logger.log(TRACE1, msg.format(params))
        elif logger.isEnabledFor(logging.DEBUG):
            # At DEBUG level, mask the password before logging
            params = deepcopy(params)
            params['password'] = SECRET_SHADOW
            logger.debug(params)

    def add_params(self, params_list):
        """ Adds multiple sets of connection parameters at once.
        """
        with self._lock:
            for params in params_list:
                self._add(params)

    def get_conn_names(self):
        """ Returns a list of UTF-8 connection names this store contains, sorted in ascending order.
        """
        with self._lock:
            return [elem.encode('utf-8') for elem in sorted(self.conn_params)]

    def _get(self, params):
        """ Builds a new FTPFacade out of active params; raises Inactive otherwise.
        """
        if params.is_active:
            # Default to 180 seconds unless a timeout was configured
            timeout = float(params.timeout) if params.timeout else 180

            # Python 2 vs. Python 3 builds of Zato have different versions
            # of the 'fs' dependency which in turn has a different API to its __init__ method
            # which is why 'dircache' cannot be used with Python 3.
            init_params = [params.host, params.user, params.get('password'), params.acct, timeout, int(params.port)]

            if PY2:
                init_params.append(params.dircache)

            return FTPFacade(*init_params)
        else:
            raise Inactive(params.name)

    def get(self, name):
        """ Returns a connection by name - raises KeyError if unknown.
        """
        with self._lock:
            params = self.conn_params[name]
            return self._get(params)

    def get_by_id(self, connection_id):
        """ Returns a connection by its numeric ID - raises ValueError if unknown.
        """
        with self._lock:
            for params in self.conn_params.values():
                if params.id == connection_id:
                    return self._get(params)
            else:
                raise ValueError('FTP connection not found `{}`'.format(connection_id))

    def create_edit(self, params, old_name):
        """ Creates a new connection or updates an existing one, closing
        the previous connection object if there was any.
        """
        with self._lock:
            if params:
                _name = old_name if old_name else params.name

                # NOTE(review): this looks _name up on the incoming params object itself -
                # presumably self.conn_params.get(_name) was intended so that the previous
                # connection gets closed; confirm against callers.
                ftp = params.get(_name)

                try:
                    if ftp:
                        ftp.close()
                except Exception:
                    msg = 'Could not close the FTP connection `{}`, e:`{}`'.format(params.name, format_exc())
                    logger.warning(msg)
                finally:
                    # Store the new params regardless of whether closing succeeded,
                    # dropping the old entry if the connection was renamed.
                    self._add(params)
                    if old_name and old_name != params.name:
                        del self.conn_params[old_name]

                msg = 'FTP connection stored, name:`{}`, old_name:`{}`'.format(params.name, old_name)
                logger.info(msg)

    def change_password(self, name, password):
        """ Updates the stored password of a named connection.
        """
        with self._lock:
            self.conn_params[name].password = password
            logger.info('Password updated - FTP connection `{}`'.format(name))

    def delete(self, name):
        """ Deletes a named connection - raises KeyError if unknown.
        """
        with self._lock:
            del self.conn_params[name]
            logger.info('FTP connection `{}` deleted'.format(name))
# ################################################################################################################################
# ################################################################################################################################
| 6,319
|
Python
|
.py
| 116
| 46.025862
| 130
| 0.38796
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,083
|
amqp_.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/amqp_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=attribute-defined-outside-init
# stdlib
from datetime import datetime, timedelta
from logging import getLogger
from socket import error as socket_error
from traceback import format_exc
# amqp
from amqp.exceptions import ConnectionError as AMQPConnectionError
# gevent
from gevent import sleep, spawn
# Kombu
from kombu import Connection, Consumer as _Consumer, pools, Queue
from kombu.transport.pyamqp import Connection as PyAMQPConnection, SSLTransport, Transport
# Python 2/3 compatibility
from zato.common.ext.future.utils import itervalues
from zato.common.py23_.past.builtins import xrange
# Zato
from zato.common.api import AMQP, CHANNEL, SECRET_SHADOW
from zato.common.version import get_version
from zato.common.util.api import get_component_name
from zato.server.connection.connector import Connector, Inactive
# ################################################################################################################################
if 0:
from bunch import Bunch
from typing import Any, Callable
Any = Any
Bunch = Bunch
Callable = Callable
# ################################################################################################################################
version = get_version()
logger = getLogger(__name__)
# ################################################################################################################################
# Message attributes copied from input onto outgoing messages when present -
# presumably consumed by the outgoing-connection code below/elsewhere; confirm.
_default_out_keys=('app_id', 'content_encoding', 'content_type', 'delivery_mode', 'expiration', 'priority', 'user_id')

# ################################################################################################################################

# Maps Zato-level acknowledgement modes to Kombu's no_ack consumer flag
no_ack = {
    AMQP.ACK_MODE.ACK.id: False,
    AMQP.ACK_MODE.REJECT.id: True,
}
# ################################################################################################################################
def _is_tls_config(config):
# type: (Bunch) -> bool
return config.conn_url.startswith('amqps://')
# ################################################################################################################################
class _AMQPMessage:
__slots__ = ('body', 'impl')
def __init__(self, body, impl):
self.body = body
self.impl = impl
# ################################################################################################################################
class _AMQPProducers:
""" Encapsulates information about producers used by outgoing AMQP connection to send messages to a broker.
Each outgoing connection has one _AMQPProducers object assigned.
"""
def __init__(self, config):
# type: (dict)
self.config = config
self.name = self.config.name
self.get_conn_class_func = config.get_conn_class_func
self.name = config.name
self.conn = self.get_conn_class_func(
'out/{}'.format(self.config.name), _is_tls_config(self.config))(self.config.conn_url, frame_max=self.config.frame_max)
# Kombu uses a global object to keep all connections in (pools.connections) but we cannot use it
# because multiple channels or outgoing connections may be using the same definition,
# thus we need to create a new connection group for each _AMQPProducers object.
connections = pools.register_group(pools.Connections(limit=self.config.pool_size))
class _Producers(pools.Producers):
def create(self, connection, limit):
return pools.ProducerPool(connections[connection], limit=limit)
self.pool = _Producers(limit=self.config.pool_size)
def acquire(self, *args, **kwargs):
return self.pool[self.conn].acquire(*args, **kwargs)
def stop(self):
for pool in itervalues(self.pool):
pool.connections.force_close_all()
# ################################################################################################################################
class Consumer:
    """ Consumes messages from AMQP queues. There is one Consumer object for each Zato AMQP channel.
    """
    def __init__(self, config, on_amqp_message):
        # type: (Bunch, Callable)
        self.config = config
        self.name = self.config.name
        self.queue = [Queue(self.config.queue)]
        self.on_amqp_message = on_amqp_message
        self.keep_running = True
        self.is_stopped = False
        self.is_connected = False # Instance-level flag indicating whether we have an active connection now.
        self.timeout = 0.35

    def _on_amqp_message(self, body, msg):
        """ A thin wrapper around the user-provided callback - exceptions are logged rather than propagated
        so that the consumer's mainloop keeps running.
        """
        try:
            return self.on_amqp_message(body, msg, self.name, self.config)
        except Exception:
            logger.warning(format_exc())

# ################################################################################################################################

    def _get_consumer(self, _no_ack=no_ack, _gevent_sleep=sleep):
        """ Creates a new connection and consumer to an AMQP broker.
        """

        # We cannot assume that we will obtain the consumer right-away. For instance, the remote end
        # may not be currently available when we are starting. It's OK to block indefinitely (or until self.keep_running is False)
        # because we run in our own greenlet.
        consumer = None
        err_conn_attempts = 0

        while not consumer:
            if not self.keep_running:
                break

            try:
                conn = self.config.conn_class(self.config.conn_url)
                consumer = _Consumer(conn, queues=self.queue, callbacks=[self._on_amqp_message],
                    no_ack=_no_ack[self.config.ack_mode], tag_prefix='{}/{}'.format(
                        self.config.consumer_tag_prefix, get_component_name('amqp-consumer')))
                consumer.qos(prefetch_size=0, prefetch_count=self.config.prefetch_count, apply_global=False)
                consumer.consume()
            except Exception:
                err_conn_attempts += 1
                noun = 'attempts' if err_conn_attempts > 1 else 'attempt'
                logger.info('Could not create an AMQP consumer for channel `%s` (%s %s so far), e:`%s`',
                    self.name, err_conn_attempts, noun, format_exc())

                # It's fine to sleep for a longer time because if this exception happens it means that we cannot connect
                # to the server at all, which will likely mean that it is down.
                if self.keep_running:
                    _gevent_sleep(2)

        if err_conn_attempts > 0:
            noun = 'attempts' if err_conn_attempts > 1 else 'attempt'
            logger.info('Created an AMQP consumer for channel `%s` after %s %s', self.name, err_conn_attempts, noun)

        return consumer

# ################################################################################################################################

    def start(self, conn_errors=(socket_error, IOError, OSError), _gevent_sleep=sleep):
        """ Runs the AMQP consumer's mainloop.
        """
        try:
            connection = None
            consumer = self._get_consumer()
            self.is_connected = True

            # Local aliases.
            timeout = self.timeout

            # Since heartbeats run frequently (self.timeout may be a fraction of a second), we don't want to log each
            # and every error. Instead we log errors each log_every times.
            hb_errors_so_far = 0
            log_every = 20

            while self.keep_running:
                try:
                    connection = consumer.connection

                    # Do not assume the consumer still has the connection, it may have been already closed, we don't know.
                    # Unfortunately, the only way to check it is to invoke the method and catch AttributeError
                    # if connection is already None.
                    try:
                        connection.drain_events(timeout=timeout)
                    except AttributeError:
                        consumer = self._get_consumer()

                # Special-case AMQP-level connection errors and recreate the connection if any is caught.
                except AMQPConnectionError:
                    logger.warning('Caught AMQP connection error in mainloop e:`%s`', format_exc())
                    if connection:
                        connection.close()
                        consumer = self._get_consumer()

                # Regular network-level errors - assume the AMQP connection is still fine and treat it
                # as an opportunity to perform the heartbeat.
                except conn_errors:
                    try:
                        connection.heartbeat_check()
                    except Exception:
                        hb_errors_so_far += 1
                        if hb_errors_so_far % log_every == 0:
                            logger.warning('Exception in heartbeat (%s so far), e:`%s`', hb_errors_so_far, format_exc())

                        # Ok, we've lost the connection, set the flag to False and sleep for some time then.
                        if not connection:
                            self.is_connected = False

                        if self.keep_running:
                            _gevent_sleep(timeout)
                    else:
                        # Reset heartbeat errors counter since we have apparently succeeded.
                        hb_errors_so_far = 0

                        # If there was not any exception but we did not have a previous connection it means that a previously
                        # established connection was broken so we need to recreate it.
                        # But, we do it only if we are still told to keep running.
                        if self.keep_running:
                            if not self.is_connected:
                                consumer = self._get_consumer()
                                self.is_connected = True

            if connection:
                logger.info('Closing connection for `%s`', consumer)
                connection.close()
            self.is_stopped = True # Set to True if we break out of the main loop.

        except Exception:
            logger.warning('Unrecoverable exception in consumer, e:`%s`', format_exc())

# ################################################################################################################################

    def stop(self):
        """ Stops the consumer and wait for the confirmation that it actually is not running anymore.
        """
        # Imported here because this module does not import datetime at module level -
        # without this import the method would raise NameError at runtime.
        from datetime import datetime, timedelta

        self.keep_running = False

        # Wait until actually stopped.
        if not self.is_stopped:

            # self.timeout is multiplied by 2 because it's used twice in the main loop in self.start
            # plus a bit of additional time is added.
            now = datetime.utcnow()
            delta = (self.timeout * 2) + 0.2
            until = now + timedelta(seconds=delta)

            while now < until:
                sleep(0.1)
                now = datetime.utcnow()
                if self.is_stopped:
                    return

            if not self.is_connected:
                return

            # If we get here it means that we did not stop in the time expected, raise an exception in that case.
            raise Exception('Consumer for channel `{}` did not stop in the expected time of {}s.'.format(
                self.name, delta))
# ################################################################################################################################
class ConnectorAMQP(Connector):
    """ An AMQP connector under which channels or outgoing connections run.
    """
    start_in_greenlet = True

# ################################################################################################################################

    def _get_conn_class(self, suffix, is_tls):
        """ Subclasses below are needed so as to be able to return per-greenlet/thread/process/definition
        information in an AMQP connection's zato.* properties and, except for zato.version,
        this information is not available on module level hence the classes are declared here,
        in particular, we need access to self.config.name and suffix which are available only in run-time.
        """

        class _PyAMQPConnection(PyAMQPConnection):
            def __init__(_py_amqp_self, *args, **kwargs):
                super(_PyAMQPConnection, _py_amqp_self).__init__(client_properties={
                    'zato.component':'{}/{}'.format(get_component_name('amqp-conn'), suffix),
                    'zato.version':version,
                    'zato.definition.name':self.config.name,
                }, *args, **kwargs)

        class _AMQPTransport(SSLTransport if is_tls else Transport):
            Connection = _PyAMQPConnection

        class _AMQPConnection(Connection):
            def get_transport_cls(self):
                return _AMQPTransport

        return _AMQPConnection

# ################################################################################################################################

    def _start(self):
        self._consumers = {}
        self._producers = {}
        self.config.conn_url = self._get_conn_string()

        self.is_connected = True

        test_conn = self._get_conn_class('test-conn', _is_tls_config(self.config))(
            self.config.conn_url, frame_max=self.config.frame_max)
        test_conn.connect()
        self.is_connected = test_conn.connected

        # Close the connection object which was needed only to confirm that the remote end can be reached.
        # Then in run-time, when connections are needed by producers or consumers, they will be opened by kombu anyway.
        # In this manner we can at least know rightaway that something is wrong with the connection's definition
        # without having to wait for a producer/consumer to be first time used. Naturally, it is possible
        # that the connection will work now but then it won't when it's needed but this is unrelated to the fact
        # that if we can already report that the connection won't work now, then we should do it so that an error message
        # can be logged as early as possible.
        test_conn.close()

# ################################################################################################################################

    def _stop(self):
        self._stop_consumers()
        self._stop_producers()

# ################################################################################################################################

    def on_amqp_message(self, body, msg, channel_name, channel_config, _AMQPMessage=_AMQPMessage, _CHANNEL_AMQP=CHANNEL.AMQP,
        _RECEIVED='RECEIVED', _ZATO_ACK_MODE_ACK=AMQP.ACK_MODE.ACK.id):
        """ Invoked each time a message is taken off an AMQP queue.
        """
        self.on_message_callback(
            channel_config['service_name'], body, channel=_CHANNEL_AMQP,
            data_format=channel_config['data_format'],
            zato_ctx={'zato.channel_item': { # noqa: JS101
                'id': channel_config.id,
                'name': channel_config.name,
                'is_internal': False,
                'amqp_msg': msg,
            }}) # noqa: JS101

        # Ack or reject the message ourselves only if the transport has not done it already.
        if msg._state == _RECEIVED:
            if channel_config['ack_mode'] == _ZATO_ACK_MODE_ACK:
                msg.ack()
            else:
                msg.reject()

# ################################################################################################################################

    def _get_conn_string(self, needs_password=True, _amqp_prefix=('amqp://', 'amqps://')):
        """ Builds an AMQP connection string out of our configuration, optionally masking the password for logging purposes.
        """
        host = self.config.host
        for name in _amqp_prefix:
            if host.startswith(name):
                host = host.replace(name, '')
                prefix = name
                break
        else:
            prefix = 'amqp://'

        conn_string = '{}{}:{}@{}:{}/{}'.format(prefix, self.config.username,
            self.config.password if needs_password else SECRET_SHADOW, host, self.config.port, self.config.vhost)

        return conn_string

# ################################################################################################################################

    def get_log_details(self):
        return self._get_conn_string(False)

# ################################################################################################################################

    def _enrich_channel_config(self, config):
        config.conn_class = self._get_conn_class('channel/{}'.format(config.name), _is_tls_config(self.config))
        config.conn_url = self.config.conn_url

# ################################################################################################################################

    def create_channels(self):
        """ Sets up AMQP consumers for all channels.
        """
        for config in itervalues(self.channels):
            self._enrich_channel_config(config)
            for _x in xrange(config.pool_size):
                spawn(self._create_consumer, config)

# ################################################################################################################################

    def create_outconns(self):
        """ Sets up AMQP producers for outgoing connections. Called when the connector starts up thus it only creates producers
        because self.outconns entries are already available.
        """
        with self.lock:
            for config in itervalues(self.outconns):
                self._create_producers(config)

# ################################################################################################################################

    def _create_consumer(self, config):
        # type: (Bunch)
        """ Creates an AMQP consumer for a specific queue and starts it.
        """
        consumer = Consumer(config, self.on_amqp_message)
        self._consumers.setdefault(config.name, []).append(consumer)
        if config.is_active:
            consumer.start()

# ################################################################################################################################

    def _create_producers(self, config):
        # type: (dict)
        """ Creates outgoing AMQP producers using kombu.
        """
        config.conn_url = self.config.conn_url
        config.frame_max = self.config.frame_max
        config.get_conn_class_func = self._get_conn_class
        self._producers[config.name] = _AMQPProducers(config)

# ################################################################################################################################

    def _stop_consumers(self):
        for config in self.channels.values():
            self._delete_channel(config, False)

# ################################################################################################################################

    def _stop_producers(self):
        for producer in itervalues(self._producers):
            try:
                producer.stop()
            except Exception:
                logger.warning('Could not stop AMQP producer `%s`, e:`%s`', producer.name, format_exc())
            else:
                logger.info('Stopped producer for outconn `%s` in AMQP connector `%s`', producer.name, self.config.name)

# ################################################################################################################################

    def _create_channel(self, config):
        # type: (dict)
        """ Creates a channel. Must be called with self.lock held.
        """
        self.channels[config.name] = config
        self._enrich_channel_config(config)
        for _x in xrange(config.pool_size):
            spawn(self._create_consumer, config)

# ################################################################################################################################

    def create_channel(self, config):
        """ Creates a channel.
        """
        with self.lock:
            self._create_channel(config)

        logger.info('Added channel `%s` to AMQP connector `%s`', config.name, self.config.name)

# ################################################################################################################################

    def edit_channel(self, config):
        # type: (dict)
        """ Obtains self.lock and updates a channel
        """
        with self.lock:
            self._delete_channel(config)
            self._create_channel(config)

        old_name = ' ({})'.format(config.old_name) if config.old_name != config.name else ''
        logger.info('Updated channel `%s`%s in AMQP connector `%s`', config.name, old_name, config.def_name)

# ################################################################################################################################

    def _delete_channel(self, config, delete_from_channels=True):
        # type: (dict)
        """ Deletes a channel. Must be called with self.lock held.
        """
        # Closing consumers may take time so we report the progress after about each 5% of consumers is closed,
        # or, if there are ten consumers or less, after each connection is closed.
        consumers = self._consumers.get(config.name)

        # There will be no consumer objects if pool_size is 0.
        if consumers:
            total = len(consumers)
            progress_after = int(round(total * 0.05)) if total > 10 else 1
            noun = 'consumer' if total == 1 else 'consumers'

            for idx, consumer in enumerate(consumers, 1):
                consumer.stop()
                if idx % progress_after == 0:
                    if idx != total:
                        logger.info(
                            'Stopped %s/%s %s for channel `%s` in AMQP connector `%s`', idx, total, noun, config.name,
                            self.config.name)

            logger.info('Stopped %s/%s %s for channel `%s` in AMQP connector `%s`',
                total, total, noun, config.name, self.config.name)

            del self._consumers[config.name]

        # Note that we do not always delete from self.channels because they may be needed in our super-class,
        # in particular, in its self.edit method.
        if delete_from_channels:
            del self.channels[config.name]

# ################################################################################################################################

    def delete_channel(self, config):
        # type: (dict)
        """ Obtains self.lock and deletes a channel.
        """
        with self.lock:
            self._delete_channel(config)

        logger.info('Deleted channel `%s` from AMQP connector `%s`', config.name, self.config.name)

# ################################################################################################################################

    def _create_outconn(self, config):
        # type: (dict)
        """ Creates an outgoing connection. Must be called with self.lock held.
        """
        self.outconns[config.name] = config
        self._create_producers(config)

# ################################################################################################################################

    def create_outconn(self, config):
        # type: (dict)
        """ Creates an outgoing connection.
        """
        with self.lock:
            self._create_outconn(config)

        logger.info('Added outconn `%s` to AMQP connector `%s`', config.name, self.config.name)

# ################################################################################################################################

    def edit_outconn(self, config):
        # type: (dict)
        """ Obtains self.lock and updates an outgoing connection.
        """
        with self.lock:
            self._delete_outconn(config)
            self._create_outconn(config)

        old_name = ' ({})'.format(config.old_name) if config.old_name != config.name else ''
        logger.info('Updated outconn `%s`%s in AMQP connector `%s`', config.name, old_name, config.def_name)

# ################################################################################################################################

    def _delete_outconn(self, config):
        # type: (dict)
        """ Deletes an outgoing connection. Must be called with self.lock held.
        """
        # It will be old_name if this is an edit and name if it a deletion.
        _name = config.get('old_name') or config.name
        self._producers[_name].stop()
        del self._producers[_name]
        del self.outconns[_name]

# ################################################################################################################################

    def delete_outconn(self, config):
        # type: (dict)
        """ Obtains self.lock and deletes an outgoing connection.
        """
        with self.lock:
            self._delete_outconn(config)

        logger.info('Deleted outconn `%s` from AMQP connector `%s`', config.name, self.config.name)

# ################################################################################################################################

    def invoke(self, out_name, msg, exchange='/', routing_key=None, properties=None, headers=None,
        _default_out_keys=_default_out_keys, **kwargs):
        # type: (str, str, str, str, dict, dict, Any, Any)
        """ Synchronously publishes a message to an AMQP broker.
        """
        with self.lock:
            outconn_config = self.outconns[out_name]

        # Don't do anything if this connection is not active
        if not outconn_config['is_active']:
            raise Inactive('Connection is inactive `{}` ({})'.format(out_name, self._get_conn_string(False)))

        acquire_block = kwargs.pop('acquire_block', True)

        # Note: this must pop 'acquire_timeout' - previously it popped 'acquire_block' again
        # (already removed above), so any caller-supplied timeout was silently ignored.
        acquire_timeout = kwargs.pop('acquire_timeout', None)

        # Dictionary of kwargs is built based on user input falling back to the defaults
        # as specified in the outgoing connection's configuration.
        properties = properties or {}
        kwargs = {'exchange':exchange, 'routing_key':routing_key, 'mandatory':kwargs.get('mandatory')}

        for key in _default_out_keys:
            # The last 'or None' is needed because outconn_config[key] may return '' which is considered
            # to be a valid value by kombu/pyamqp but not by AMQP brokers. For instance with user_id=''
            # RabbitMQ will complain that this value is not the same as the one used to open the connection,
            # however, it will accept the message with user_id=None, thus it is added at the end.
            kwargs[key] = properties.pop(key, None) or outconn_config[key] or None

        # Merge in anything that is still left in user-defined properties.
        if properties:
            kwargs.update(properties)

        with self._producers[out_name].acquire(acquire_block, acquire_timeout) as producer:
            return producer.publish(msg, headers=headers, **kwargs)
# ################################################################################################################################
| 27,474
|
Python
|
.py
| 478
| 46.987448
| 130
| 0.50324
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,084
|
facade.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/facade.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from datetime import datetime, timedelta, timezone
# Arrow
from arrow import Arrow
# datetutil
from dateutil.parser import parse as dt_parse
from dateutil.tz.tz import tzutc
# Zato
from zato.common.api import SCHEDULER
from zato.common.json_internal import dumps
################################################################################################################################
################################################################################################################################
if 0:
from requests import Response
from zato.common.typing_ import any_, callnone
from zato.server.base.parallel import ParallelServer
from zato.server.config import ConfigDict
from zato.server.connection.http_soap.outgoing import HTTPSOAPWrapper
from zato.server.service import Service
################################################################################################################################
################################################################################################################################
# UTC timezone object - start dates given to the scheduler are converted to it in SchedulerFacade.onetime.
_utz_utc = timezone.utc
################################################################################################################################
################################################################################################################################
class SchedulerFacade:
    """ The API through which jobs can be scheduled.
    """
    def __init__(self, server:'ParallelServer') -> 'None':
        self.server = server

# ################################################################################################################################

    def onetime(
        self,
        invoking_service, # type: Service
        target_service,   # type: any_
        name='',          # type: str
        *,
        prefix='',        # type: str
        start_date='',    # type: any_
        after_seconds=0,  # type: int
        after_minutes=0,  # type: int
        data=''           # type: any_
    ) -> 'int':
        """ Schedules a service to run at a specific date and time or after N minutes or seconds.
        Returns the ID of the newly created job.
        """

        # This is reusable
        now = self.server.time_util.utcnow(needs_format=False)

        # We are given a start date on input ..
        if start_date:
            if not isinstance(start_date, datetime):

                # This gives us a datetime object but we need to ensure
                # that it is in UTC because this is what the scheduler expects.
                start_date = dt_parse(start_date)

                if not isinstance(start_date.tzinfo, tzutc):
                    _as_arrow = Arrow.fromdatetime(start_date)
                    start_date = _as_arrow.to(_utz_utc)

        # .. or we need to compute one ourselves.
        else:
            start_date = now + timedelta(seconds=after_seconds, minutes=after_minutes)

        # This is the service that is scheduling a job ..
        invoking_name = invoking_service.get_name()

        # .. and this is the service that is being scheduled.
        target_name = target_service if isinstance(target_service, str) else target_service.get_name()

        # Construct a name for the job
        name = name or '{}{} -> {} {} {}'.format(
            '{} '.format(prefix) if prefix else '',
            invoking_name,
            target_name,
            now.isoformat(),
            invoking_service.cid,
        )

        # This is what the service being invoked will receive on input
        if data:
            data = dumps({
                SCHEDULER.EmbeddedIndicator: True,
                'data': data
            })

        # Now, we are ready to create a new job ..
        response = self.server.invoke(
            'zato.scheduler.job.create', {
                'cluster_id': self.server.cluster_id,
                'name': name,
                'is_active': True,
                'job_type': SCHEDULER.JOB_TYPE.ONE_TIME,
                'service': target_name,
                'start_date': start_date,
                'extra': data
            }
        )

        # .. check if we shouldn't go further to extract the actual response ..
        if 'id' not in response:
            response = response['zato_scheduler_job_create_response']

        # .. and return its ID to the caller.
        return response['id'] # type: ignore
# ################################################################################################################################
# ################################################################################################################################
class RESTFacade:
    """ A facade through which self.rest calls can be made.
    """
    cid: 'str'
    _out_plain_http: 'ConfigDict'
    name_prefix: 'str' = ''
    needs_facade: 'bool' = True
    has_path_in_args: 'bool' = False
    before_call_func: 'callnone' = None
    after_call_func: 'callnone' = None

    def init(self, cid:'str', _out_plain_http:'ConfigDict') -> 'None':
        """ Stores the correlation ID and the config of outgoing REST connections for later use.
        """
        self.cid = cid
        self._out_plain_http = _out_plain_http

# ################################################################################################################################

    def _get(self, orig_name:'str', needs_prefix:'bool'=True) -> 'RESTInvoker':
        """ Returns an invoker for the connection of the given name, resolving $-prefixed environment variables first.
        """
        # A name that starts with a dollar sign points to an environment variable holding the actual name ..
        if orig_name.startswith('$'):
            name = os.environ[orig_name.replace('$', '', 1)]

        # .. any other name is taken as it is.
        else:
            name = orig_name

        # Prepend the optional prefix
        if needs_prefix:
            name = self.name_prefix + name

        # Raises KeyError if there is no connection of that name ..
        config_item = self._out_plain_http[name]

        # .. otherwise, wrap it in our own facade.
        return RESTInvoker(config_item.conn, self)

# ################################################################################################################################

    def __getitem__(self, name:'str') -> 'RESTInvoker':
        return self._get(name)

# ################################################################################################################################

    def __getattr__(self, attr_name:'str') -> 'RESTInvoker':

        # Apply the optional prefix up front
        attr_name = self.name_prefix + attr_name

        # An exact match wins ..
        try:
            return self._get(attr_name, needs_prefix=False)
        except KeyError:
            # .. which is fine, there was no such connection ..
            pass

        # .. otherwise, look the name up among the filesystem-safe names of all connections ..
        for config in self._out_plain_http.get_config_list():
            if config['name_fs_safe'] == attr_name:
                return self._get(config['name'], needs_prefix=False)

        # .. if it is not there either, there is no such connection at all.
        raise KeyError(f'No such connection `{attr_name}`')
# ################################################################################################################################
# ################################################################################################################################
class RESTInvoker:
    """ Invokes a single outgoing REST connection on behalf of its RESTFacade container.
    """
    conn: 'HTTPSOAPWrapper'
    container: 'RESTFacade'

    def __init__(self, conn:'HTTPSOAPWrapper', container:'RESTFacade') -> 'None':
        self.conn = conn
        self.container = container

# ################################################################################################################################

    def call_rest_func(self, func_name:'str', conn_name:'str', *args:'any_', **kwargs:'str') -> 'any_':
        """ Runs the underlying connection's method of the given name, surrounding it with the container's optional hooks.
        """
        # The actual method to invoke
        target = getattr(self.conn, func_name)

        # Run the optional pre-invocation hook
        before = self.container.before_call_func
        if before:
            before(func_name, conn_name, self.conn, *args, **kwargs)

        # Do invoke the actual method
        out = target(self.container.cid, *args, **kwargs)

        # Run the optional post-invocation hook
        after = self.container.after_call_func
        if after:
            after(func_name, conn_name, self.conn, out, *args, **kwargs)

        # Hand the result back to our caller
        return out

# ################################################################################################################################

    def call_wrapper(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ A common wrapper around all the HTTP verbs - normalizes arguments before invoking the connection.
        """
        # This will be always the same
        conn_name = self.conn.config['name']

        # The first positional argument names the verb, the rest are the call's own arguments
        func_name, *rest = args

        # A pre-facade REST call may still carry the CID - it is not needed here, so drop it
        if rest and rest[0] == self.container.cid:
            rest = rest[1:]

        # Depending on what kind of an invoker this is, build the path that we actually want to access.
        if self.container.has_path_in_args:
            _zato_path = rest.pop(0) if rest else '/zato-no-path-given'

            # We know we will be always able to populate this key with some value
            kwargs.setdefault('params', {})['_zato_path'] = _zato_path

        return self.call_rest_func(func_name, conn_name, *rest, **kwargs)

# ################################################################################################################################

    def get(self, *args:'any_', **kwargs:'str') -> 'any_':
        return self.call_wrapper('get', *args, **kwargs)

    def delete(self, *args:'any_', **kwargs:'str') -> 'any_':
        return self.call_wrapper('delete', *args, **kwargs)

    def options(self, *args:'any_', **kwargs:'str') -> 'any_':
        return self.call_wrapper('options', *args, **kwargs)

    def post(self, *args:'any_', **kwargs:'str') -> 'any_':
        return self.call_wrapper('post', *args, **kwargs)

    send = post

    def put(self, *args:'any_', **kwargs:'str') -> 'any_':
        return self.call_wrapper('put', *args, **kwargs)

    def patch(self, *args:'any_', **kwargs:'str') -> 'any_':
        return self.call_wrapper('patch', *args, **kwargs)

    def ping(self, *args:'any_', **kwargs:'str') -> 'any_':
        return self.call_wrapper('ping', *args, **kwargs)

    def upload(self, *args:'any_', **kwargs:'str') -> 'any_':
        return self.call_wrapper('upload', *args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
class KeysightVisionFacade(RESTFacade):
    """ A REST facade specialized for Keysight Vision connections - connection names carry
    a fixed prefix and the request path travels in positional arguments.
    """
    name_prefix = 'KeysightVision.'
    has_path_in_args = True
# ################################################################################################################################
# ################################################################################################################################
class KeysightHawkeyeFacade(RESTFacade):
    """ A REST facade specialized for Keysight Hawkeye connections - connection names carry
    a fixed prefix and the request path travels in positional arguments.
    """
    name_prefix = 'KeysightHawkeye.'
    has_path_in_args = True

# ################################################################################################################################

    def before_call_func(self, func_name, conn_name, conn, *args, **kwargs):
        # type: (str, str, HTTPSOAPWrapper, any_, str) -> any_
        """ Invoked before each underlying REST call - currently a no-op placeholder.
        """
        pass

# ################################################################################################################################

    def after_call_func(self, func_name, conn_name, conn, result, *args, **kwargs):
        # type: (str, str, HTTPSOAPWrapper, Response, any_, str) -> any_
        """ Invoked after each underlying REST call - currently a no-op placeholder.
        """
        pass
# ################################################################################################################################
# ################################################################################################################################
class KeysightContainer:
    """ Groups the Keysight Vision and Hawkeye facades under a single object.
    """
    vision: 'KeysightVisionFacade'
    hawkeye: 'KeysightHawkeyeFacade'

    def init(self, cid:'str', _out_plain_http:'ConfigDict') -> 'None':
        """ Creates and initializes both facades with the same CID and outgoing connection config.
        """
        vision = KeysightVisionFacade()
        vision.init(cid, _out_plain_http)
        self.vision = vision

        hawkeye = KeysightHawkeyeFacade()
        hawkeye.init(cid, _out_plain_http)
        self.hawkeye = hawkeye
# ################################################################################################################################
# ################################################################################################################################
| 13,532
|
Python
|
.py
| 261
| 43.64751
| 130
| 0.432646
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,085
|
email.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/email.py
|
# -# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from base64 import b64decode
from contextlib import contextmanager
from io import BytesIO
from logging import getLogger, INFO
from mimetypes import guess_type as guess_mime_type
from traceback import format_exc
# imbox
from zato.common.ext.imbox import Imbox as _Imbox
from zato.common.ext.imbox.imap import ImapTransport as _ImapTransport
from zato.common.ext.imbox.parser import parse_email, Struct
# Outbox
from zato.server.ext.outbox import AnonymousOutbox, Attachment, Email, Outbox
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import basestring, unicode
# Zato
from zato.common.api import IMAPMessage, EMAIL
from zato.server.connection.cloud.microsoft_365 import Microsoft365Client
from zato.server.store import BaseAPI, BaseStore
# ################################################################################################################################
# ################################################################################################################################
if 0:
from O365.mailbox import MailBox
from O365.message import Message as MS365Message
from zato.common.typing_ import any_, anylist
MailBox = MailBox
MS365Message = MS365Message
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# Maps Zato-level SMTP connection modes to the transport security names
# expected by the Outbox library (None = plaintext connection).
_modes = {
    EMAIL.SMTP.MODE.PLAIN: None,
    EMAIL.SMTP.MODE.SSL: 'SSL',
    EMAIL.SMTP.MODE.STARTTLS: 'TLS'
}
# ################################################################################################################################
# ################################################################################################################################
class GenericIMAPMessage(IMAPMessage):
    """ A message read from a generic IMAP server; operations are delegated
    to the connection object that produced this message.
    """

    def delete(self):
        # Deletes this message on the server, identified by its UID.
        self.conn.delete(self.uid)

    def mark_seen(self):
        # Marks this message as seen on the server, identified by its UID.
        self.conn.mark_seen(self.uid)
# ################################################################################################################################
# ################################################################################################################################
class Microsoft365IMAPMessage(IMAPMessage):
    """ A message read from Microsoft 365; operations are delegated
    to the underlying native O365 message object.
    """

    # The native O365 message this object wraps
    impl: 'MS365Message'

    def delete(self):
        _ = self.impl.delete()

    def mark_seen(self):
        _ = self.impl.mark_as_read()
# ################################################################################################################################
# ################################################################################################################################
class Imbox(_Imbox):
    """ A subclass of the vendored Imbox library that connects using Zato's
    IMAP configuration and adds UID-based search/fetch helpers.
    """

    def __init__(self, config, config_no_sensitive):
        # config_no_sensitive is kept for repr only, so secrets are not logged.
        self.config = config
        self.config_no_sensitive = config_no_sensitive
        self.server = ImapTransport(self.config.host, self.config.port, self.config.mode==EMAIL.IMAP.MODE.SSL)
        self.connection = self.server.connect(self.config.username, self.config.password or '', self.config.debug_level)

    def __repr__(self):
        return '<{} at {}, config:`{}`>'.format(self.__class__.__name__, hex(id(self)), self.config_no_sensitive)

    def fetch_by_uid(self, uid):
        """ Fetches and parses a single message by UID. BODY.PEEK is used
        so that fetching does not set the message's \\Seen flag.
        """
        message, data = self.connection.uid('fetch', uid, '(BODY.PEEK[])')
        raw_email = data[0][1]

        if not isinstance(raw_email, unicode):
            raw_email = raw_email.decode('utf8')

        email_object = parse_email(raw_email)
        return email_object

    def search(self, criteria):
        """ Returns a list of message UIDs matching the given IMAP search criteria. """
        message, data = self.connection.uid('search', None, criteria)
        return data[0].split()

    def fetch_list(self, criteria):
        """ Yields (uid, parsed_message) pairs for all messages matching the criteria. """
        uid_list = self.search(criteria)

        for uid in uid_list:
            yield (uid, self.fetch_by_uid(uid))

    def close(self):
        """ Closes the underlying IMAP session. """
        self.connection.close()
# ################################################################################################################################
# ################################################################################################################################
class ImapTransport(_ImapTransport):
    """ An IMAP transport that logs in and selects the default mailbox on connect. """

    def connect(self, username, password, debug_level):
        self.server.debug = debug_level
        self.server.login(username, password)

        # Select the default mailbox up front so callers can issue commands immediately.
        self.server.select()

        return self.server
# ################################################################################################################################
# ################################################################################################################################
class EMailAPI:
    """ A container object combining the SMTP and IMAP connection APIs. """

    def __init__(self, smtp, imap):
        # type: (SMTPAPI, IMAPAPI) -> None
        self.smtp = smtp
        self.imap = imap
# ################################################################################################################################
# ################################################################################################################################
class _Connection:
def __repr__(self):
return '<{} at {}, config:`{}`>'.format(self.__class__.__name__, hex(id(self)), self.config_no_sensitive)
# ################################################################################################################################
# ################################################################################################################################
class SMTPConnection(_Connection):
    """ A connection to an SMTP server, optionally authenticated,
    used to send e-mail messages via the Outbox library.
    """

    def __init__(self, config, config_no_sensitive):
        self.config = config
        self.config_no_sensitive = config_no_sensitive

        # Positional arguments shared by both authenticated and anonymous outboxes.
        self.conn_args = [
            self.config.host.encode('utf-8'),
            int(self.config.port),
            self.config.mode_outbox,
            self.config.is_debug,
            self.config.timeout
        ]

        # If any credentials are configured, use the authenticated Outbox class
        # and prepend username/password so they come first in the argument list.
        if config.username or config.password:

            password = (self.config.password or '')
            username = (self.config.username or '')

            self.conn_class = Outbox

            self.conn_args.insert(0, password)
            self.conn_args.insert(0, username)

        else:
            self.conn_class = AnonymousOutbox

# ################################################################################################################################

    def send(self, msg, from_=None) -> 'bool':
        """ Sends the given message, returning True on success and False otherwise.
        Optional from_ overrides the message's own sender address.
        """

        # Bug fix: use a local dict everywhere below - msg.headers itself may be None,
        # in which case the previous `'From' not in msg.headers` check raised TypeError.
        headers = msg.headers or {}
        atts = []

        if msg.attachments:
            for item in msg.attachments:
                contents = item['contents']
                contents = contents.encode('utf8') if isinstance(contents, unicode) else contents
                att = Attachment(item['name'], BytesIO(contents))
                atts.append(att)

        if 'From' not in headers:
            headers['From'] = msg.from_

        if msg.cc and 'CC' not in headers:
            headers['CC'] = ', '.join(msg.cc) if not isinstance(msg.cc, basestring) else msg.cc

        if msg.bcc and 'BCC' not in headers:
            headers['BCC'] = ', '.join(msg.bcc) if not isinstance(msg.bcc, basestring) else msg.bcc

        # Plain-text and HTML bodies are mutually exclusive.
        body, html_body = (None, msg.body) if msg.is_html else (msg.body, None)

        email = Email(msg.to, msg.subject, body, html_body, msg.charset, headers, msg.is_rfc2231)

        try:
            with self.conn_class(*self.conn_args) as conn:
                conn.send(email, atts, from_ or msg.from_)
        except Exception:

            # Log what happened ..
            logger.warning('Could not send an SMTP message to `%s`, e:`%s`', self.config_no_sensitive, format_exc())

            # .. and tell the caller that the message was not sent.
            return False

        else:
            # Optionally, log what happened ..
            if logger.isEnabledFor(INFO):
                atts_info = ', '.join(att.name for att in atts) if atts else None
                logger.info('SMTP message `%r` sent from `%r` to `%r`, attachments:`%r`',
                    msg.subject, msg.from_, msg.to, atts_info)

            # .. and tell the caller that the message was sent successfully.
            return True
# ################################################################################################################################
# ################################################################################################################################
class SMTPAPI(BaseAPI):
    """ API to obtain SMTP connections through.
    All functionality is inherited from BaseAPI.
    """
# ################################################################################################################################
# ################################################################################################################################
class SMTPConnStore(BaseStore):
    """ Stores connections to SMTP.
    """
    def create_impl(self, config, config_no_sensitive):
        # Translate the Zato-level mode into what the Outbox library expects.
        config.mode_outbox = _modes[config.mode]
        return SMTPConnection(config, config_no_sensitive)
# ################################################################################################################################
# ################################################################################################################################
class _IMAPConnection(_Connection):
    """ An abstract base class for IMAP connections - subclasses provide
    the actual get/ping/delete/mark_seen implementations.
    """

    def __init__(self, config, config_no_sensitive):
        # config_no_sensitive is used by the inherited repr, so secrets are not logged.
        self.config = config
        self.config_no_sensitive = config_no_sensitive

    def get(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def ping(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def delete(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def mark_seen(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')
# ################################################################################################################################
# ################################################################################################################################
class GenericIMAPConnection(_IMAPConnection):
    """ A connection to a generic IMAP server, based on the Imbox library.
    Each operation opens a short-lived connection that is always closed afterwards.
    """

    @contextmanager
    def get_connection(self):
        """ Yields a new Imbox connection, guaranteeing cleanup of both
        the IMAP session and the underlying socket, even on exceptions.
        """
        conn = Imbox(self.config, self.config_no_sensitive)
        try:
            yield conn
        finally:
            # Previously this cleanup ran only on the success path,
            # leaking the socket if the block using the connection raised.
            conn.close()
            conn.server.server.sock.close()

# ################################################################################################################################

    def get(self, folder='INBOX'):
        """ Yields (uid, GenericIMAPMessage) pairs from the given folder,
        matching the connection's configured search criteria.
        """
        with self.get_connection() as conn: # type: Imbox
            conn.connection.select(folder)
            for uid, msg in conn.fetch_list(' '.join(self.config.get_criteria.splitlines())):
                yield (uid, GenericIMAPMessage(uid, conn, msg))

# ################################################################################################################################

    def ping(self):
        """ Pings the server with a NOOP command. """
        with self.get_connection() as conn: # type: Imbox
            conn.connection.noop()

# ################################################################################################################################

    def delete(self, *uids):
        """ Flags the given UIDs as deleted, then expunges the mailbox. """
        with self.get_connection() as conn: # type: Imbox
            for uid in uids:
                # Bug fix: this used to call self.connection.uid, but no such attribute
                # exists on this class - the IMAP connection lives on the Imbox instance.
                _ = conn.connection.uid('STORE', uid, '+FLAGS', '(\\Deleted)')
            conn.connection.expunge()

# ################################################################################################################################

    def mark_seen(self, *uids):
        """ Marks the given UIDs as seen. """
        with self.get_connection() as conn: # type: Imbox
            for uid in uids:
                conn.connection.uid('STORE', uid, '+FLAGS', '\\Seen')
# ################################################################################################################################
# ################################################################################################################################
class Microsoft365IMAPConnection(_IMAPConnection):
    """ A connection to Microsoft 365 mailboxes, exposing them through
    the same interface as generic IMAP connections.
    """

    def _extract_list_of_addresses(self, native_elem:'any_') -> 'anylist':
        """ Turns an iterable of O365 address objects into a list of name/email dicts. """
        out = []
        elems = ((elem.name, elem.address) for elem in list(native_elem))

        # .. try to extract the recipients of the message ..
        for display_name, email in elems:
            out.append({
                'name': display_name,
                'email': email,
            })

        return out

# ################################################################################################################################

    def _extract_attachments(self, native_message:'MS365Message') -> 'anylist':
        """ Extracts the message's attachments into a list of dicts, decoding
        each attachment's Base64 content into a BytesIO object.
        """

        # Our response to produce
        out = []

        attachments = list(native_message.attachments)

        for elem in attachments:

            # Fall back to text/plain if the MIME type cannot be guessed from the name.
            mime_type, _ = guess_mime_type(elem.name)
            if not mime_type:
                mime_type = 'text/plain'

            # Attachment content is Base64-encoded by O365; an empty attachment becomes b''.
            content = elem.content
            if content:
                content = b64decode(content)
            else:
                content = b''

            size = len(content)
            content = BytesIO(content)

            out.append({
                'filename': elem.name,
                'size': size,
                'content': content,
                'content-type': mime_type
            })

        return out

# ################################################################################################################################

    def _convert_to_imap_message(self, msg_id:'str', native_message:'MS365Message') -> 'IMAPMessage':
        """ Converts a native O365 message into a Microsoft365IMAPMessage whose
        internal data layout mirrors what generic IMAP connections produce.
        """

        # A dict object to base the resulting message's struct on ..
        data_dict = {}

        # .. the message's body (always HTML)..
        body = {}
        body['plain'] = []
        body['html'] = [native_message.body]
        data_dict['body'] = body

        # .. who sent the message ..
        sent_from = {
            'name': native_message.sender.name,
            'email': native_message.sender.address
        }

        sent_to = self._extract_list_of_addresses(native_message.to)
        sent_cc = self._extract_list_of_addresses(native_message.cc)

        # .. populate the correspondents fields ..
        data_dict['sent_from'] = [sent_from]
        data_dict['sent_to'] = sent_to
        data_dict['cc'] = sent_cc

        # .. populate attachments ..
        attachments = self._extract_attachments(native_message)
        data_dict['attachments'] = attachments

        # .. build the remaining fields ..
        data_dict['message_id'] = msg_id
        data_dict['subject'] = native_message.subject

        # .. build the messages internal Struct object (as in generic IMAP connections) ..
        data = Struct(**data_dict)

        # .. now, construct the response ..
        out = Microsoft365IMAPMessage(msg_id, self, data)
        out.impl = native_message

        # .. and return it to our caller.
        return out

# ################################################################################################################################

    def _get_mailbox(self) -> 'MailBox':
        """ Returns a handle to the configured user's mailbox over a newly built client. """

        # Obtain a new connection ..
        client = Microsoft365Client(self.config)

        # .. get a handle to the user's underlying mailbox ..
        mailbox = client.impl.mailbox(resource=self.config['username'])

        # .. and return it to the caller.
        return mailbox

# ################################################################################################################################

    def get(self, folder='INBOX', filter=None):
        """ Yields (msg_id, IMAPMessage) pairs from the given folder. The filter,
        defaulting to the configured criteria, is passed as an O365 query.
        """
        filter = filter or self.config['filter_criteria']

        # By default, we have nothing to return.
        default = []

        # Obtain a handle to a mailbox ..
        mailbox = self._get_mailbox()

        # .. try to look up a folder by its name ..
        folder = mailbox.get_folder(folder_name=folder)

        # .. if found, we can return all of its messages ..
        if folder:
            messages = folder.get_messages(limit=10_000, query=filter, download_attachments=True)
            for item in messages:
                msg_id = item.internet_message_id
                imap_message = self._convert_to_imap_message(msg_id, item)
                yield msg_id, imap_message
        else:
            for item in default:
                yield item

# ################################################################################################################################

    def ping(self):
        """ Confirms connectivity by listing the mailbox's folders. """
        mailbox = self._get_mailbox()
        result = mailbox.get_folders()
        return result
# ################################################################################################################################
# ################################################################################################################################
class IMAPConnStore(BaseStore):
    """ Stores connections to IMAP.
    """

    # Maps server types to the classes implementing connections to them.
    _impl_class = {
        EMAIL.IMAP.ServerType.Generic: GenericIMAPConnection,
        EMAIL.IMAP.ServerType.Microsoft365: Microsoft365IMAPConnection,
    }

    def create_impl(self, config, config_no_sensitive):
        # Default to a generic IMAP server if no type is configured.
        server_type = config.server_type or EMAIL.IMAP.ServerType.Generic
        class_ = self._impl_class[server_type]
        instance = class_(config, config_no_sensitive)
        return instance
# ################################################################################################################################
# ################################################################################################################################
class IMAPAPI(BaseAPI):
    """ API to obtain IMAP connections through.
    All functionality is inherited from BaseAPI.
    """
# ################################################################################################################################
# ################################################################################################################################
| 17,949
|
Python
|
.py
| 332
| 46.156627
| 130
| 0.427991
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,086
|
stats.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/stats.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# gevent
from gevent.lock import RLock
# Zato
from zato.common.events.client import Client as EventsClient
from zato.common.events.common import EventInfo, PushCtx
from zato.common.util.api import new_cid
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# Shorthands for the event and object type constants used when pushing service statistics.
event_type_req = EventInfo.EventType.service_request
event_type_resp = EventInfo.EventType.service_response

object_type_service = EventInfo.CommonObject.service
# ################################################################################################################################
# ################################################################################################################################
class ServiceStatsClient:
    """ Pushes service invocation events to the stats backend, buffering them
    in a local backlog until a connection to the backend is established.
    """

    def __init__(self, impl_class=None):
        # type: (object) -> None
        self.host = '<ServiceStatsClient-host>'
        self.port = -1
        self.impl = None # type: EventsClient
        self.impl_class = impl_class or EventsClient

        # Events accumulated before (or between) backend connections.
        self.backlog = []

        # Guards access to both self.impl and self.backlog.
        self.lock = RLock()

# ################################################################################################################################

    def init(self, host, port):
        # type: (str, int) -> None
        self.host = host
        self.port = port

        with self.lock:
            self.impl = self.impl_class(self.host, self.port)
            self.impl.connect()

# ################################################################################################################################

    def run(self):
        # Delegates to the underlying events client's main loop.
        self.impl.run()

# ################################################################################################################################

    def _push_backlog(self):
        """ Pushes an event to the backend, assuming that we have access to the backend already.
        """
        # type: () -> None

        # Make sure we are connected to the backend ..
        if self.impl:

            # .. ensure no updates to the backlog while we run ..
            with self.lock:

                # .. get all enqueued events (iterating over a copy so removal is safe) ..
                for item in self.backlog[:]: # type: PushCtx

                    # .. push each to the backend ..
                    self.impl.push(item)

                    # .. and remove it from the queue ..
                    self.backlog.remove(item)

# ################################################################################################################################

    def push(self, cid, timestamp, service_name, is_request, total_time_ms=0, id=None):
        """ Accepts information about the service, enqueues it as a push context and tries to empty the backlog.
        The reason we first need the backlog is that we may not be connected to the backend yet
        when this method executes. That is we need a staging area, a backlog, first.
        """
        # type: (str, str, str, bool, int, str) -> None

        # Fill out the details of a context object ..
        ctx = PushCtx()
        ctx.id = id or new_cid()
        ctx.cid = cid
        ctx.timestamp = timestamp
        ctx.event_type = event_type_req if is_request else event_type_resp
        ctx.object_type = object_type_service
        ctx.object_id = service_name
        ctx.total_time_ms = total_time_ms

        # .. push the event to the backlog queue, using a lock to ensure the backlog is not modified in between ..
        with self.lock:
            self.backlog.append(ctx)

        # .. and try to send it to the backend now.
        self._push_backlog()

# ################################################################################################################################

    def get_table(self):
        # Returns the backend's current statistics table.
        with self.lock:
            return self.impl.get_table()

# ################################################################################################################################

    def sync_state(self):
        # Asks the backend to synchronize its state.
        with self.lock:
            self.impl.sync_state()
# ################################################################################################################################
# ################################################################################################################################
| 4,973
|
Python
|
.py
| 89
| 48.651685
| 130
| 0.387423
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,087
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/sms/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
if 0:
from zato.server.connection.sms.twilio import TwilioAPI
TwilioAPI = TwilioAPI
# ################################################################################################################################
class SMSAPI:
    """ A container for SMS connection APIs - currently Twilio only. """

    def __init__(self, twilio):
        # type: (TwilioAPI) -> None
        self.twilio = twilio
# ################################################################################################################################
| 832
|
Python
|
.py
| 16
| 48.6875
| 130
| 0.363073
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,088
|
twilio.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/sms/twilio.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Twilio
from twilio.rest import Client as _TwilioClient
# Zato
from zato.server.store import BaseAPI, BaseStore
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
class TwilioAPI(BaseAPI):
    """ API to obtain Twilio connections through.
    All functionality is inherited from BaseAPI.
    """
# ################################################################################################################################
def send(twilio_messages, config):
    """ Builds a function that sends a Twilio message 'body' to phone number 'to'
    from the phone number 'from_'. Attached dynamically to all Twilio connections
    so that Zato users can rely on default values from configuration.
    """
    def _send(_zato_body, _zato_to=None, _zato_from=None, **kwargs):
        """ Actually sends the message.
        """
        # Resolve each field in order of precedence: explicit Zato argument,
        # configured default, then a plain keyword argument, if any.
        params = {}
        params['body'] = _zato_body
        params['to'] = _zato_to or config.default_to or kwargs.pop('to', None)
        params['from_'] = _zato_from or config.default_from or kwargs.pop('from_', None)

        # Remaining keyword arguments are passed through verbatim
        # and may override the values resolved above.
        params.update(kwargs)

        return twilio_messages.create(**params)

    return _send
# ################################################################################################################################
class TwilioConnStore(BaseStore):
    """ Stores connections to Twilio.
    """
    def create_impl(self, config, config_no_sensitive):
        # The Twilio messages object serves as the connection,
        # with a Zato-aware .send function attached dynamically.
        twilio_messages = _TwilioClient(config.account_sid, config.auth_token).messages
        twilio_messages.send = send(twilio_messages, config)
        return twilio_messages
# ################################################################################################################################
| 2,145
|
Python
|
.py
| 41
| 47.682927
| 130
| 0.474365
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,089
|
microsoft_365.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/cloud/microsoft_365.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import stranydict
from O365 import Account as Office365Account
Office365Account = Office365Account
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class Microsoft365Client:
    """ A client to Microsoft 365, wrapping an authenticated O365 Account object. """

    def __init__(self, config:'stranydict') -> 'None':
        self.config = config
        self.impl = self.impl_from_config(config)

        # Fail fast if the credentials or tenant details are invalid.
        self.ping()

# ################################################################################################################################

    def impl_from_config(self, config:'stranydict') -> 'Office365Account':
        """ Builds and authenticates an O365 Account object out of a config dict.
        Note that the config is modified in place (opaque attributes are merged into it).
        """

        # stdlib
        from json import loads

        # Office-365
        from O365 import Account

        # Opaque attributes are stored as a JSON string; merge them into the main config.
        opaque1 = config.pop('opaque1', None) or '{}'
        opaque1 = loads(opaque1)
        config.update(opaque1)

        scopes = config.get('scopes') or []

        tenant_id = config['tenant_id']
        client_id = config['client_id']

        # The secret may be stored under different keys, depending on the config's origin.
        secret_value = config.get('secret_value') or config.get('secret') or config['password']

        credentials = (client_id, secret_value)

        account = Account(credentials, auth_flow_type='credentials', tenant_id=tenant_id)
        _ = account.authenticate(scopes=scopes)

        return account

# ################################################################################################################################

    def api(self) -> 'Office365Account':
        """ Returns a new, authenticated Account object based on this client's config. """
        out = self.impl_from_config(self.config)
        return out

# ################################################################################################################################

    def ping(self):
        """ Confirms connectivity by looking up the current user. """
        result = self.impl.get_current_user()
        logger.info('Microsoft 365 ping result (%s) -> `%s`', self.config['name'], result)
# ################################################################################################################################
# ################################################################################################################################
| 2,998
|
Python
|
.py
| 50
| 54.38
| 130
| 0.333561
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,090
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/cloud/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,091
|
s3.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/cloud/aws/s3.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Boto
from boto.s3.bucket import Bucket
from boto.s3.connection import NoHostProvided, S3Connection
from boto.s3.key import Key
# Zato
from zato.common.api import ZATO_NONE
from zato.common.util.api import parse_extra_into_dict
from zato.server.connection.queue import Wrapper
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class _S3Connection:
    """ A thin wrapper around boto's S3Connection that stores Zato-level defaults
    (bucket, content type, metadata, storage class, encryption) for use in .set calls.
    """

    def __init__(self, **kwargs):
        # Pop the Zato-specific settings first - whatever remains is passed to boto as-is.
        self.zato_default_bucket = kwargs.pop('bucket')
        self.zato_content_type = kwargs.pop('content_type')
        self.zato_metadata = kwargs.pop('metadata')
        encrypt_at_rest = kwargs.pop('encrypt_at_rest')

        # Boto expects the server-side encryption algorithm's name, or None to disable it.
        self.zato_encrypt_at_rest = 'AES256' if encrypt_at_rest else None
        self.zato_storage_class = kwargs.pop('storage_class')

        self.impl = S3Connection(**kwargs)

    def check_connection(self):
        # Any authenticated server roundtrip suffices as a connectivity check.
        self.impl.get_canonical_user_id()

    def set(self, key, value, bucket=ZATO_NONE, content_type=ZATO_NONE, metadata=ZATO_NONE,
            storage_class=ZATO_NONE, encrypt_at_rest=ZATO_NONE):
        """ Stores a value under the given key. For each optional parameter,
        the ZATO_NONE sentinel means 'use the connection's configured default'.
        """
        _bucket = Bucket(self.impl, bucket if bucket != ZATO_NONE else self.zato_default_bucket)
        _key = Key(_bucket)
        _key.content_type = content_type if content_type != ZATO_NONE else self.zato_content_type
        _key.metadata.update(metadata if metadata != ZATO_NONE else parse_extra_into_dict(self.zato_metadata, False))
        _key.name = key
        _key.storage_class = storage_class if storage_class != ZATO_NONE else self.zato_storage_class

        _key.set_contents_from_string(
            value, encrypt_key=(encrypt_at_rest if encrypt_at_rest != ZATO_NONE else self.zato_encrypt_at_rest))
# ################################################################################################################################
# ################################################################################################################################
class S3Wrapper(Wrapper):
    """ Wraps a queue of connections to AWS S3.
    """
    def __init__(self, config, server):
        config.auth_url = config.address
        super(S3Wrapper, self).__init__(config, 'AWS S3', server)

    def add_client(self):
        """ Builds a new S3 connection from config, verifies it and adds it to the queue. """
        conn = _S3Connection(aws_access_key_id=self.config.username, aws_secret_access_key=self.config.password,
            debug=self.config.debug_level,
            suppress_consec_slashes=self.config.suppr_cons_slashes, content_type=self.config.content_type,
            metadata=self.config.metadata_ or {}, bucket=self.config.bucket, encrypt_at_rest=self.config.encrypt_at_rest,
            storage_class=self.config.storage_class, host=self.server.fs_server_config.misc.aws_host or NoHostProvided)

        # Sanity check - no exception here means the config is correct.
        conn.check_connection()

        self.client.put_client(conn)
# ################################################################################################################################
# ################################################################################################################################
| 3,769
|
Python
|
.py
| 60
| 57.016667
| 130
| 0.502304
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,092
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/cloud/aws/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,093
|
msg.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/web_socket/msg.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from http.client import BAD_REQUEST, FORBIDDEN, NOT_FOUND, OK
from json import dumps
from logging import getLogger
from traceback import format_exc
# Bunch
from bunch import Bunch
# Zato
from zato.common.api import DATA_FORMAT, WEB_SOCKET
from zato.common.util.api import make_repr, new_cid
from zato.cy.reqresp.payload import SimpleIOPayload
# Past builtins
from zato.common.py23_.past.builtins import basestring
# ################################################################################################################################
# ################################################################################################################################
# Module-level logger shared by the message classes below
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
# This is an indication from our WSX gateway that the actual response, if any, is wrapped in that element
wsx_gateway_response_elem = WEB_SOCKET.GatewayResponseElem
# ################################################################################################################################
# ################################################################################################################################
# Human-readable copy for the error statuses we can send back to clients
copy_bad_request = 'Bad request'
copy_forbidden = 'You are not authorized to access this resource'
copy_not_found = 'Not found'

# Pre-serialized JSON error payloads, keyed first by HTTP status code and then by data format
error_response = {
    status_code: {DATA_FORMAT.JSON: dumps({'error': message}).encode('latin1')}
    for status_code, message in (
        (BAD_REQUEST, copy_bad_request),
        (FORBIDDEN, copy_forbidden),
        (NOT_FOUND, copy_not_found),
    )
}
# ################################################################################################################################
class MSG_TYPE:
    """ Wire-level message-type identifiers exchanged between the WSX server and its clients.
    All values share the common 'zwsx.' prefix.
    """
    _COMMON = 'zwsx.{}'

    # A request sent from server to client
    REQ_TO_CLIENT = _COMMON.format('rqc')

    # Responses to authentication and to regular successful requests
    RESP_AUTH = _COMMON.format('rspa')
    RESP_OK = _COMMON.format('rspok')

    # A message from server indicating an error, no response from client is expected
    MSG_ERR = _COMMON.format('merr')

    # As above but in response to a previous request from client
    RESP_ERROR = _COMMON.format('rsperr')

    # A publish/subscribe message from server to client
    PUBSUB_REQ = _COMMON.format('psrq')

    # A client response to a previous pub/sub request
    PUBSUB_RESP = _COMMON.format('psrsp')
# ################################################################################################################################
class ClientMessage:
    """ An individual message received from a WebSocket client.
    Populated field by field while the raw JSON is parsed (see WebSocket.parse_json).
    """
    def __init__(self):

        # Protocol-level metadata
        self.action = None
        self.service = None
        self.id = None
        self.timestamp = None
        self.cid = new_cid()
        self.in_reply_to = None

        # Credentials and authentication state
        self.sec_type = None
        self.username = None
        self.password = None
        self.has_credentials = None
        self.token = None
        self.is_auth = False
        self.secret = ''

        # Peer identification
        self.ext_client_name = ''
        self.ext_client_id = None
        self.client_attrs = {}

        # Pub/sub routing
        self.reply_to_sk = None
        self.deliver_to_sk = None

        # Business payload
        self.data = Bunch()

    def __repr__(self):
        return make_repr(self)

    def get_meta(self, attrs=('action', 'service', 'id', 'timestamp', 'cid', 'in_reply_to', 'ext_client_id', 'ext_client_name')):
        """ Returns a dict with the selected metadata attributes of this message.
        """
        return {name: getattr(self, name) for name in attrs}
# ################################################################################################################################
class ServerMessage:
    """ A message sent from a WebSocket server to a client.
    """
    is_response = True

    def __init__(self, msg_type, cid, in_reply_to=None, status=OK, error_message='', ctx=None, _now=datetime.utcnow):
        self.id = cid
        self.in_reply_to = in_reply_to
        self.data = Bunch()
        self.meta = Bunch(id=self.id, timestamp=_now().isoformat(), msg_type=msg_type)

        if ctx:
            self.meta.ctx = ctx

        # Only responses carry a status and, optionally, correlation and error details
        if self.is_response:
            self.meta.status = status

            if in_reply_to:
                self.meta.in_reply_to = in_reply_to

            if error_message:
                self.meta.error_message = error_message

    def _extract_payload(self):
        """ Returns self.data converted to a client-facing value - unwraps SimpleIOPayload
        responses (including WSX gateway wrappers) and decodes bytes to text.
        """
        if isinstance(self.data, SimpleIOPayload):
            value = self.data.getvalue(serialize=False)
            keys = list(value.keys())
            if len(keys) != 1:
                raise ValueError('Unexpected data `{}`'.format(value))
            value = value[keys[0]]

            # If this is a response from a WSX gateway (helpers.web-sockets-gateway),
            # we need to extract its actual payload.
            if isinstance(value, dict) and wsx_gateway_response_elem in value:
                value = value[wsx_gateway_response_elem]
        else:
            value = self.data

        if isinstance(value, basestring) and not isinstance(value, str):
            value = value.decode('utf8')

        return value

    def serialize(self, _dumps_func):
        """ Serialize server message to client. Note that we make it as small as possible because control messages
        in WebSockets (opcode >= 0x07) must have at most 125 bytes.
        """
        msg = {'meta': self.meta}
        try:
            if self.data:
                msg['data'] = self._extract_payload()
            return _dumps_func(msg)
        except Exception:
            logger.warning('Exception while serializing message `%r`, e:`%s`', msg, format_exc())
            raise
# ################################################################################################################################
class AuthenticateResponse(ServerMessage):
    """ A response to a successful create-session (authentication) request, carrying the session token.
    """
    def __init__(self, token, cid, *args, **kwargs):
        super().__init__(MSG_TYPE.RESP_AUTH, cid, *args, **kwargs)
        self.data.token = token
# ################################################################################################################################
class OKResponse(ServerMessage):
    """ A successful response to a previous client request, carrying an arbitrary payload.
    """
    def __init__(self, cid, in_reply_to, data, *ignored_args, **ignored_kwargs):
        super().__init__(MSG_TYPE.RESP_OK, cid, in_reply_to)
        self.data = data
# ################################################################################################################################
class ErrorResponse(ServerMessage):
    """ An error response to a previous client request - the payload carries only the correlation ID.
    """
    def __init__(self, cid, in_reply_to, status, error_message):
        super().__init__(MSG_TYPE.RESP_ERROR, cid, in_reply_to, status, error_message)
        self.data = {'cid': cid}
# ################################################################################################################################
class InvokeClientRequest(ServerMessage):
    """ A server-initiated request to the connected client - not a response, hence no status metadata.
    """
    is_response = False

    def __init__(self, cid, data, ctx, _msg_type=MSG_TYPE.REQ_TO_CLIENT):
        super().__init__(_msg_type, cid, ctx=ctx)
        self.data = data
class InvokeClientPubSubRequest(InvokeClientRequest):
    """ As InvokeClientRequest, but tagged with the pub/sub request message type.
    """
    def __init__(self, cid, data, ctx, _msg_type=MSG_TYPE.PUBSUB_REQ):
        super().__init__(cid, data, ctx, _msg_type)
# ################################################################################################################################
class Forbidden(ServerMessage):
    """ An error message telling the client it is not authorized - no reply from the client is expected.
    """
    is_response = True

    def __init__(self, cid, data):
        super().__init__(MSG_TYPE.MSG_ERR, cid, status=FORBIDDEN)
        self.data = data
# ################################################################################################################################
| 8,280
|
Python
|
.py
| 165
| 42.466667
| 130
| 0.470048
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,094
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/web_socket/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# ################################################################################################################################
# ################################################################################################################################
class _UTF8Validator:
""" A pass-through UTF-8 validator for ws4py - we do not need for this layer
to validate UTF-8 bytes because we do it anyway during JSON parsing.
"""
def validate(*ignored_args:'any_', **ignored_kwargs:'any_') -> 'any_':
return True, True, None, None
def reset(*ignored_args:'any_', **ignored_kwargs:'any_') -> 'any_':
pass
# Monkey-patch ws4py's streaming module so that it uses the pass-through validator above
# instead of performing full UTF-8 validation for each incoming frame.
from zato.server.ext.ws4py import streaming
streaming.Utf8Validator = _UTF8Validator
# ################################################################################################################################
# ################################################################################################################################
# stdlib
from datetime import datetime, timedelta
from http.client import BAD_REQUEST, FORBIDDEN, INTERNAL_SERVER_ERROR, NOT_FOUND, responses, UNPROCESSABLE_ENTITY
from json import loads as stdlib_loads
from logging import DEBUG, getLogger
from traceback import format_exc
from urllib.parse import urlparse
# Bunch
from bunch import Bunch, bunchify
# gevent
from gevent import sleep, socket, spawn
from gevent.lock import RLock
from gevent.pywsgi import WSGIServer as _Gevent_WSGIServer
# ws4py
from zato.server.ext.ws4py.exc import HandshakeError
from zato.server.ext.ws4py.websocket import WebSocket as _WebSocket
from zato.server.ext.ws4py.server.geventserver import GEventWebSocketPool, WebSocketWSGIHandler
from zato.server.ext.ws4py.server.wsgiutils import WebSocketWSGIApplication
# Zato
from zato.common.api import CHANNEL, DATA_FORMAT, PUBSUB, SEC_DEF_TYPE, WEB_SOCKET
from zato.common.audit_log import DataReceived, DataSent
from zato.common.exception import ParsingException, Reportable, RuntimeInvocationError
from zato.common.pubsub import HandleNewMessageCtx, MSG_PREFIX, PubSubMessage
from zato.common.typing_ import cast_
from zato.common.util.api import new_cid, parse_extra_into_dict
from zato.common.util.hook import HookTool
from zato.common.util.json_ import JSONParser
from zato.common.util.python_ import get_python_id
from zato.common.util.wsx import cleanup_wsx_client, ContextHandler
from zato.common.vault_ import VAULT
from zato.server.connection.connector import Connector
from zato.server.connection.web_socket.msg import AuthenticateResponse, InvokeClientRequest, ClientMessage, copy_forbidden, \
error_response, ErrorResponse, Forbidden, OKResponse, InvokeClientPubSubRequest
from zato.server.pubsub.delivery.tool import PubSubTool
# ################################################################################################################################
# Type hints only - this block is never executed at runtime; it exists so that
# static analyzers and IDEs can resolve the names used in string annotations below.
if 0:
    from gevent._socketcommon import SocketMixin
    from zato.common.audit_log import DataEvent
    from zato.common.model.wsx import WSXConnectorConfig
    from zato.common.typing_ import any_, anydict, anylist, boolnone, callable_, callnone, intnone, optional, stranydict, strset
    from zato.server.base.parallel import ParallelServer

    # Self-assignments, presumably to silence linters' unused-import warnings
    DataEvent = DataEvent
    ParallelServer = ParallelServer
    WSXConnectorConfig = WSXConnectorConfig
# ################################################################################################################################
# Module-level loggers; the isEnabledFor results are computed once at import time
# so hot paths can check a plain boolean instead of calling into the logging machinery.
logger = getLogger('zato_web_socket')
logger_has_debug = logger.isEnabledFor(DEBUG)

logger_zato = getLogger('zato')
logger_zato_has_debug = logger_zato.isEnabledFor(DEBUG)

# ################################################################################################################################

# JSON serializer names accepted by WebSocket._set_json_dump_func
_supported_json_dumps = {'stdlib', 'zato_default', 'rapidjson', 'bson', 'orjson'}

# Local aliases avoiding repeated attribute lookups
_now=datetime.utcnow
_timedelta=timedelta

# ################################################################################################################################

# Pre-rendered HTTP status lines and their latin1-encoded byte forms
http400 = '{} {}'.format(BAD_REQUEST, responses[BAD_REQUEST])
http400_bytes = http400.encode('latin1')

http403 = '{} {}'.format(FORBIDDEN, responses[FORBIDDEN])
http403_bytes = http403.encode('latin1')

http404 = '{} {}'.format(NOT_FOUND, responses[NOT_FOUND])
http404_bytes = http404.encode('latin1')

# ################################################################################################################################

# WSGI keys holding complex Python objects (sockets, streams) - dropped from the stored environ in WebSocket._init
_wsgi_drop_keys = ('ws4py.socket', 'wsgi.errors', 'wsgi.input')

# ################################################################################################################################

# Custom WebSocket close codes sent to peers
code_invalid_utf8 = 4001
code_pings_missed = 4002

# ################################################################################################################################

# Sentinel distinguishing "argument not given" from an explicit None
_missing = object()

# ################################################################################################################################

# Maps WSGI keys to our own
new_conn_map_config = {
    'REMOTE_ADDR': 'remote_addr',
    'HTTP_X_FORWARDED_FOR': 'forwarded_for',
    'PATH_INFO': 'path_info',
    'REMOTE_PORT': 'remote_port',
    'HTTP_USER_AGENT': 'user_agent',
    'SERVER_NAME': 'server_name',
    'SERVER_PORT': 'server_port',
    'REQUEST_METHOD': 'http_method',
}

# Log template describing a new incoming connection, filled in with the mapped WSGI values above
new_conn_pattern = ('{remote_addr}:{remote_port} -> {channel_name} -> fwd:{forwarded_for} -> ' \
    '{server_name}:{server_port}{path_info} -> ({user_agent} - {http_method})')
# ################################################################################################################################
class close_code:
    """ WebSocket close codes used by this server when shutting connections down. """
    runtime_invoke_client = 3701
    runtime_background_ping = 3702
    unhandled_error = 3703
    runtime_error = 4003
    # NOTE(review): connection_error duplicates runtime_error's value (4003) - confirm this is intentional
    connection_error = 4003
    default_closed = 4004
    # NOTE(review): 'diconnect' looks like a typo for 'disconnect' but the name is public - renaming would break callers
    default_diconnect = 4005
# ################################################################################################################################
# Name of the HTTP header carrying Vault token responses
VAULT_TOKEN_HEADER=VAULT.HEADERS.TOKEN_RESPONSE

# ################################################################################################################################

# Maps hook types to the method names a hook service is expected to implement
hook_type_to_method = {
    WEB_SOCKET.HOOK_TYPE.ON_CONNECTED: 'on_connected',
    WEB_SOCKET.HOOK_TYPE.ON_DISCONNECTED: 'on_disconnected',
    WEB_SOCKET.HOOK_TYPE.ON_PUBSUB_RESPONSE: 'on_pubsub_response',
    WEB_SOCKET.HOOK_TYPE.ON_VAULT_MOUNT_POINT_NEEDED: 'on_vault_mount_point_needed',
}

# ################################################################################################################################

# Error message text - presumably matched against ws4py's "terminated websocket" errors; verify against callers
_cannot_send = 'Cannot send on a terminated websocket'

# Key under which WSX connections are registered with the audit log
_audit_msg_type = WEB_SOCKET.AUDIT_KEY

# ################################################################################################################################

# Size threshold for logged messages - assumed to be a truncation limit; TODO confirm where it is applied
log_msg_max_size = 8192

# Default interval between updates of pub/sub interaction metadata - see WebSocket.set_last_interaction_data
_interact_update_interval = WEB_SOCKET.DEFAULT.INTERACT_UPDATE_INTERVAL

# ################################################################################################################################

# Convenience aliases for commonly used WEB_SOCKET sub-objects
ExtraProperties = WEB_SOCKET.ExtraProperties
WebSocketAction = WEB_SOCKET.ACTION
# ################################################################################################################################
class HookCtx:
    """ Carries the connection details handed to WSX hook services on input.
    Any slot not provided as a keyword argument defaults to None.
    """
    __slots__ = (
        'hook_type', 'config', 'pub_client_id', 'ext_client_id', 'ext_client_name', 'connection_time', 'user_data',
        'forwarded_for', 'forwarded_for_fqdn', 'peer_address', 'peer_host', 'peer_fqdn', 'peer_conn_info_pretty', 'msg'
    )

    def __init__(self, hook_type:'str', *args:'any_', **kwargs:'any_') -> 'None':
        self.hook_type = hook_type

        # Fill every remaining slot from keyword arguments, defaulting to None
        for attr in self.__slots__:
            if attr == 'hook_type':
                continue
            setattr(self, attr, kwargs.get(attr))
# ################################################################################################################################
class TokenInfo:
    """ Holds a token's value along with its TTL-driven expiration metadata.
    """
    def __init__(self, value:'any_', ttl:'int'):
        self.value = value
        self.ttl = ttl
        self.creation_time = _now()

        # Start from the creation time, then immediately push the expiration TTL seconds ahead
        self.expires_at = self.creation_time
        self.extend()

    def extend(self, extend_by:'intnone'=None):
        """ Pushes the expiration further into the future - by extend_by seconds if given, otherwise by the default TTL.
        """
        seconds = extend_by if extend_by else self.ttl
        self.expires_at += _timedelta(seconds=seconds)
# ################################################################################################################################
class WebSocket(_WebSocket):
""" Encapsulates information about an individual connection from a WebSocket client.
"""
store_ctx: 'bool'
ctx_file: 'ContextHandler'
client_attrs: 'stranydict'
def __init__(
    self,
    container:'any_',
    config:'WSXConnectorConfig',
    _unusued_sock:'any_',      # (sic - the 'unusued' spelling is kept; renaming could break keyword callers)
    _unusued_protocols:'any_',
    _unusued_extensions:'any_',
    wsgi_environ:'anydict',
    **kwargs:'any_'
) -> 'None':
    """ Sets up per-connection state and only then invokes the parent class's
    constructor, which may begin processing the connection.
    """
    # The object containing this WebSocket
    self.container = container

    # Note: configuration object is shared by all WebSockets and any writes will be visible to all of them
    self.config = config

    # This is needed for API completeness with non-Zato WSX clients
    self.url = self.config.address

    # For later reference
    self.initial_http_wsgi_environ = wsgi_environ

    # Referred to soon enough so created here
    self.pub_client_id = 'ws.{}'.format(new_cid())

    # A dictionary of attributes that each client can send across
    self.client_attrs = {}

    # Zato parallel server this WebSocket runs on
    self.parallel_server = cast_('ParallelServer', self.config.parallel_server)

    # JSON dumps function can be overridden by users
    self._json_dump_func = self._set_json_dump_func()

    # A reusable JSON parser
    self._json_parser = JSONParser()

    if config.extra_properties:
        self.extra_properties = stdlib_loads(config.extra_properties) # type: stranydict

        # Check if we should store runtime context for later use
        self.store_ctx = bool(self.extra_properties.get(ExtraProperties.StoreCtx))
    else:
        self.extra_properties = {}
        self.store_ctx = False

    # If yes, we can obtain a file object to write the context information with
    if self.store_ctx:
        self.ctx_handler = ContextHandler(ctx_container_name=self.config.name, is_read_only=False)

    # The parent constructor goes last - all of our own state must be in place before
    # the underlying WebSocket machinery starts running.
    super(WebSocket, self).__init__(
        self.parallel_server,
        _unusued_sock,
        _unusued_protocols,
        _unusued_extensions,
        wsgi_environ,
        **kwargs
    )
# ################################################################################################################################
def _set_json_dump_func(
    self,
    _default:'str'='zato_default',
    _supported:'strset'=_supported_json_dumps
) -> 'callable_':
    """ Returns the JSON dumps callable used for serializing outgoing messages,
    chosen via the server-wide wsx.json_library configuration option.
    """
    configured = self.parallel_server.fs_server_config.wsx.get('json_library', _default)

    # Fall back to the default for unrecognized values ..
    if configured not in _supported:

        # .. warning only if the user actually configured something.
        if configured:
            logger.warning('Unrecognized JSON library `%s` configured for WSX, not one of `%s`, switching to `%s`',
                configured, _supported, _default)
        configured = _default

    # Import the matching dumps implementation
    if configured in ('orjson', 'zato_default'):
        from orjson import dumps as dumps_func
    elif configured == 'rapidjson':
        from rapidjson import dumps as dumps_func # type: ignore
    elif configured == 'bson':
        from bson.json_util import dumps as dumps_func
    else:
        from zato.common.json_ import dumps as dumps_func

    logger.info('Setting JSON dumps function based on `%s`', configured)

    return dumps_func
# ################################################################################################################################
def _init(self):
    """ One-off initialization of all runtime state for this connection - network
    addresses, audit log, hooks, pub/sub tooling and peer information.
    """
    # Assign core attributes to this object before calling parent class
    self.python_id = get_python_id(self)

    # Must be set here and then to True later on because our parent class may already want
    # to accept connections, and we need to postpone their processing until we are initialized fully.
    self._initialized = False

    # Config values fall back to defaults when absent or falsy
    pings_missed_threshold = getattr(self.config, 'pings_missed_threshold', None)
    pings_missed_threshold = pings_missed_threshold or WEB_SOCKET.DEFAULT.PINGS_MISSED_THRESHOLD

    ping_interval = getattr(self.config, 'ping_interval', None)
    ping_interval = ping_interval or WEB_SOCKET.DEFAULT.PING_INTERVAL

    self.has_session_opened = False
    self._token = None
    self.update_lock = RLock()
    self.ext_client_id = None
    self.ext_client_name = None
    self.connection_time = self.last_seen = datetime.utcnow()
    self.sec_type = self.config.sec_type
    self.pings_missed = 0
    self.pings_missed_threshold = pings_missed_threshold
    self.ping_interval = ping_interval
    self.user_data = Bunch() # Arbitrary user-defined data
    self._disconnect_requested = False # Have we been asked to disconnect this client?

    # Audit log configuration ..
    self.is_audit_log_sent_active = getattr(self.config, 'is_audit_log_sent_active', False)
    self.is_audit_log_received_active = getattr(self.config, 'is_audit_log_received_active', False)

    # .. and audit log setup.
    self.parallel_server.set_up_object_audit_log_by_config(_audit_msg_type, self.pub_client_id, self.config, False)

    # This will be populated by the on_vault_mount_point_needed hook
    self.vault_mount_point = None

    # The last time we received a ping response (pong) from our peer
    self.ping_last_response_time = None

    #
    # If the peer ever subscribes to a pub/sub topic we will periodically
    # store in the ODB information about the last time the peer either sent
    # or received anything from us. Note that we store it if:
    #
    # * The peer has at least one subscription, and
    # * At least self.pubsub_interact_interval elapsed since the last update
    #   (the value is fed to timedelta(minutes=...) in set_last_interaction_data)
    #
    # And:
    #
    # * The peer received a pub/sub message, or
    # * The peer sent a pub/sub message
    #
    # Or:
    #
    # * The peer did not send or receive anything, but
    # * The peer correctly responds to ping messages
    #
    # Such a logic ensures that we do not overwhelm the database with frequent updates
    # if the peer uses pub/sub heavily - it is costly to do it for each message.
    #
    # At the same time, if the peer does not receive or send anything but it is still connected
    # (because it responds to ping) we set its SQL status too.
    #
    # All of this lets background processes clean up WSX clients that subscribe at one
    # point but they are never seen again, which may (theoretically) happen if a peer disconnects
    # in a way that does not allow for Zato to clean up its subscription status in the ODB.
    #
    self.pubsub_interact_interval = _interact_update_interval
    self.interact_last_updated = None
    self.last_interact_source = None
    self.interact_last_set = None

    # Manages access to service hooks
    if self.config.hook_service:

        self.hook_tool = HookTool(self.config.parallel_server, HookCtx, hook_type_to_method, self.invoke_service)

        self.on_connected_service_invoker = self.hook_tool.get_hook_service_invoker(
            cast_('str', self.config.hook_service), WEB_SOCKET.HOOK_TYPE.ON_CONNECTED)

        self.on_disconnected_service_invoker = self.hook_tool.get_hook_service_invoker(
            cast_('str', self.config.hook_service), WEB_SOCKET.HOOK_TYPE.ON_DISCONNECTED)

        self.on_pubsub_response_service_invoker = self.hook_tool.get_hook_service_invoker(
            cast_('str', self.config.hook_service), WEB_SOCKET.HOOK_TYPE.ON_PUBSUB_RESPONSE)

        self.on_vault_mount_point_needed = self.hook_tool.get_hook_service_invoker(
            cast_('str', self.config.hook_service), WEB_SOCKET.HOOK_TYPE.ON_VAULT_MOUNT_POINT_NEEDED)

    else:
        self.hook_tool = None
        self.on_connected_service_invoker = None
        self.on_disconnected_service_invoker = None
        self.on_pubsub_response_service_invoker = None
        self.on_vault_mount_point_needed = None

    # For publish/subscribe over WSX
    self.pubsub_tool = PubSubTool(self.parallel_server.worker_store.pubsub, self,
        PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id, deliver_pubsub_msg=self.deliver_pubsub_msg)

    # Active WebSocket client ID (WebSocketClient model, web_socket_client.id in SQL)
    self._sql_ws_client_id = -1

    # For tokens assigned externally independent of our WS-level self.token.
    # Such tokens will be generated by Vault, for instance.
    self.ext_token = ''

    # Drop WSGI keys pointing to complex Python objects such as sockets
    for name in _wsgi_drop_keys:
        _ = self.initial_http_wsgi_environ.pop(name, None)

    # Responses to previously sent requests - keyed by request IDs
    self.responses_received = {}

    # Resolve and store both ends of the TCP connection as host:port strings
    _local_address = self.sock.getsockname() # type: ignore
    self._local_address = '{}:{}'.format(_local_address[0], _local_address[1])

    _peer_address = self.sock.getpeername() # type: ignore
    self._peer_address = '{}:{}'.format(_peer_address[0], _peer_address[1])

    self.forwarded_for = self.initial_http_wsgi_environ.get('HTTP_X_FORWARDED_FOR')

    if self.forwarded_for:
        self.forwarded_for_fqdn = socket.getfqdn(self.forwarded_for)
    else:
        self.forwarded_for_fqdn = WEB_SOCKET.DEFAULT.FQDN_UNKNOWN

    # Reverse DNS is best-effort - keep the 'unknown' placeholder if the lookup fails
    _peer_fqdn = WEB_SOCKET.DEFAULT.FQDN_UNKNOWN
    self._peer_host = _peer_fqdn

    try:
        get_host_by_addr_func = socket.gethostbyaddr # type: ignore
        self._peer_host = get_host_by_addr_func(_peer_address[0])[0]
        _peer_fqdn = socket.getfqdn(self._peer_host)
    except Exception as e:
        logger.info('WSX exception in FQDN lookup `%s` (%s)', e.args, _peer_address)
    finally:
        self._peer_fqdn = _peer_fqdn

    self.peer_conn_info_pretty = self.get_peer_info_pretty()

    # We always expect for input data to be JSON
    self._parse_func = self.parse_json

    # Store context details
    if self.store_ctx:
        self.ctx_handler.store(self)

    # All set, we can process connections now
    self._initialized = True
# ################################################################################################################################
@property
def token(self) -> 'TokenInfo':
    """ The current session token wrapper (cast for type-checkers; may in fact still be None before authentication). """
    return cast_('TokenInfo', self._token)

@token.setter
def token(self, value:'any_') -> 'None':
    # The first assignment wraps the value in TokenInfo with the channel's TTL;
    # subsequent assignments update the value in place and push the expiration further out.
    if not self._token:
        self._token = TokenInfo(value, self.config.token_ttl)
    else:
        self._token.value = value
        self._token.extend()
# ################################################################################################################################
# This is a property so as to make it easier to add logging calls to observe what is getting and setting the value
@property
def sql_ws_client_id(self) -> 'int':
    # The underlying value is initialized to -1 in _init, i.e. "no SQL client row yet"
    return self._sql_ws_client_id

@sql_ws_client_id.setter
def sql_ws_client_id(self, value:'int') -> 'None':
    self._sql_ws_client_id = value
# ################################################################################################################################
def set_last_interaction_data(
    self,
    source, # type: str
    _interval=_interact_update_interval # type: int
) -> 'None':
    """ Updates metadata regarding pub/sub about this WSX connection.

    source - a short label identifying what triggered the update; stored as last_interact_source.
    """
    with self.update_lock:

        # Local aliases
        now = _now()

        # Update last interaction metadata time for our peer
        self.last_interact_source = source

        # It is possible that we are setting the metadata the first time here,
        # in which case we will always invoke the service,
        # having first stored current timestamp for later use.
        if not self.interact_last_set:
            self.interact_last_set = now
            needs_services = True
        else:
            # We must have been already called before, in which case we execute services only if it is our time to do it.
            # NOTE(review): _interval is fed to timedelta(minutes=...) - confirm that
            # INTERACT_UPDATE_INTERVAL is expressed in minutes rather than seconds.
            needs_services = True if self.interact_last_updated + timedelta(minutes=_interval) < now else False # type: ignore

        # Are we to invoke the services this time?
        if needs_services:

            pub_sub_request = {
                'sub_key': self.pubsub_tool.get_sub_keys(),
                'last_interaction_time': now,
                'last_interaction_type': self.last_interact_source,
                'last_interaction_details': self.get_peer_info_pretty(),
            }

            wsx_request = {
                'id': self.sql_ws_client_id,
                'last_seen': now,
            }

            if logger_has_debug:
                logger.debug('Setting pub/sub interaction metadata `%s`', pub_sub_request)

            self.invoke_service('zato.pubsub.subscription.update-interaction-metadata', pub_sub_request)

            if logger_has_debug:
                logger.debug('Setting WSX last seen `%s`', wsx_request)

            self.invoke_service('zato.channel.web-socket.client.set-last-seen', wsx_request)

            # Finally, store it for the future use
            self.interact_last_updated = now
# ################################################################################################################################
def deliver_pubsub_msg(self, sub_key:'str', msg:'PubSubMessage | anylist') -> 'None':
    """ Delivers one or more pub/sub messages to the connected WSX client.

    sub_key - subscription key the message(s) belong to
    msg - a single PubSubMessage or a list of them; a one-element list is unwrapped
          and treated as a single message
    """
    ctx = {}

    if isinstance(msg, PubSubMessage):
        len_msg = 1
    else:
        len_msg = len(msg)
        msg = msg[0] if len_msg == 1 else msg

    # A list of messages is given on input so we need to serialize each of them individually
    if isinstance(msg, list):
        cid = new_cid()
        data = []
        for elem in msg:
            data.append(elem.serialized if elem.serialized else elem.to_external_dict())
            if elem.reply_to_sk:
                # Fix: this used to be ctx.setdefault('', []) - an empty-string key that made
                # the collected values unreachable under the 'reply_to_sk' name. The single-message
                # branch below stores the value under 'reply_to_sk', so the list branch must too.
                ctx_reply_to_sk = ctx.setdefault('reply_to_sk', [])
                ctx_reply_to_sk.append(elem.reply_to_sk)

    # A single message was given on input
    else:
        cid = msg.pub_msg_id
        data = msg.serialized if msg.serialized else msg.to_external_dict()
        if msg.reply_to_sk:
            ctx['reply_to_sk'] = msg.reply_to_sk

    logger.info('Delivering %d pub/sub message{} to sub_key `%s` (ctx:%s)'.format('s' if len_msg > 1 else ''),
        len_msg, sub_key, ctx)

    # Actually deliver messages
    self.invoke_client(cid, data, ctx=ctx, _Class=InvokeClientPubSubRequest)

    # We get here if there was no exception = we can update pub/sub metadata
    self.set_last_interaction_data('pubsub.deliver_pubsub_msg')
# ################################################################################################################################
def add_sub_key(self, sub_key:'str') -> 'None':
    """ Registers a pub/sub subscription key with this connection's PubSubTool. """
    self.pubsub_tool.add_sub_key(sub_key)
# ################################################################################################################################
def remove_sub_key(self, sub_key:'str') -> 'None':
    """ Unregisters a pub/sub subscription key from this connection's PubSubTool. """
    self.pubsub_tool.remove_sub_key(sub_key)
# ################################################################################################################################
def get_peer_info_dict(self) -> 'stranydict':
    """ Returns a dict describing the remote peer of this connection.
    """
    info = {}
    info['name'] = self.ext_client_name
    info['ext_client_id'] = self.ext_client_id
    info['forwarded_for_fqdn'] = self.forwarded_for_fqdn
    info['peer_fqdn'] = self._peer_fqdn
    info['pub_client_id'] = self.pub_client_id
    info['python_id'] = self.python_id
    info['sock'] = str(getattr(self, 'sock', ''))
    info['swc'] = self.sql_ws_client_id
    return info
# ################################################################################################################################
def get_peer_info_pretty(self) -> 'str':
    """ Returns a one-line, human-readable description of the remote peer.
    """
    template = 'name:`{}` id:`{}` fwd_for:`{}` conn:`{}` pub:`{}`, py:`{}`, sock:`{}`, swc:`{}`'
    return template.format(
        self.ext_client_name, self.ext_client_id, self.forwarded_for_fqdn, self._peer_fqdn,
        self.pub_client_id, self.python_id, getattr(self, 'sock', None), self.sql_ws_client_id)
# ################################################################################################################################
def get_on_connected_hook(self) -> 'callnone':
    """ Returns a hook triggered when a new connection was made, or None if no hook service is configured.
    """
    return self.on_connected_service_invoker if self.hook_tool else None
# ################################################################################################################################
def get_on_disconnected_hook(self) -> 'callnone':
    """ Returns a hook triggered when an existing connection was dropped, or None if no hook service is configured.
    """
    return self.on_disconnected_service_invoker if self.hook_tool else None
# ################################################################################################################################
def get_on_pubsub_hook(self) -> 'callnone':
    """ Returns a hook triggered when a pub/sub response arrives from the connected client,
    or None if no hook service is configured.
    """
    return self.on_pubsub_response_service_invoker if self.hook_tool else None
# ################################################################################################################################
def get_on_vault_mount_point_needed(self) -> 'callnone':
    """ Returns a hook triggered when a Vault mount point needed to check credentials is not known,
    or None if no hook service is configured.
    """
    return self.on_vault_mount_point_needed if self.hook_tool else None
# ################################################################################################################################
def parse_json(
    self,
    data:'any_',
    cid:'str'='',
    _create_session:'str'=WEB_SOCKET.ACTION.CREATE_SESSION,
    _response:'str'=WEB_SOCKET.ACTION.CLIENT_RESPONSE,
    _code_invalid_utf8:'int'=code_invalid_utf8
) -> 'ClientMessage':
    """ Parses an incoming message into a Bunch object.
    The 'meta' element carries request metadata (action, id, token, client details);
    the 'data' element carries the actual payload. Both are optional on input.
    """
    # Parse JSON into a dictionary
    parsed = self._json_parser.parse(data) # type: any_

    # Create a request message
    msg = ClientMessage()

    # Request metadata is optional
    meta = parsed.get('meta', {})

    if meta:
        # No explicit action means this is a response to a previous request of ours
        msg.action = meta.get('action', _response)
        msg.id = meta['id']
        msg.timestamp = meta['timestamp']
        msg.token = meta.get('token') # Optional because it won't exist during first authentication

        # Optional, channel-specific attributes sent by the client
        if client_attrs := (meta.get('attrs') or {}):
            msg.client_attrs = parse_extra_into_dict(client_attrs)

        # self.ext_client_id and self.ext_client_name will exist after create-session action
        # so we use them if they are available but fall back to meta.client_id and meta.client_name during
        # the very create-session action.
        ext_client_id = meta.get('client_id')
        if ext_client_id:
            self.ext_client_id = meta.get('client_id')

        ext_client_name = meta.get('client_name', '')
        if ext_client_name:
            # A dict-based client name is flattened into a single 'key: value; ...' string, sorted by key
            if isinstance(ext_client_name, dict):
                _ext_client_name = []
                for key, value in sorted(ext_client_name.items()):
                    _ext_client_name.append('{}: {}'.format(key, value))
                ext_client_name = '; '.join(_ext_client_name)

        msg.ext_client_name = ext_client_name
        msg.ext_client_id = self.ext_client_id

        if msg.action == _create_session:
            msg.username = meta.get('username')

            # Secret is optional because WS channels may be without credentials attached
            msg.secret = meta['secret'] if self.config.needs_auth else ''

            msg.is_auth = True
        else:
            msg.in_reply_to = meta.get('in_reply_to') or None
            msg.is_auth = False

            # Optional pub/sub routing context
            ctx = meta.get('ctx')
            if ctx:
                msg.reply_to_sk = ctx.get('reply_to_sk')
                msg.deliver_to_sk = ctx.get('deliver_to_sk')

    # Data is optional
    msg.data = parsed.get('data', {})

    return msg
# ################################################################################################################################
def parse_xml(self, data:'any_') -> 'None':
    """ Placeholder - XML input is not supported by WebSocket channels. """
    message = 'Not supported yet'
    raise NotImplementedError(message)
# ################################################################################################################################
def create_session(
    self,
    cid:'str',
    request:'ClientMessage',
    _sec_def_type_vault:'str'=SEC_DEF_TYPE.VAULT,
    _VAULT_TOKEN_HEADER:'str'=VAULT_TOKEN_HEADER
) -> 'optional[AuthenticateResponse]':
    """ Creates a new session in the channel's auth backend and assigns metadata based on the backend's response.
    Returns a serialized AuthenticateResponse on success, or None (implicitly) if credentials were rejected.
    """
    # This dictionary will be written to
    headers = {}

    # Channels without security definitions always accept the session
    if not self.config.needs_auth:
        can_create_session = True
    else:
        # Discover which Vault mount point credentials will be under, unless we know it already.
        if not self.vault_mount_point:
            hook = self.get_on_vault_mount_point_needed()
            if hook:
                # NOTE(review): the hook presumably sets self.vault_mount_point as a side effect - confirm
                hook(**self._get_hook_request())

        headers['HTTP_X_ZATO_VAULT_MOUNT_POINT'] = self.vault_mount_point

        # Delegate the actual credentials check to the channel's configured auth function
        auth_func = cast_('callable_', self.config.auth_func)
        can_create_session = auth_func(
            request.cid, self.sec_type, {'username':request.username, 'secret':request.secret}, self.config.sec_name,
            self.config.vault_conn_default_auth_method, self.initial_http_wsgi_environ, headers)

    if can_create_session:

        # Session metadata is updated under a lock as other greenlets may read it concurrently
        with self.update_lock:

            # If we are using Vault, use its own header
            if self.config.sec_type == _sec_def_type_vault:
                self.ext_token = headers['zato.http.response.headers'][_VAULT_TOKEN_HEADER]
                self_token = self.ext_token

            # Otherwise, generate our own
            else:
                self_token = new_cid()

            self.token = 'zwsxt.{}'.format(self_token)
            self.has_session_opened = True
            self.ext_client_id = request.ext_client_id
            self.ext_client_name = request.ext_client_name

            # Update peer name pretty now that we have more details about it
            self.peer_conn_info_pretty = self.get_peer_info_pretty()

        logger.info('Assigning wsx py:`%s` to `%s` (%s %s)', self.python_id, self.pub_client_id,
            self.ext_client_id, self.ext_client_name)

        # Log the token's expiration status for diagnostics
        _timestamp = _now()
        logger.info('Tok auth: [%s / %s] ts:%s exp:%s -> %s',
            self.token.value, self.pub_client_id, _timestamp, self.token.expires_at,
            _timestamp > self.token.expires_at)

        return AuthenticateResponse(self.token.value, request.cid, request.id).serialize(self._json_dump_func)
# ################################################################################################################################
def on_forbidden(self, action:'str', data:'str'=copy_forbidden) -> 'None':
    """ Sends a Forbidden message to the peer and marks this connection as terminated.
    The action parameter is a human-readable description, for logging, of what the peer did to be rejected.
    """
    cid = new_cid()
    logger.warning(
        'Peer %s (%s) %s, closing its connection to %s (%s), cid:`%s` (%s)', self._peer_address, self._peer_fqdn, action,
        self._local_address, self.config.name, cid, self.peer_conn_info_pretty)

    # If the client is already known to have disconnected there is no point in sending a Forbidden message.
    if self.is_client_disconnected():
        self.update_terminated_status()
        return

    try:
        msg = Forbidden(cid, data)
        serialized = msg.serialize(self._json_dump_func)
        self.send(serialized, cid)
    except AttributeError as e:
        # Catch a lower-level exception which may be raised in case the client
        # disconnected and we did not manage to send the Forbidden message.
        # In this situation, the lower level will raise an attribute error
        # with a specific message. Otherwise, we reraise the exception.
        if not e.args[0] == "'NoneType' object has no attribute 'text_message'":
            raise
    else:
        # The Forbidden message was sent successfully - mark both sides as terminated
        self.update_terminated_status()
# ################################################################################################################################
def update_terminated_status(self) -> 'None':
    """ Marks both sides of this connection as terminated. """
    self.client_terminated = True
    self.server_terminated = True
# ################################################################################################################################
def is_client_disconnected(self) -> 'bool':
    """ True if the peer terminated the connection or its socket no longer exists. """
    return self.terminated or (self.sock is None)

def is_client_connected(self) -> 'bool':
    """ Inverse of is_client_disconnected. """
    is_disconnected = self.is_client_disconnected()
    return not is_disconnected
# ################################################################################################################################
def send_background_pings(self, ping_interval:'int') -> 'None':
    """ Runs in its own greenlet - periodically pings the peer, extends the session token
    after each successful pong and closes the connection once too many pings go unanswered.
    """
    logger.info('Starting WSX background pings (%s:%s) for `%s`',
        ping_interval, self.pings_missed_threshold, self.peer_conn_info_pretty)
    try:
        while self.stream and (not self.server_terminated):

            # Sleep for N seconds before sending a ping but check if we are connected upfront because
            # we could have disconnected in between while and sleep calls.
            sleep(ping_interval)

            # Ok, still connected
            if self.stream and (not self.server_terminated):

                # The response object will be None in case there is an exception
                response = None
                try:
                    _ts_before_invoke = _now()
                    if logger_has_debug:
                        logger.info('Tok ext0: [%s / %s] ts:%s exp:%s -> %s',
                            self.token.value, self.pub_client_id, _ts_before_invoke, self.token.expires_at,
                            _ts_before_invoke > self.token.expires_at)
                    # use_send=False makes invoke_client send a ping frame rather than a regular message
                    response = self.invoke_client(new_cid(), None, use_send=False)
                except ConnectionError as e:
                    logger.warning('ConnectionError; set keep_sending to False; closing connection -> `%s`', e.args)
                    self.disconnect_client(code=close_code.connection_error, reason='Background pingConnectionError')
                except RuntimeError:
                    logger.warning('RuntimeError; set keep_sending to False; closing connection -> `%s`', format_exc())
                    self.disconnect_client(code=close_code.runtime_error, reason='Background ping RuntimeError')

                # Ping bookkeeping happens under the lock as other greenlets read these attributes
                with self.update_lock:
                    if response:
                        # A pong arrived - reset the missed-pings counter and extend the token's validity
                        _timestamp = _now()
                        self.pings_missed = 0
                        self.ping_last_response_time = _timestamp
                        if logger_has_debug:
                            logger.info('Tok ext1: [%s / %s] ts:%s exp:%s -> %s',
                                self.token.value, self.pub_client_id, _timestamp, self.token.expires_at,
                                _timestamp > self.token.expires_at)
                        self.token.extend(ping_interval)
                        if logger_has_debug:
                            logger.info('Tok ext2: [%s / %s] ts:%s exp:%s -> %s',
                                self.token.value, self.pub_client_id, _timestamp, self.token.expires_at,
                                _timestamp > self.token.expires_at)
                    else:
                        self.pings_missed += 1
                        if self.pings_missed < self.pings_missed_threshold:
                            logger.info(
                                'Peer %s (%s) missed %s/%s ping messages from %s (%s). Last response time: %s{} (%s)'.format(
                                    ' UTC' if self.ping_last_response_time else ''),
                                self._peer_address,
                                self._peer_fqdn,
                                self.pings_missed,
                                self.pings_missed_threshold,
                                self._local_address,
                                self.config.name,
                                self.ping_last_response_time,
                                self.peer_conn_info_pretty)
                        else:
                            # Threshold reached - this forces the connection closed
                            self.on_pings_missed()
                            return

            # No stream or server already terminated = we can quit
            else:
                logger.info('Stopping background pings for peer %s (%s), stream:`%s`, st:`%s`, m:%s/%s (%s)',
                    self._peer_address,
                    self._peer_fqdn,
                    self.stream,
                    self.server_terminated,
                    self.pings_missed,
                    self.pings_missed_threshold,
                    self.peer_conn_info_pretty)
                return
    except Exception:
        logger.warning(format_exc())
# ################################################################################################################################
def _get_hook_request(self) -> 'Bunch':
    """ Builds the keyword arguments dictionary handed over to connection-lifecycle hooks. """
    skip = ('hook_type', 'peer_address', 'peer_host', 'peer_fqdn', 'msg')
    out = bunchify({
        'peer_address': self._peer_address,
        'peer_host': self._peer_host,
        'peer_fqdn': self._peer_fqdn,
    })
    # Copy over the remaining HookCtx fields directly from our own attributes
    for name in HookCtx.__slots__:
        if name not in skip:
            out[name] = getattr(self, name)
    return out
# ################################################################################################################################
def on_pings_missed(self) -> 'None':
    """ Closes the connection once the peer has missed too many ping responses. """
    cid = new_cid()
    logger.warning(
        'Peer %s (%s) missed %s/%s pings, forcing its connection to close (%s)',
        self._peer_address,
        self._peer_fqdn,
        self.pings_missed,
        self.pings_missed_threshold,
        self.peer_conn_info_pretty)
    self.disconnect_client(cid, code_pings_missed, 'Pings missed')
    self.update_terminated_status()
# ################################################################################################################################
def register_auth_client(self, _assigned_msg:'str'='Assigned sws_id:`%s` to `%s` (%s %s %s)') -> 'None':
    """ Registers peer in ODB and sets up background pings to keep its connection alive.
    Called only if authentication succeeded.
    """
    # Create an ODB-level record of this client and remember its database ID
    response = self.invoke_service('zato.channel.web-socket.client.create', {
        'pub_client_id': self.pub_client_id,
        'ext_client_id': self.ext_client_id,
        'ext_client_name': self.ext_client_name,
        'is_internal': True,
        'local_address': self.local_address,
        'peer_address': self.peer_address,
        'peer_fqdn': self._peer_fqdn,
        'connection_time': self.connection_time,
        'last_seen': self.last_seen,
        'channel_name': self.config.name,
        'peer_forwarded_for': self.forwarded_for,
        'peer_forwarded_for_fqdn': self.forwarded_for_fqdn,
    }, needs_response=True)

    self.sql_ws_client_id = response['ws_client_id']

    logger.info(
        _assigned_msg, self.sql_ws_client_id, self.python_id, self.pub_client_id, self.ext_client_id, self.ext_client_name)

    # Run the relevant on_connected hook, if any is available
    hook = self.get_on_connected_hook()

    if hook:
        hook(**self._get_hook_request())

    # Keep the connection alive from now on in a separate greenlet
    _ = spawn(self.send_background_pings, self.ping_interval)
# ################################################################################################################################
def unregister_auth_client(self) -> 'None':
    """ Unregisters an already registered peer in ODB, running the on_disconnected hook if one is configured.
    """
    hook = self.get_on_disconnected_hook()
    hook_request = self._get_hook_request() if hook else None

    # To clear out our own delivery tasks
    opaque_func_list = [self.pubsub_tool.remove_all_sub_keys]

    # Reuse the hook looked up above instead of resolving it a second time
    cleanup_wsx_client(self.has_session_opened, self.invoke_service, self.pub_client_id, list(self.pubsub_tool.sub_keys),
        hook, self.config.hook_service, hook_request, opaque_func_list)
# ################################################################################################################################
def handle_create_session(self, cid:'str', request:'ClientMessage') -> 'None':
    """ Handles a create-session request - authenticates the peer and, on success,
    registers it and sends back an authentication response.
    """
    # Reject requests that are not authentication requests at all
    if not request.is_auth:
        self.on_forbidden('is not authenticated')
        return

    response = self.create_session(cid, request)

    # No response means the credentials were rejected
    if not response:
        self.on_forbidden('sent invalid credentials')
        return

    # Assign any potential attributes sent across by the client WebSocket
    self.client_attrs = request.client_attrs

    # Register the client for future use
    self.register_auth_client()

    # Send an auth response to the client
    self.send(response, cid)

    logger.info(
        'Client %s logged in successfully to %s (%s) (%s %s)', self.pub_client_id, self._local_address,
        self.config.name, self.ext_client_id, self.ext_client_name)
# ################################################################################################################################
def invoke_service(
    self,
    service_name:'str',
    data:'any_',
    cid:'str'='',
    needs_response:'bool'=True,
    _channel:'str'=CHANNEL.WEB_SOCKET,
    _data_format:'str'=DATA_FORMAT.DICT,
    serialize:'bool'=False
) -> 'any_':
    """ Invokes a Zato service on behalf of this connection, passing connection metadata
    along in the environ dictionary that the target service will have access to.
    """
    # It is possible that this method will be invoked before self.__init__ completes,
    # because self's parent manages the underlying TCP stream, in which case self
    # will not be fully initialized yet so we need to wait a bit until it is.
    while not self._initialized:
        sleep(0.1)

    # Connection-scoped metadata handed over to the service being invoked
    environ = {
        'web_socket': self,
        'sql_ws_client_id': self.sql_ws_client_id,
        'ws_channel_config': self.config,
        'ws_token': self.token,
        'ext_token': self.ext_token,
        'pub_client_id': self.pub_client_id,
        'ext_client_id': self.ext_client_id,
        'ext_client_name': self.ext_client_name,
        'peer_conn_info_pretty': self.peer_conn_info_pretty,
        'connection_time': self.connection_time,
        'pings_missed': self.pings_missed,
        'pings_missed_threshold': self.pings_missed_threshold,
        'peer_host': self._peer_host,
        'peer_fqdn': self._peer_fqdn,
        'forwarded_for': self.forwarded_for,
        'forwarded_for_fqdn': self.forwarded_for_fqdn,
        'initial_http_wsgi_environ': self.initial_http_wsgi_environ,
    }

    # The message handed over to the server's message callback
    msg = {
        'cid': cid or new_cid(),
        'data_format': _data_format,
        'service': service_name,
        'payload': data,
        'environ': environ,
        'wsx': self,
    }

    # The callback does the actual invocation
    on_message_callback = cast_('callable_', self.config.on_message_callback)
    response = on_message_callback(
        msg,
        CHANNEL.WEB_SOCKET,
        None,
        needs_response=needs_response,
        serialize=serialize
    )

    return response
# ################################################################################################################################
def handle_client_message(self, cid:'str', msg:'Bunch') -> 'None':
    """ Dispatches an incoming client message either to response handling or to service invocation. """
    if msg.action == WebSocketAction.CLIENT_RESPONSE:
        self._handle_client_response(cid, msg)
    else:
        self._handle_invoke_service(cid, msg)
# ################################################################################################################################
def _handle_invoke_service(self, cid:'str', msg:'Bunch') -> 'None':
    """ Invokes the channel's configured service with the client's request and sends back
    either an OK response or an error one, mapping known exception types to HTTP-like statuses.
    """
    try:
        service_response = self.invoke_service(cast_('str', self.config.service_name), msg.data, cid=cid)
    except Exception as e:

        # This goes to WSX logs, with a full traceback
        logger.warning('Service `%s` could not be invoked, id:`%s` cid:`%s`, conn:`%s`, e:`%s`',
            self.config.service_name, msg.id, cid, self.peer_conn_info_pretty, format_exc())

        # This goes to server.log and has only an error message
        logger_zato.warning('Service `%s` could not be invoked, id:`%s` cid:`%s`, conn:`%s`, e:`%s`',
            self.config.service_name, msg.id, cid, self.peer_conn_info_pretty, e)

        # Errors known to map to HTTP ones
        if isinstance(e, Reportable):
            status = e.status
            error_message = e.msg

        # Catch SimpleIO-related errors, i.e. missing input parameters
        elif isinstance(e, ParsingException):
            status = BAD_REQUEST
            error_message = 'I/O processing error'

        # Anything else
        else:
            status = INTERNAL_SERVER_ERROR
            error_message = 'Internal server error'

        response = ErrorResponse(cid, msg.id, status, error_message)
    else:
        response = OKResponse(cid, msg.id, service_response)

    serialized = response.serialize(self._json_dump_func)

    logger.info('Sending response `%s` to `%s` (%s %s)',
        self._shorten_data(serialized), self.pub_client_id, self.ext_client_id, self.ext_client_name)

    try:
        self.send(serialized, msg.cid, cid)
    except AttributeError as e:
        # The client disconnected before the response could be delivered - discard it.
        if e.args[0] == "'NoneType' object has no attribute 'text_message'":
            _msg = 'Service response discarded (client disconnected), cid:`%s`, msg.meta:`%s`'
            _meta = msg.get_meta()
            # Fix: the format string has two placeholders so both cid and the metadata must be given
            logger.warning(_msg, cid, _meta)
            logger_zato.warning(_msg, cid, _meta)
# ################################################################################################################################
def _wait_for_event(
    self,
    wait_time:'int',
    condition_callable:'callable_',
    _delta:'callable_'=timedelta,
    _sleep:'callable_'=sleep,
    *args:'any_',
    **kwargs:'any_'
) -> 'any_':
    """ Polls condition_callable every 10 ms until it returns a truthy value or wait_time
    seconds elapse - returns that value, or None (implicitly) on timeout.
    """
    now = _now()
    until = now + _delta(seconds=wait_time)

    while now < until:
        result = condition_callable(*args, **kwargs)
        if result:
            return result
        _sleep(0.01)
        now = _now()
# ################################################################################################################################
def _handle_client_response(
    self,
    cid:'str',
    msg:'any_',
    _msg_id_prefix:'str'=MSG_PREFIX.MSG_ID
) -> 'None':
    """ Processes responses from WSX clients - either invokes callbacks for pub/sub responses
    or adds the message to the list of received ones because someone is waiting for it.
    """
    is_pubsub_response = msg.in_reply_to.startswith(_msg_id_prefix)

    # Pub/sub response - hand it over to the channel's hook, if one exists
    if is_pubsub_response:
        hook = self.get_on_pubsub_hook()
        if hook:
            request = self._get_hook_request()
            request['msg'] = msg
            hook(**request)
        else:
            log_msg = 'Ignoring pub/sub response, on_pubsub_response hook not implemented for `%s`, conn:`%s`, msg:`%s`'
            logger.info(log_msg, self.config.name, self.peer_conn_info_pretty, msg)
            if logger_zato_has_debug:
                logger_zato.debug(log_msg, self.config.name, self.peer_conn_info_pretty, msg)

    # Regular synchronous response, simply enqueue it and someone else will take care of it
    else:
        self.responses_received[msg.in_reply_to] = msg
def _has_client_response(self, request_id:'str') -> 'any_':
return self.responses_received.get(request_id)
def _wait_for_client_response(self, request_id:'str', wait_time:'int'=5) -> 'any_':
""" Wait until a response from client arrives and return it or return None if there is no response up to wait_time.
"""
return self._wait_for_event(wait_time, self._has_client_response, request_id=request_id)
# ################################################################################################################################
def _received_message(
    self,
    data:'any_',
    _default_data:'str'='',
    *args:'any_',
    **kwargs:'any_'
) -> 'None':
    """ Low-level handler for each incoming message - validates UTF-8, parses the request
    and routes it to session creation or client-message handling, enforcing token checks
    for already authenticated peers.
    """
    # This is one of methods that can be invoked before self.__init__ completes,
    # because self's parent manages the underlying TCP stream, in which case self
    # will not be fully initialized yet so we need to wait a bit until it is.
    while not self._initialized:
        sleep(0.1)

    try:
        # Input bytes must be UTF-8
        try:
            data.decode('utf8')
        except UnicodeDecodeError as e:
            reason = 'Invalid UTF-8 bytes'
            msg = '{}; `{}`'.format(reason, e.args)
            logger.warning(msg)
            logger_zato.warning(msg)

            # An authenticated peer is told about the error ..
            if self.has_session_opened:
                response = ErrorResponse('<no-cid>', '<no-msg-id>', UNPROCESSABLE_ENTITY, reason)
                serialized = response.serialize(self._json_dump_func)
                log_msg = 'About to send the invalid UTF-8 message to client'
                logger.warning(log_msg)
                logger_zato.warning(log_msg)
                self.send(serialized, new_cid())
                return

            # .. while an unauthenticated one is disconnected outright.
            else:
                log_msg = 'Disconnecting client due to invalid UTF-8 data'
                logger.warning(log_msg)
                logger_zato.warning(log_msg)
                self.disconnect_client('<no-cid>', code_invalid_utf8, reason)
                return

        cid = new_cid()
        request = self._parse_func(data or _default_data) # type: any_
        now = _now()
        self.last_seen = now

        # Store the incoming data in the audit log, if it is enabled for this channel
        if self.is_audit_log_received_active:
            self._store_audit_log_data(DataReceived, data, cid)

        # If client is authenticated, allow it to re-authenticate, which grants a new token, or to invoke a service.
        # Otherwise, authentication is required.
        if self.has_session_opened:

            # Reject request if an already existing token was not given on input, it should have been
            # because the client is authenticated after all.
            if not request.token:
                self.on_forbidden('did not send token')
                return

            if request.token != self.token.value:
                self.on_forbidden('sent an invalid token (`{!r}` instead of `{!r}`)'.format(request.token, self.token.value))
                return

            # Reject request if token is provided but it already expired
            _timestamp = _now()
            logger.info('Tok rcv: [%s / %s] ts:%s exp:%s -> %s',
                self.token.value, self.pub_client_id, _timestamp, self.token.expires_at, _timestamp > self.token.expires_at)
            if _timestamp > self.token.expires_at:
                self.on_forbidden('used an expired token; tok: [{} / {}] ts:{} > exp:{}'.format(
                    self.token.value, self.pub_client_id, _timestamp, self.token.expires_at))
                return

            # Ok, we can proceed
            try:
                self.handle_client_message(cid, request) if not request.is_auth else self.handle_create_session(cid, request)
            except ConnectionError as e:
                msg = 'Ignoring message (ConnectionError), cid:`%s`; conn:`%s`; e:`%s`'
                logger.info(msg, cid, self.peer_conn_info_pretty, e.args)
                logger_zato.info(msg, cid, self.peer_conn_info_pretty, e.args)
            except RuntimeError as e:
                # A lower-level "cannot send" error means the socket is already gone
                if e.args[0] == _cannot_send:
                    msg = 'Ignoring message (socket terminated #1), cid:`%s`, request:`%s` conn:`%s`'
                    logger.info(msg, cid, request, self.peer_conn_info_pretty)
                    logger_zato.info(msg, cid, request, self.peer_conn_info_pretty)
                else:
                    raise

        # Unauthenticated - require credentials on input
        else:
            self.handle_create_session(cid, request)

        if logger_has_debug:
            logger.debug('Response returned cid:`%s`, time:`%s`', cid, _now() - now)

    except Exception:
        logger.warning(format_exc())
# ################################################################################################################################
def received_message(self, message:'Bunch') -> 'None':
    """ Callback invoked by the underlying WSX library for each incoming message. """
    data = message.data
    logger.info('Received message %r from `%s` (%s %s)',
        self._shorten_data(data), self.pub_client_id, self.ext_client_id, self.ext_client_name)
    try:
        self._received_message(data)
    except Exception:
        logger.warning(format_exc())
# ################################################################################################################################
def send(self, data:'any_'='', cid:'str'='', in_reply_to:'str'='') -> 'None':
    """ Sends data to the peer, first recording it in the audit log if that is enabled. """
    # Audit log entry goes in before the message is actually sent
    if self.is_audit_log_sent_active:
        self._store_audit_log_data(DataSent, data, cid, in_reply_to)

    # Call the super-class that will actually send the message.
    super().send(data)
# ################################################################################################################################
def _store_audit_log_data(
    self,
    event_class:'any_',
    data:'any_',
    cid:'str'='',
    in_reply_to:'str'='',
) -> 'None':
    """ Builds an audit-log event describing data sent or received over this connection and stores it. """
    # Always store text, converting non-string input first
    data_as_text = data if isinstance(data, str) else str(data)

    # Describe our event ..
    data_event = event_class()
    data_event.type_ = _audit_msg_type
    data_event.object_id = self.pub_client_id
    data_event.data = data_as_text
    data_event.timestamp = _now()
    data_event.msg_id = cid
    data_event.in_reply_to = in_reply_to

    # .. and store it in the audit log.
    self.parallel_server.audit_log.store_data(data_event)
# ################################################################################################################################
def notify_pubsub_message(self, cid:'str', request:'any_') -> 'None':
    """ Invoked by internal services each time a pub/sub message is available for at least one of sub_keys
    this WSX client is responsible for.
    """
    ctx = HandleNewMessageCtx(cid, request['has_gd'], request['sub_key_list'],
        request['non_gd_msg_list'], request['is_bg_call'], request['pub_time_max'])
    self.pubsub_tool.handle_new_messages(ctx)
# ################################################################################################################################
def subscribe_to_topic(self, cid:'str', request:'any_') -> 'None':
    """ Subscribes the current WebSocket to the topic pointed to by the input request object. """
    service = 'zato.pubsub.subscription.create-wsx-subscription-for-current'
    self.invoke_service(service, {'topic_name': request}, cid=cid)
# ################################################################################################################################
def run(self) -> 'None':
    """ Initializes this connection object and hands control over to the WSX library's own loop. """
    try:
        self._init()
        super(WebSocket, self).run()
    except Exception:
        logger.warning('Exception in WebSocket.run `%s`', format_exc())
# ################################################################################################################################
def _ensure_session_created(self) -> 'None':
    """ Runs in its own greenlet and waits for an authentication request to arrive by self.create_session_by,
    which is a timestamp object. If self.has_session_opened is not True by that time, connection to the remote end
    is closed.
    """
    try:
        if self._wait_for_event(self.config.new_token_wait_time, lambda: self.has_session_opened):
            return

        # We get here if self.has_session_opened has not been set to True by self.create_session_by
        self.on_forbidden('did not create a session within {}s (#1)'.format(self.config.new_token_wait_time))

    except Exception as e:
        # Fix: guard against exceptions raised without any args - indexing
        # e.args[0] unconditionally would itself raise IndexError here.
        if e.args and e.args[0] == "'NoneType' object has no attribute 'text_message'":
            self.on_forbidden('did not create a session within {}s (#2)'.format(self.config.new_token_wait_time))
        else:
            logger.warning('Exception in WSX _ensure_session_created `%s`', format_exc())
# ################################################################################################################################
def _shorten_data(self, data:'str', max_size:'int'=log_msg_max_size) -> 'str':
    """ Returns data suitable for logging - truncated to max_size characters if needed
    and always suffixed with the input's original length.
    """
    data_len = len(data)

    # Truncate only if the input exceeds the maximum allowed length
    if data_len > max_size:
        shortened = '%s [...]' % (data[:max_size])
    else:
        shortened = data

    return '%s (%s B)' % (shortened, data_len)
# ################################################################################################################################
def invoke_client(
    self,
    cid:'str',
    request:'any_',
    timeout:'int'=5,
    ctx:'any_'=None,
    use_send:'bool'=True,
    _Class:'any_'=InvokeClientRequest,
    wait_for_response:'bool'=True
) -> 'any_':
    """ Invokes a remote WSX client with request given on input, returning its response,
    if any was produced in the expected time. With use_send=False, the request is sent
    as a ping frame instead of a regular message.
    """
    # If input request is a string, try to decode it from JSON, but leave as-is in case
    # of an error or if it is not a string.
    if isinstance(request, str):
        try:
            request = stdlib_loads(request)
        except ValueError:
            pass

    # Serialize to string
    msg = _Class(cid, request, ctx)
    serialized = msg.serialize(self._json_dump_func)

    # Log what is about to be sent
    if use_send:
        logger.info('Sending message `%s` from `%s` to `%s` `%s` `%s` `%s`', self._shorten_data(serialized),
            self.python_id, self.pub_client_id, self.ext_client_id, self.ext_client_name, self.peer_conn_info_pretty)

    try:
        if use_send:
            self.send(serialized, cid, msg.in_reply_to)
        else:
            # Do not send whitespace so as not to the exceed the 125 bytes length limit
            # that each ping message has to be contained within.
            serialized = serialized.replace(' ', '').replace('\n', '')
            self.ping(serialized)
    except RuntimeError as e:
        if str(e) == _cannot_send:
            # Fix: shorten the serialized payload being logged, not the log template -
            # previously the template itself was run through _shorten_data, which appended
            # a byte count to the format string, and the name `msg` shadowed the request object.
            log_template = 'Cannot send message (socket terminated #2), cid:`%s`, msg:`%s` conn:`%s`'
            logger.info(log_template, cid, self._shorten_data(serialized), self.peer_conn_info_pretty)
            logger_zato.info(log_template, cid, self._shorten_data(serialized), self.peer_conn_info_pretty)
            self.disconnect_client(cid, close_code.runtime_invoke_client, 'Client invocation runtime error')
        raise RuntimeInvocationError(cid, 'WSX client disconnected cid:`{}, peer:`{}`'.format(cid, self.peer_conn_info_pretty))

    # Wait for response but only if it is not a pub/sub message,
    # these are always asynchronous and that channel's WSX hook
    # will process the response, if any arrives.
    if _Class is not InvokeClientPubSubRequest:
        if wait_for_response:
            response = self._wait_for_client_response(msg.id, timeout)
            if response:
                return response if isinstance(response, bool) else response.data # It will be bool in pong responses
# ################################################################################################################################
def _close_connection(self, verb:'str', *_ignored_args:'any_', **_ignored_kwargs:'any_') -> 'None':
    """ Logs the closing event, unregisters the peer from ODB and from this container's client map. """
    suffix = ' {})'.format(self.ext_client_name) if self.ext_client_name else ')'
    logger.info('{} %s (%s) to %s (%s %s %s%s'.format(verb),
        self._peer_address, self._peer_fqdn, self._local_address, self.config.name, self.ext_client_id,
        self.pub_client_id, suffix)

    self.unregister_auth_client()
    self.container.clients.pop(self.pub_client_id, None)

    # Unregister the client from audit log
    if self.is_audit_log_sent_active or self.is_audit_log_received_active:
        self.parallel_server.audit_log.delete_container(_audit_msg_type, self.pub_client_id)
# ################################################################################################################################
def disconnect_client(self, cid:'str'='', code:'int'=close_code.default_diconnect, reason:'str'='') -> 'None':
    """ Disconnects the remote client, cleaning up internal resources along the way.
    """
    self._disconnect_requested = True
    details = 'cid:{}; c:{}; r:{}; Disconnecting client from'.format(cid, code, reason)
    self._close_connection(details)
    if self.stream:
        self.close(code, reason)
# ################################################################################################################################
def opened(self) -> 'None':
    """ Callback invoked by the underlying WSX library once a new connection has been established. """
    log_template = 'Handling new WSX conn from %s (%s) to %s (%s %s) (%s %s) (%s)'
    logger.info(
        log_template,
        self._peer_address, self._peer_fqdn, self._local_address, self.config.name, self.python_id,
        self.forwarded_for, self.forwarded_for_fqdn, self.pub_client_id)

    # Closes the connection unless the peer authenticates in time
    _ = spawn(self._ensure_session_created)
# ################################################################################################################################
def closed(self, code:'int'=close_code.default_closed, reason:'str'='') -> 'None':
    """ Callback invoked when the connection is closed - cleans up unless we initiated the disconnect ourselves. """
    # Our self.disconnect_client must have cleaned up everything already
    if self._disconnect_requested:
        return
    self._close_connection('c:{}; r:{}; Client closed its connection from'.format(code, reason))

on_socket_terminated = closed
# ################################################################################################################################
def ponged(self, msg:'Bunch') -> 'None':
    """ Callback invoked when the peer answers one of our pings with a pong message. """
    # Audit log comes first
    if self.is_audit_log_received_active:
        self._store_audit_log_data(DataReceived, msg.data)

    # Pretend it's an actual response from the client,
    # we cannot use in_reply_to because pong messages are 1:1 copies of ping ones.
    parsed = self._json_parser.parse(msg.data) # type: any_
    if parsed:
        self.responses_received[parsed['meta']['id']] = True

        # Since we received a pong response, it means that the peer is connected,
        # in which case we update its pub/sub metadata.
        self.set_last_interaction_data('wsx.ponged')
# ################################################################################################################################
def unhandled_error(
    self,
    e:'Exception',
    _msg:'str'='Low-level exception caught, about to close connection from `%s`, e:`%s`'
) -> 'None':
    """ Called by the underlying WSX library when a low-level TCP/OS exception occurs.
    """
    # Do not log too many details for common disconnection events,
    # but log everything in other cases.
    details = e.args if isinstance(e, ConnectionError) else format_exc()

    peer_info = self.get_peer_info_pretty()

    logger.info(_msg, peer_info, details)
    logger_zato.info(_msg, peer_info, details)

    self.disconnect_client('<unhandled-error>', close_code.runtime_background_ping, 'Unhandled error caught')
# ################################################################################################################################
def close(
    self,
    code:'int'=1000,
    reason:'str'='',
    _msg:'str'='Error while closing connection from `%s`, e:`%s`',
    _msg_ignored:'str'='Caught an exception while closing connection from `%s`, e:`%s`'
) -> 'None':
    """ Re-implemented from the base class to be able to catch exceptions in self._write when closing connections.
    """
    # Make sure we close the connection at most once
    if not self.server_terminated:
        self.server_terminated = True
        try:
            if self.stream:
                # Write a close frame to the raw stream, masked if the stream requires it
                self._write(self.stream.close(code=code, reason=reason).single(mask=self.stream.always_mask))
            else:
                raise ConnectionError('WSX stream is already closed')
        except Exception as e:
            peer_info = self.get_peer_info_pretty()

            # Ignore non-essential errors about broken pipes, connections being already reset etc.
            if isinstance(e, ConnectionError):
                e_description = e.args
                logger.info(_msg_ignored, peer_info, e_description)
                logger_zato.info(_msg_ignored, peer_info, e_description)

            # Log details of exceptions of other types.
            else:
                exc = format_exc()
                logger.info(_msg, peer_info, exc)
                logger_zato.info(_msg, peer_info, exc)
# ################################################################################################################################
# ################################################################################################################################
class WebSocketContainer(WebSocketWSGIApplication):
    """ A WSGI application that accepts WebSocket connections and keeps track of all the currently connected clients,
    mapping each client's pub_client_id to its WebSocket object.
    """

    def __init__(
        self,
        config:'WSXConnectorConfig',
        *args:'any_',
        **kwargs:'any_'
    ) -> 'None':

        # Configuration of the channel that this container serves
        self.config = config

        # Maps pub_client_id -> WebSocket object for each connected client
        self.clients = {}

        super(WebSocketContainer, self).__init__(*args, **kwargs)

    def make_websocket(self, sock:'SocketMixin', protocols:'any_', extensions:'any_', wsgi_environ:'stranydict') -> 'any_':
        """ Builds a new WebSocket object for an incoming connection and registers it in self.clients.
        Returns None (after logging details) if the object could not be created.
        """
        try:
            websocket = self.handler_cls(self, self.config, sock, protocols, extensions, wsgi_environ.copy())
            self.clients[websocket.pub_client_id] = websocket
            wsgi_environ['ws4py.websocket'] = websocket
            return websocket
        except Exception:
            logger.warning(format_exc())

    def __call__(self, wsgi_environ:'stranydict', start_response:'callable_') -> 'any_':
        """ Entry point for each incoming HTTP request - upgrades it to a WebSocket connection if the request is valid.
        """
        try:

            # Populate basic information about the connection
            new_conn_map = {
                'channel_name': self.config.name,
            } # type: stranydict

            for wsgi_key, map_key in new_conn_map_config.items():
                value = wsgi_environ.get(wsgi_key)
                new_conn_map[map_key] = value

            # Log basic details about the incoming connection
            new_conn_info = new_conn_pattern.format(**new_conn_map)
            logger.info('About to handle WSX conn: %s', new_conn_info)

            # Make sure this is a WebSockets request
            if 'HTTP_UPGRADE' not in wsgi_environ:
                raise HandshakeError('No HTTP_UPGRADE in wsgi_environ')

            # Do we have such a path?
            if wsgi_environ['PATH_INFO'] != self.config.path:
                start_response(http404, {})
                _response_dict = error_response[NOT_FOUND]
                _serialized = _response_dict[self.config.data_format]
                return [_serialized]

            # Yes, we do, although we are not sure yet if input is valid,
            # e.g. HTTP_UPGRADE may be missing.
            else:
                _ = super(WebSocketContainer, self).__call__(wsgi_environ, start_response)

        except HandshakeError:
            logger.warning('Handshake error; e:`%s`', format_exc())
            start_response(http400, {})
            _response_dict = error_response[BAD_REQUEST]
            _serialized = _response_dict[self.config.data_format]
            return [_serialized]

        except Exception as e:
            # Log the exception object itself rather than e.args[0] - the latter would raise
            # an IndexError of its own for exceptions constructed without arguments,
            # masking the original error.
            logger.warning('Could not execute __call__; e:`%s`', e)
            raise

    def invoke_client(self, cid:'str', pub_client_id:'str', request:'any_', timeout:'int') -> 'any_':
        """ Invokes either one specific client (if pub_client_id is given) or all the currently connected ones.
        Returns a single response when exactly one client was invoked, otherwise a dict keyed by pub_client_id.
        """
        #
        # We need to handle a few cases:
        #
        # 1) We have a specific pub_client_id, in which case we invoke that one client and the response is not a list
        # 2) We have no pub_client_id and we have only one client so we invoke that one client and the response is not a list
        # 3) We have no pub_client_id and we have multiple clients so we invoke them all and the response is a list
        #

        #
        # Case 1)
        #
        if pub_client_id:
            return self.clients[pub_client_id].invoke_client(cid, request, timeout) # type: ignore

        #
        # Cases 2) and 3) - invoke all the clients that are currently connected ..
        #
        out = {} # type: ignore
        for client_id, wsx in self.clients.items(): # type: ignore
            response:'any_' = wsx.invoke_client(cid, request, timeout)
            out[client_id] = response

        # .. exactly one client was invoked - unwrap and return its response alone ..
        if len(out) == 1:
            return next(iter(out.values())) # type: ignore

        # .. zero or multiple clients were invoked - return the whole mapping
        # (previously, an empty mapping would raise an IndexError here).
        return out # type: ignore

    def invoke_client_by_attrs(self, cid:'str', attrs:'stranydict', request:'any_', timeout:'int') -> 'any_':
        """ Invokes, in background, all the clients whose client_attrs match the input attrs.
        """
        # Iterate over all the currently connected WebSockets ..
        for client in self.clients.values():

            # .. by default, assume that we do not need to invoke this client ..
            should_invoke = False

            # .. add static typing ..
            client = cast_('WebSocket', client)

            # .. go through each of the attrs that the client is expected to have ..
            # NOTE(review): as written, a single matching attribute is enough to mark the client
            # for invocation - mismatches merely skip to the next attribute. Confirm whether
            # an all-attributes match was intended before changing this.
            for expected_key, expected_value in attrs.items():

                # .. check if the client has such a key at all ..
                client_value = client.client_attrs.get(expected_key, _missing)

                # .. if not, we do not need to continue ..
                if client_value is _missing:
                    continue

                # .. otherwise, confirm that the value is the same
                # .. and iterate further if it is not ..
                if client_value != expected_value:
                    continue

                # .. if we are here, it means that this client can be invoked ..
                should_invoke = True

            # .. do invoke it now in background ..
            if should_invoke:
                _ = spawn(client.invoke_client, cid, request, wait_for_response=False)

    def broadcast(self, cid:'str', request:'any_') -> 'None':
        """ Sends the request, in background, to all the currently connected clients.
        """
        for client in self.clients.values():
            _ = spawn(client.invoke_client, cid, request, wait_for_response=False)

    def disconnect_client(self, cid:'str', pub_client_id:'str') -> 'any_':
        """ Disconnects one client by its pub_client_id, logging the fact if there is no such client.
        """
        client = self.clients.get(pub_client_id)
        if client:
            return client.disconnect_client(cid)
        else:
            logger.info('No such WSX client `%s` (%s) (disconnect_client)', pub_client_id, cid)

    def notify_pubsub_message(self, cid:'str', pub_client_id:'str', request:'any_') -> 'any_':
        """ Delivers a pub/sub message to one client by its pub_client_id.
        """
        client = self.clients.get(pub_client_id)
        if client:
            return client.notify_pubsub_message(cid, request)
        else:
            logger.info('No such WSX client `%s` (%s) (notify_pubsub_message)', pub_client_id, cid)

    def subscribe_to_topic(self, cid:'str', pub_client_id:'str', request:'any_') -> 'any_':
        """ Subscribes one client, by its pub_client_id, to a topic given on input.
        """
        client = self.clients.get(pub_client_id)
        if client:
            return client.subscribe_to_topic(cid, request)
        else:
            logger.info('No such WSX client `%s` (%s) (subscribe_to_topic)', pub_client_id, cid)

    def get_client_by_pub_id(self, pub_client_id:'str') -> 'any_':
        """ Returns a client object by its pub_client_id or None if there is no such client.
        """
        client = self.clients.get(pub_client_id)
        if client:
            return client
        else:
            logger.info('No such WSX client `%s` (get_client_by_pub_id)', pub_client_id)
# ################################################################################################################################
# ################################################################################################################################
class WSXWSGIHandler(WebSocketWSGIHandler):
    """ A WSGI handler that writes out the application's result iterable, taking care of empty results
    and chunked transfer encoding.
    """
    def process_result(self) -> 'None':

        # Write out each chunk the application produced, substituting b'' for falsy ones
        result = self.result or ''
        for chunk in result:
            self.write(chunk if chunk else b'')

        if self.status and not self.headers_sent:
            # In other words, the application returned an empty
            # result iterable (and did not use the write callable)
            # Trigger the flush of the headers.
            self.write(b'')

        # Terminate the chunked response, if one is in use
        if self.response_use_chunked:
            self._sendall(b'0\r\n\r\n')
# ################################################################################################################################
# ################################################################################################################################
class WSXGEventWebSocketPool(GEventWebSocketPool):
    """ Overrides self.clear in order to use __self__ instead of im_self (Python 3).
    """
    def clear(self):
        for item in list(self):
            try:
                # The greenlet's bound method gives us access to its WebSocket object
                wsx = item._run.__self__
                if wsx:
                    wsx.close(1001, 'Server is shutting down')
            except Exception as e:
                logger.info('WSX pool clear exception (info) -> %s', e)
            finally:
                # Always remove the greenlet from the pool, no matter the outcome
                self.discard(item)
# ################################################################################################################################
# ################################################################################################################################
class WebSocketServer(_Gevent_WSGIServer):
    """ A WebSocket server exposing Zato services to client applications.
    """

    # Each request is handled by this class, which knows how to upgrade HTTP connections to WebSockets
    handler_class = WSXWSGIHandler

    def __init__(
        self,
        config:'WSXConnectorConfig',
        auth_func:'callable_',
        on_message_callback:'callable_'
    ) -> 'None':

        # Split the configured address, e.g. ws://0.0.0.0:12345/path, into its components
        address_info = urlparse(config.address)

        host, port = address_info.netloc.split(':') # type: ignore
        config.host = host # type: ignore
        config.port = int(port)

        config.path = address_info.path # type: ignore
        config.needs_tls = address_info.scheme == 'wss'
        config.auth_func = auth_func
        config.on_message_callback = on_message_callback

        # Authentication is needed only if a security definition is attached to this channel
        config.needs_auth = bool(config.sec_name)

        super(WebSocketServer, self).__init__((config.host, config.port), WebSocketContainer(config, handler_cls=WebSocket))

        # Pool of greenlets, one per client connection
        self.pool = WSXGEventWebSocketPool()

    def stop(self, *args:'any_', **kwargs:'any_') -> 'None':
        """ Reimplemented from the parent class to be able to call shutdown prior to its calling self.socket.close.
        """
        # Close all client connections first
        self.pool.clear()

        # self.socket will exist only if we have previously successfully
        # bound to an address. Otherwise, there will be no such attribute.
        if hasattr(self, 'socket'):
            self.socket.shutdown(2) # SHUT_RDWR has value of 2 in 'man 2 shutdown'
        super(WebSocketServer, self).stop(*args, **kwargs)

    # These two methods are reimplemented from gevent.server to make it possible to use SO_REUSEPORT.

    @classmethod
    def get_listener(self:'any_', address:'any_', backlog:'any_'=None, family:'any_'=None) -> 'any_': # type: ignore
        # Note that, this being a classmethod, the name 'self' actually refers to the class itself
        if backlog is None:
            backlog = self.backlog
        return WebSocketServer._make_socket(address, backlog=backlog, reuse_addr=self.reuse_addr, family=family)

    @staticmethod
    def _make_socket(
        address:'str',
        backlog:'int'=50,
        reuse_addr:'boolnone'=None,
        family:'any_'=socket.AF_INET # type: ignore
    ) -> 'any_':
        """ Creates a non-blocking listen socket, enabling SO_REUSEPORT so that multiple processes can bind to one port.
        """
        sock = socket.socket(family=family) # type: ignore
        if reuse_addr is not None:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, reuse_addr) # type: ignore
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) # type: ignore
        try:
            sock.bind(address)
        except socket.error as e:
            # Include the address we could not bind to in the error's message
            strerror = getattr(e, 'strerror', None)
            if strerror is not None:
                e.strerror = strerror + ': ' + repr(address) # type: ignore
            raise
        sock.listen(backlog)
        sock.setblocking(0)
        return sock

    # The methods below merely delegate to the underlying WebSocketContainer (self.application)

    def invoke_client(self, cid:'str', pub_client_id:'str', request:'any_', timeout:'int') -> 'any_':
        return self.application.invoke_client(cid, pub_client_id, request, timeout)

    def invoke_client_by_attrs(self, cid:'str', attrs:'stranydict', request:'any_', timeout:'int') -> 'any_':
        return self.application.invoke_client_by_attrs(cid, attrs, request, timeout)

    def broadcast(self, cid:'str', request:'any_') -> 'any_':
        return self.application.broadcast(cid, request)

    def disconnect_client(self, cid:'str', pub_client_id:'str') -> 'any_':
        return self.application.disconnect_client(cid, pub_client_id)

    def notify_pubsub_message(self, cid:'str', pub_client_id:'str', request:'any_') -> 'any_':
        return self.application.notify_pubsub_message(cid, pub_client_id, request)

    def subscribe_to_topic(self, cid:'str', pub_client_id:'str', request:'any_') -> 'any_':
        return self.application.subscribe_to_topic(cid, pub_client_id, request)

    def get_client_by_pub_id(self, pub_client_id:'str') -> 'any_':
        return self.application.get_client_by_pub_id(pub_client_id)
# ################################################################################################################################
# ################################################################################################################################
class ChannelWebSocket(Connector):
    """ A WebSocket channel connector to which external client applications connect.
    """
    start_in_greenlet = True

    # The underlying WSX server that actually accepts connections from clients
    _wsx_server: 'WebSocketServer'

    def _start(self) -> 'None':
        server_config = cast_('any_', self.config)
        self._wsx_server = WebSocketServer(server_config, self.auth_func, self.on_message_callback)
        self.is_connected = True
        self._wsx_server.start()

    def _stop(self) -> 'None':
        if self.is_connected:
            self._wsx_server.stop(3)
            self.is_connected = False

    def get_log_details(self) -> 'str':
        return cast_('str', self.config.address)

    def invoke(
        self,
        cid:'str',
        pub_client_id:'str'='',
        request:'any_'=None,
        timeout:'int'=5,
        remove_wrapper:'bool'=True
    ) -> 'any_':
        """ Invokes one or, if pub_client_id is empty, all connected clients,
        optionally unwrapping the top-level 'response' element of the reply.
        """
        response = self._wsx_server.invoke_client(cid, pub_client_id, request, timeout)
        if remove_wrapper and isinstance(response, dict) and 'response' in response:
            return response['response'] # type: ignore
        return response # type: ignore

    def invoke_by_attrs(self, cid:'str', attrs:'stranydict', request:'any_', timeout:'int'=5) -> 'any_':
        server = self._wsx_server
        return server.invoke_client_by_attrs(cid, attrs, request, timeout)

    def broadcast(self, cid:'str', request:'any_') -> 'any_':
        server = self._wsx_server
        return server.broadcast(cid, request)

    def disconnect_client(self, cid:'str', pub_client_id:'str', *ignored_args:'any_', **ignored_kwargs:'any_') -> 'any_':
        server = self._wsx_server
        return server.disconnect_client(cid, pub_client_id)

    def notify_pubsub_message(self, cid:'str', pub_client_id:'str', request:'any_') -> 'any_':
        server = self._wsx_server
        return server.notify_pubsub_message(cid, pub_client_id, request)

    def subscribe_to_topic(self, cid:'str', pub_client_id:'str', request:'any_') -> 'any_':
        server = self._wsx_server
        return server.subscribe_to_topic(cid, pub_client_id, request)

    def get_client_by_pub_id(self, pub_client_id:'str') -> 'any_':
        server = self._wsx_server
        return server.get_client_by_pub_id(pub_client_id)

    def get_conn_report(self) -> 'stranydict':
        return self._wsx_server.environ

    # Convenience aliases
    invoke_client = invoke
    invoke_client_by_attrs = invoke_by_attrs
# ################################################################################################################################
# ################################################################################################################################
| 85,525
|
Python
|
.py
| 1,510
| 45.671523
| 131
| 0.510634
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,095
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from copy import deepcopy
from datetime import datetime
from logging import getLogger
from traceback import format_exc
# gevent
from gevent import sleep, spawn
from gevent.lock import RLock
# Zato
from zato.common.util.api import spawn_greenlet
# ################################################################################################################################
if 0:
from typing import Any, Callable, Dict as dict_
from zato.common.model.connector import ConnectorConfig
Any = Any
Callable = Callable
ConnectorConfig = ConnectorConfig
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
class connector_type:
    """ Human-readable names of all the connection types that this module supports.
    """
    # Channels - connections through which external clients send messages to us
    class channel:
        zmq = 'ZeroMQ channel'

    # Outgoing connections - connections through which we send messages to external systems
    class out:
        vault_conn = 'Vault connection'
        zmq = 'ZeroMQ outgoing'

    # Duplex connections - used both for sending and receiving
    class duplex:
        amqp = 'AMQP'
        web_socket = 'WebSocket'
        zmq_v01 = 'ZeroMQ MDP v0.1'
class Inactive(Exception):
    """ Raised when an attempt is made to use a connection that is not active.
    """
# ################################################################################################################################
class EventLogger:
def __init__(self, enter_verb, exit_verb, enter_func, exit_func, predicate_func=None):
self.enter_verb = enter_verb
self.exit_verb = exit_verb
self.enter_func = enter_func
self.exit_func = exit_func
self.predicate_func = predicate_func
def __enter__(self):
self.enter_func(self.enter_verb)
def __exit__(self, *args, **kwargs):
spawn_greenlet(self.exit_func, self.exit_verb, self.predicate_func)
# ################################################################################################################################
class Connector:
    """ A connector for long running background connections, such as WebSockets or AMQP. Includes means to run channels
    and outgoing connections.
    """

    # Whether that connector's start method should be called in its own greenlet
    start_in_greenlet = False

    def __init__(self, name, type, config, on_message_callback=None, auth_func=None, channels=None, outconns=None,
        parallel_server=None):
        # type: (str, str, ConnectorConfig, Callable, Callable, dict, dict, Callable) -> None
        self.name = name
        self.type = type
        self.config = config
        self.config.parallel_server = parallel_server
        self.on_message_callback = on_message_callback # Invoked by channels for each message received
        self.auth_func = auth_func # Invoked by channels that need to authenticate users

        # Service to invoke by channels for each message received
        self.service = getattr(config, 'service_name', None)

        # Channels and outgoing connections that depend on this connector, keyed by their names
        self.channels = channels or {} # type: dict
        self.outconns = outconns or {} # type: dict

        self.id = self.config.id
        self.is_active = self.config.is_active # type: bool
        self.is_inactive = not self.is_active # type: bool

        # Lifecycle flags - subclasses set is_connected to True in their _start implementation,
        # keep_connecting controls the retry loop in _start_loop and keep_running is flipped by start/stop
        self.is_connected = False
        self.keep_connecting = True
        self.keep_running = False
        self.lock = RLock()

        # Hex id of this instance, used to tell instances apart in log entries
        self.id_self = hex(id(self))

        # May be provided by subclasses
        self.conn = None

    def get_log_details(self):
        """ Can be overridden in subclasses.
        """
        return ''

    # Aliases - all of them return the same information as get_log_details unless overridden
    _get_conn_string = get_prev_log_details = get_log_details

    def _start_loop(self):
        """ Establishes a connection to the external resource in a loop that keeps running as long as self.is_connected is False.
        The flag must be set to True in a subclass's self._start method.
        """
        attempts = 0
        log_each = 10
        start = datetime.utcnow()

        if not self.is_active:
            logger.warning('Skipped creation of an inactive connection `%s` (%s)', self.name, self.type)
            return

        try:
            while self.keep_connecting:
                while not self.is_connected:
                    try:
                        self._start()
                    except Exception:
                        # Ok, we are not connected but it's possible that self.keep_connecting is already False,
                        # for instance, because someone deleted us even before we connected to the remote end.
                        # In this case, self.is_connected will never be True so we cannot loop indefinitely.
                        # Instead, we just need to return from the method to stop the connection attempts.
                        if not self.keep_connecting:
                            return
                        logger.warning('Caught %s exception `%s` (id:%s) (`%s` %s)',
                            self.type, format_exc(), self.id_self, self.name, self.get_log_details())
                        sleep(2)

                    # We go here if ._start did not set self.is_connected to True.
                    # The if below is needed because we could have connected in between the sleep call and now.
                    if not self.is_connected:
                        attempts += 1
                        if attempts % log_each == 0:
                            logger.warning('Could not connect to %s (%s) after %s attempts, time spent so far: %s (id:%s)',
                                self.get_log_details(), self.name, attempts, datetime.utcnow() - start, self.id_self)

                # Ok, break from the outermost loop
                self.keep_connecting = False

            # Now that we are connected we can create all channels and outgoing connections depending on this connector.
            if self.channels:
                self.create_channels()

            if self.outconns:
                self.create_outconns()

        except KeyboardInterrupt:
            self.keep_connecting = False

    def create_channels(self):
        """ Creates all channels that depend on this connector - overridden by subclasses as needed.
        """
        pass

    def create_outconns(self):
        """ Creates all outgoing connections that depend on this connector - overridden by subclasses as needed.
        """
        pass

    def create_channel(self, config):
        raise NotImplementedError('May be implemented in subclasses')

    def edit_channel(self, config):
        raise NotImplementedError('May be implemented in subclasses')

    def delete_channel(self, config):
        raise NotImplementedError('May be implemented in subclasses')

    def create_outconn(self, config):
        raise NotImplementedError('May be implemented in subclasses')

    def edit_outconn(self, config):
        raise NotImplementedError('May be implemented in subclasses')

    def delete_outconn(self, config):
        raise NotImplementedError('May be implemented in subclasses')

    def _start(self):
        raise NotImplementedError('Must be implemented in subclasses')

    def _send(self):
        raise NotImplementedError('Must be implemented in subclasses')

    def send(self, msg, *args, **kwargs):
        """ Thread-safe wrapper around self._send - raises Inactive if the connection is not active.
        """
        with self.lock:
            if self.is_inactive:
                raise Inactive('Connection `{}` is inactive ({})'.format(self.name, self.type))
            return self._send(msg, *args, **kwargs)

    def _start_stop_logger(self, enter_verb, exit_verb, predicate_func=None):
        """ Returns a context manager that logs the connector's start/stop events on entry and exit.
        """
        return EventLogger(enter_verb, exit_verb, self._info_start_stop, self._info_start_stop, predicate_func)

    def _info_start_stop(self, verb, predicate=None):
        """ Emits a single log entry about the connector's being started or stopped.
        """
        log_details = self.get_prev_log_details() if 'Stop' in verb else self.get_log_details()

        # We cannot always log that the connector started or stopped because actions take place asynchronously,
        # in background. Thus we may receive a predicate function that will block until it is safe to emit a log entry.
        if verb == 'Started' and predicate:
            if not predicate():
                return

        if self.is_active:
            logger.info(
                '%s %s connector `%s` (id:%s) %s', verb, self.type, self.name, self.id_self,
                '({})'.format(log_details if log_details else self.get_log_details()))

    def _wait_until_connected(self):
        """ Sleeps undefinitely until self.is_connected is True. Used as a predicate in self._start_stop_logger.
        Returns True if self.is_connected is True at the time of this method's completion. It may be False if we are
        told to stop connecting from layers above us.
        """
        while not self.is_connected:
            sleep(0.1)
            if not self.keep_connecting:
                return

        return True

    def _spawn_start(self):
        # Runs the connection loop in a new greenlet, waiting until it completes
        spawn(self._start_loop).get()

    def start(self, needs_log=True):
        """ Starts the connector unless it is inactive, optionally (self.start_in_greenlet) in its own greenlet.
        """
        if self.is_inactive:
            logger.info('Skipped creation of an inactive connector `%s` (%s)', self.name, self.type)
            return

        with self._start_stop_logger('Starting', 'Started', self._wait_until_connected):
            self.keep_running = True
            self.keep_connecting = True
            try:
                if self.start_in_greenlet:
                    spawn_greenlet(self._spawn_start, timeout=1)
                else:
                    self._start_loop()
            except Exception:
                logger.warning(format_exc())

    def stop(self):
        """ Stops the connector and prevents any further reconnection attempts.
        """
        # NOTE(review): the exit verb below carries a leading space (' Stopped') which ends up
        # in log entries - looks unintended; confirm before changing the log format.
        with self._start_stop_logger('Stopping',' Stopped'):
            self._stop()
            self.keep_connecting = False # Set to False in case .stop is called before the connection was established
            self.keep_running = False

    def _stop(self):
        """ Can be, but does not have to, overwritten by subclasses to customize the behaviour.
        """

    def get_conn_report(self):
        raise NotImplementedError('Needs to be implemented by subclasses')
# ################################################################################################################################
class ConnectorStore:
    """ Base container for all connectors of a given type, keyed by name.
    Each public method serializes access to the underlying dict with self.lock.
    """
    def __init__(self, type, connector_class, parallel_server=None):
        self.type = type
        self.connector_class = connector_class
        self.parallel_server = parallel_server
        self.connectors = {} # type: dict_[str, Connector]
        self.lock = RLock()

    def _create(self, name, config, on_message_callback=None, auth_func=None, channels=None, outconns=None, needs_start=False):
        # type: (str, ConnectorConfig, Callable, Callable, dict, dict, bool)
        # Low-level counterpart of self.create - assumes self.lock is already held.
        new_connector = self.connector_class(
            name, self.type, config, on_message_callback, auth_func, channels, outconns, self.parallel_server)
        self.connectors[name] = new_connector
        if needs_start:
            new_connector.start()

    def create(self, name, config, on_message_callback=None, auth_func=None, channels=None, outconns=None, needs_start=False):
        # type: (str, ConnectorConfig, Callable, Callable, dict, dict, bool)
        with self.lock:
            self._create(name, config, on_message_callback, auth_func, channels, outconns, needs_start)

    def _edit(self, old_name, config):
        # type: (str, ConnectorConfig)
        # Editing means deleting the existing connector and creating a new one with its callbacks carried over.
        existing = self._delete(old_name)
        self._create(
            config.name, config, existing.on_message_callback, existing.auth_func, existing.channels,
            existing.outconns, True)

    def edit(self, old_name, config, *ignored_args):
        # type: (str, Any)
        with self.lock:
            self._edit(old_name, config)

    def _delete(self, name):
        # type: (str)
        # Low-level counterpart of self.delete - stops the connector and hands it back to the caller.
        existing = self.connectors[name]
        existing.stop()
        del self.connectors[name]
        return existing

    def delete(self, name):
        # type: (str)
        with self.lock:
            self._delete(name)

    def change_password(self, name, config):
        # type: (str, ConnectorConfig)
        # Recreates the named connector with a new password, keeping the rest of its configuration intact.
        with self.lock:
            updated_config = deepcopy(self.connectors[name].config)
            updated_config.password = config.password
            self._edit(updated_config.name, updated_config)

    def create_channel(self, name, config):
        # type: (str, dict)
        with self.lock:
            connector = self.connectors[name]
            connector.create_channel(config)

    def edit_channel(self, name, config):
        # type: (str, dict)
        with self.lock:
            connector = self.connectors[name]
            connector.edit_channel(config)

    def delete_channel(self, name, config):
        # type: (str, dict)
        with self.lock:
            connector = self.connectors[name]
            connector.delete_channel(config)

    def create_outconn(self, name, config):
        # type: (str, dict)
        with self.lock:
            connector = self.connectors[name]
            connector.create_outconn(config)

    def edit_outconn(self, name, config):
        # type: (str, dict)
        with self.lock:
            connector = self.connectors[name]
            connector.edit_outconn(config)

    def delete_outconn(self, name, config):
        # type: (str, dict)
        with self.lock:
            connector = self.connectors[name]
            connector.delete_outconn(config)

    def start(self, name=None):
        # type: (str)
        with self.lock:
            for connector in self.connectors.values():

                # Perhaps we want to start a single connector so we need to filter out the other ones
                if name and name != connector.name:
                    continue

                connector.start()

    def invoke(self, name, *args, **kwargs):
        # type: (str, Any, Any)
        connector = self.connectors[name]
        return connector.invoke(*args, **kwargs)

    def notify_pubsub_message(self, name, *args, **kwargs):
        # type: (str, Any, Any)
        connector = self.connectors[name]
        return connector.notify_pubsub_message(*args, **kwargs)
# ################################################################################################################################
| 18,240
|
Python
|
.py
| 320
| 47.975
| 130
| 0.445955
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,096
|
ipc.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/subprocess_/ipc.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime, timedelta
from http.client import NOT_ACCEPTABLE, SERVICE_UNAVAILABLE
from logging import getLogger
from traceback import format_exc
# gevent
from gevent import sleep
# requests
from requests import get, post
# Zato
from zato.common.exception import ConnectorClosedException
from zato.common.json_internal import dumps, loads
from zato.common.util.api import get_free_port
from zato.common.util.config import get_url_protocol_from_config_item
from zato.common.util.proc import start_python_process
# ################################################################################################################################
if 0:
from requests import Response
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
Response = Response
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# Pattern of the local address under which a subprocess-based connector is reachable: protocol, port, path
address_pattern='{}://127.0.0.1:{}/{}'

# Error message used when a given connector's component is not enabled in server.conf
not_enabled_pattern = '{connector_name} component is not enabled - install PyMQI, set component_enabled.{check_enabled} '
not_enabled_pattern += 'to True in server.conf and restart all servers before {connector_name} connections can be used.'

# ################################################################################################################################

# HTTP status codes indicating that the remote connector considers the connection closed
_closed_status_code = (NOT_ACCEPTABLE, SERVICE_UNAVAILABLE)
# ################################################################################################################################
class SubprocessIPC:
    """ Base class for IPC with subprocess-based connectors.

    Subclasses fill in the class-level attributes below. A server uses an instance
    of a subclass to start the connector subprocess and then exchanges HTTP/JSON
    messages with it over localhost.
    """
    check_enabled = None
    connector_name = '<connector-name-empty>'
    callback_suffix = '<callback-suffix-empty>'
    ipc_config_name = '<ipc-config-name-empty>'
    auth_username = '<auth-username-empty>'
    pidfile_suffix = 'not-configured'
    connector_module = '<connector-module-empty>'

    # Broker actions, set by subclasses. NOTE(review): subclasses that call
    # self.send_message are also expected to define self.action_send - confirm.
    action_definition_create = None
    action_outgoing_create = None
    action_channel_create = None
    action_ping = None

# ################################################################################################################################

    def __init__(self, server:'ParallelServer') -> 'None':
        self.server = server
        self.api_protocol = get_url_protocol_from_config_item(self.server.use_tls)

        # Assigned in start_connector once a free port has been found
        self.ipc_tcp_port:'int | None' = None

# ################################################################################################################################

    def _check_enabled(self):
        """ Raises an exception unless this connector is enabled in server.conf.
        """
        if not self.server.fs_server_config.component_enabled[self.check_enabled]:
            raise Exception(not_enabled_pattern.format(**{
                'connector_name': self.connector_name,
                'check_enabled': self.check_enabled
            }))

# ################################################################################################################################

    def get_credentials(self):
        """ Returns a username/password pair using which it is possible to authenticate with a connector.
        """
        config = self.server.worker_store.basic_auth_get(self.auth_username)['config']
        return config.username, config.password

# ################################################################################################################################

    def start_connector(self, ipc_tcp_start_port, timeout=5, extra_options_kwargs=None):
        """ Starts an HTTP server acting as an connector process. Its port will be greater than ipc_tcp_start_port,
        which is the starting point to find a free port from.
        """
        # Ensure we are enabled before we continue
        if self.check_enabled:
            self._check_enabled()

        # Turn into a dict for later use
        extra_options_kwargs = extra_options_kwargs or {}

        self.ipc_tcp_port = get_free_port(ipc_tcp_start_port)
        logger.info('Starting {} connector for server `%s` on port `%s`'.format(self.connector_name),
            self.server.name, self.ipc_tcp_port)

        # Credentials for both servers and connectors
        username, password = self.get_credentials()

        # Employ IPC to exchange subprocess startup configuration
        self.server.connector_config_ipc.set_config(self.ipc_config_name, dumps({
            'port': self.ipc_tcp_port,
            'username': username,
            'password': password,
            'server_port': self.server.port,
            'server_name': self.server.name,
            'server_path': '/zato/internal/callback/{}'.format(self.callback_suffix),
            'base_dir': self.server.base_dir,
            'needs_pidfile': not self.server.has_fg,
            'pidfile_suffix': self.pidfile_suffix,
            'logging_conf_path': self.server.logging_conf_path
        }))

        # Start connector in a sub-process
        self._start_connector_process(extra_options_kwargs)

        # Wait up to timeout seconds for the connector to start as indicated by its responding to a PING request
        now = datetime.utcnow()
        warn_after = now + timedelta(seconds=60)
        should_warn = False
        until = now + timedelta(seconds=timeout)
        is_ok = False
        address = address_pattern.format(self.api_protocol, self.ipc_tcp_port, 'ping')
        auth = self.get_credentials()

        # Fixed: the previous condition (`not is_ok or now >= until`) stayed True
        # for as long as pings kept failing, so the loop could never time out.
        while not is_ok and now < until:
            if not should_warn:
                if now >= warn_after:
                    should_warn = True
            is_ok = self._ping_connector(address, auth, should_warn)
            if is_ok:
                break
            else:
                sleep(2)
                now = datetime.utcnow()

        if not is_ok:
            logger.warning('{} connector (%s) could not be started after %s'.format(self.connector_name), address, timeout)
        else:
            return is_ok

# ################################################################################################################################

    def _start_connector_process(self, extra_options_kwargs):
        """ Spawns the connector module in a new Python subprocess.
        """
        # type: (dict) -> None

        # Base extra options
        extra_options={
            'deployment_key': self.server.deployment_key,
            'shmem_size': self.server.shmem_size
        }

        # Merge any additional ones
        extra_options.update(extra_options_kwargs)

        # Start the process now
        start_python_process(
            '{} connector'.format(self.connector_name),
            False,
            self.connector_module,
            '',
            extra_options=extra_options,
            stderr_path=self.server.stderr_path
        )

# ################################################################################################################################

    def _ping_connector(self, address, auth, should_warn):
        """ Issues a single HTTP GET ping. Returns response.ok on success,
        or None (falsy) if the request raised an exception.
        """
        try:
            response = get(address, data='{}', auth=auth)
        except Exception:
            # Log only once we have been waiting long enough for it to matter
            if should_warn:
                logger.info(format_exc())
        else:
            return response.ok

# ################################################################################################################################

    def ping(self, id):
        """ Pings the connection with the given ID via the connector subprocess.
        """
        return self.invoke_connector({
            'action': self.action_ping.value,
            'id': id
        })

# ################################################################################################################################

    def send_message(self, msg):
        """ Sends an outgoing message to the connector, returning its parsed JSON response.
        """
        # type: (dict) -> None
        if self.check_enabled:
            self._check_enabled()

        # The connector speaks JSON - decode any bytes values up front
        for k, v in msg.items():
            if isinstance(v, bytes):
                msg[k] = v.decode('utf8')

        msg['action'] = self.action_send.value
        response = self.invoke_connector(msg)

        # If we are here, it means that there was no error because otherwise an exception
        # would have been raised by invoke_connector.
        response = loads(response.text)
        return response

# ################################################################################################################################

    def invoke_connector(self, msg, raise_on_error=True, address_pattern=address_pattern):
        """ POSTs a message to the connector's API endpoint. Raises (or logs,
        depending on raise_on_error) if the response is not 2xx; returns the
        Response object otherwise.
        """
        if self.check_enabled:
            self._check_enabled()

        address = address_pattern.format(self.api_protocol, self.ipc_tcp_port, 'api')
        response = post(address, data=dumps(msg), auth=self.get_credentials()) # type: Response

        if not response.ok:
            if raise_on_error:
                # 406/503 mean the remote connection was closed - signal that distinctly
                if response.status_code in _closed_status_code:
                    raise ConnectorClosedException(None, response.text)
                else:
                    raise Exception(response.text)
            else:
                logger.warning('Error message from {} connector `{}`'.format(self.connector_name, response.text))
        else:
            return response

# ################################################################################################################################

    def _create_initial_objects(self, config_dict, action, text_pattern, text_func):
        """ Sends one create-message per config entry to the connector, ignoring errors
        (raise_on_error=False) so a single bad object does not stop the rest.
        """
        for value in config_dict.values():
            config = value['config']
            logger.info(text_pattern, text_func(config))
            config['action'] = action.value
            self.invoke_connector(config, False)

# ################################################################################################################################

    def create_initial_definitions(self, config_dict, text_func):
        """ Creates all initial connection definitions in the connector.
        """
        text_pattern = 'Creating {} definition `%s`'.format(self.connector_name)
        self._create_initial_objects(config_dict, self.action_definition_create, text_pattern, text_func)

# ################################################################################################################################

    def create_initial_outconns(self, config_dict, text_func):
        """ Creates all initial outgoing connections in the connector.
        """
        text_pattern = 'Creating {} outconn `%s`'.format(self.connector_name)
        self._create_initial_objects(config_dict, self.action_outgoing_create, text_pattern, text_func)

# ################################################################################################################################

    def create_initial_channels(self, config_dict, text_func):
        """ Creates all initial channels in the connector.
        """
        text_pattern = 'Creating {} channel `%s`'.format(self.connector_name)
        self._create_initial_objects(config_dict, self.action_channel_create, text_pattern, text_func)
# ################################################################################################################################
# ################################################################################################################################
| 11,161
|
Python
|
.py
| 204
| 46.387255
| 130
| 0.491235
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,097
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/subprocess_/__init__.py
|
# -*- coding: utf-8 -*-

"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io

Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""

# The docstring must come before the __future__ import - a string placed after
# any statement is no longer the module docstring, and a __future__ import may
# legally be preceded only by the docstring and comments.
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,098
|
base.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/subprocess_/base.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
import os
import signal
import sys
from functools import wraps
from http.client import BAD_REQUEST, FORBIDDEN, INTERNAL_SERVER_ERROR, NOT_ACCEPTABLE, OK, responses, SERVICE_UNAVAILABLE
from logging import Formatter, getLogger, StreamHandler
from logging.handlers import RotatingFileHandler
from os import getppid, path
from threading import RLock
from traceback import format_exc
from wsgiref.simple_server import make_server as wsgiref_make_server
# Bunch
from bunch import bunchify
# Requests
from requests import post as requests_post
# YAML
import yaml
# Python 2/3 compatibility
from builtins import bytes
# Zato
from zato.common.api import MISC
from zato.common.broker_message import code_to_name
from zato.common.json_internal import dumps, loads
from zato.common.util.api import parse_cmd_line_options
from zato.common.util.auth import parse_basic_auth
from zato.common.util.open_ import open_r, open_w
from zato.common.util.posix_ipc_ import ConnectorConfigIPC
# ################################################################################################################################
if 0:
from bunch import Bunch
from logging import Logger
Bunch = Bunch
Logger = Logger
# ################################################################################################################################
def get_logging_config(conn_type, file_name):
    """ Returns a default dictConfig-style logging configuration for a connector
    of the given type, logging to ./logs/<file_name>.log with rotation.
    """
    logger_name = 'zato_{}'.format(conn_type)

    loggers = {
        logger_name: {
            'qualname': logger_name,
            'level': 'INFO',
            'propagate': False,
            'handlers': [conn_type],
        }
    }

    handlers = {
        conn_type: {
            'formatter': 'default',
            'backupCount': 10,
            'mode': 'a',
            'maxBytes': 20000000,
            'filename': './logs/{}.log'.format(file_name),
        },
    }

    formatters = {
        'default': {
            'format': '%(asctime)s - %(levelname)s - %(process)d:%(threadName)s - %(name)s:%(lineno)d - %(message)s'
        }
    }

    return {
        'loggers': loggers,
        'handlers': handlers,
        'formatters': formatters,
    }
# ################################################################################################################################
# Pre-rendered HTTP status lines, e.g. '200 OK', used when building WSGI responses
_http_200 = '{} {}'.format(OK, responses[OK])
_http_400 = '{} {}'.format(BAD_REQUEST, responses[BAD_REQUEST])
_http_403 = '{} {}'.format(FORBIDDEN, responses[FORBIDDEN])
_http_406 = '{} {}'.format(NOT_ACCEPTABLE, responses[NOT_ACCEPTABLE])
_http_500 = '{} {}'.format(INTERNAL_SERVER_ERROR, responses[INTERNAL_SERVER_ERROR])
_http_503 = '{} {}'.format(SERVICE_UNAVAILABLE, responses[SERVICE_UNAVAILABLE])

# URL paths recognized by the connector's embedded HTTP server
_path_api = '/api'
_path_ping = '/ping'

_paths = (_path_api, _path_ping)
# ################################################################################################################################
# ################################################################################################################################
def ensure_id_exists(container_name):
    """ Decorator factory - before the wrapped handler runs, verifies that
    msg.id is a key of the container attribute named by container_name.
    """
    def _decorate(func):

        # Sentinel distinguishing a missing attribute from one that is falsy
        _missing = object()

        @wraps(func)
        def _wrapper(self, msg):
            # type: (BaseConnectionContainer, Bunch)

            # Make sure we have a config container of that name
            container = getattr(self, container_name, _missing) # type: dict
            if container is _missing:
                raise Exception('No such attribute `{}` in `{}`'.format(container_name, self))

            if msg.id not in container:
                raise Exception('No such ID `{}` among `{}` ({})'.format(
                    msg.id, sorted(container.items()), container_name))

            return func(self, msg)

        return _wrapper
    return _decorate
# ################################################################################################################################
def ensure_prereqs_ready(func):
    """ Decorator - raises an exception unless the container's prerequisites
    (if it declares any via has_prereqs) report themselves as ready.
    """
    @wraps(func)
    def _wrapper(self, *args, **kwargs):
        # type: (BaseConnectionContainer)
        if self.has_prereqs and not self.check_prereqs_ready():
            raise Exception(self.get_prereqs_not_ready_message())
        return func(self, *args, **kwargs)
    return _wrapper
# ################################################################################################################################
# ################################################################################################################################
class Response:
    """ A minimal value object describing an HTTP response to be sent back:
    a status line, a payload and its content type.
    """
    def __init__(self, status=_http_200, data=b'', content_type='text/json'):
        self.status, self.data, self.content_type = status, data, content_type
# ################################################################################################################################
# ################################################################################################################################
class BaseConnectionContainer:
    """ Base class for subprocess-based connector containers - runs a WSGI server
    that receives configuration and message requests from the parent Zato server
    and manages connection definitions, outgoing connections and channels.
    """

    # Subclasses may indicate that they have their specific prerequisites
    # that need to be fulfilled before connections can be used,
    # e.g. IBM MQ requires installation of PyMQI.
    has_prereqs = False

    # Set by our subclasses that actually create connections
    connection_class = None

    # Logging configuration that will be set by subclasses
    ipc_name = 'invalid-notset-ipc-name'
    conn_type = 'invalid-notset-conn-type'
    logging_file_name = 'invalid-notset-logging-file-name'

    remove_id_from_def_msg = True
    remove_name_from_def_msg = True

    def __init__(self):

        # When started as a subprocess, startup options are passed on the command line
        if len(sys.argv) > 1:
            self.options = sys.argv[1]
            self.options = parse_cmd_line_options(self.options) # type: dict
            self.options['zato_subprocess_mode'] = True
        else:
            # Test/standalone defaults when run without arguments
            self.options = {
                'zato_subprocess_mode': False,
                'deployment_key': 'test',
                'shmem_size': 100_000,
            }

        # Subclasses may want to update the options here
        self.enrich_options()

        self.deployment_key = self.options['deployment_key']
        self.shmem_size = int(self.options['shmem_size'])

        self.host = '127.0.0.1'
        self.port = None
        self.username = None
        self.password = None
        self.server_auth = None
        self.basic_auth_expected = None

        self.server_port = None
        self.server_path = None
        self.server_address = 'http://127.0.0.1:{}{}'

        self.lock = RLock()
        self.logger = None # type: Logger
        self.parent_pid = getppid()

        self.config_ipc = ConnectorConfigIPC()
        if self.options['zato_subprocess_mode']:
            self.config_ipc.create(self.deployment_key, self.shmem_size, False)

        self.connections = {}
        self.outconns = {}
        self.channels = {}

        self.outconn_id_to_def_id = {} # Maps outgoing connection IDs to their underlying definition IDs
        self.channel_id_to_def_id = {} # Ditto but for channels
        self.outconn_name_to_id = {}   # Maps outgoing connection names to their IDs

        self.set_config()
        self.post_init()

# ################################################################################################################################

    def enrich_options(self):
        """ Hook for subclasses to amend self.options before it is read.
        """
        # type: (None) -> None
        pass

# ################################################################################################################################

    def post_init(self):
        """ Can be implemented by subclasses to further customise the container.
        """

# ################################################################################################################################

    def set_config(self):
        """ Sets self attributes, as configured in shmem by our parent process.
        """
        if self.options['zato_subprocess_mode']:
            config = self.config_ipc.get_config('zato-{}'.format(self.ipc_name))
            config = loads(config)
        else:
            # Standalone-mode defaults mirroring what the parent would provide
            config = {
                'username': 'zato.username',
                'password': 'zato.password',
                'port': 35035,
                'server_port': 35036,
                'server_path': '/zato/base-connection-container',
                'base_dir': os.path.expanduser('~/env/qs-1'),
                'needs_pidfile': False,
            }

        config = bunchify(config)

        self.username = config.username
        self.password = config.password
        self.server_auth = (self.username, self.password)

        self.base_dir = config.base_dir
        self.port = config.port
        self.server_port = config.server_port
        self.server_path = config.server_path
        self.server_address = self.server_address.format(self.server_port, self.server_path)

        if self.options['zato_subprocess_mode']:
            with open_r(config.logging_conf_path) as f:
                logging_config = yaml.load(f, yaml.FullLoader)

            # Fall back to our own defaults if the server's config has no section for this connector
            if not 'zato_{}'.format(self.conn_type) in logging_config['loggers']:
                logging_config = get_logging_config(self.conn_type, self.logging_file_name)

            # Configure logging for this connector
            self.set_up_logging(logging_config)
        else:
            log_format = '%(asctime)s - %(levelname)s - %(process)d:%(threadName)s - %(name)s:%(lineno)d - %(message)s'
            logging.basicConfig(level=logging.DEBUG, format=log_format)
            self.logger = getLogger('zato')
            self.logger.warning('QQQ %s', self)

        # Store our process's pidfile
        if config.needs_pidfile:
            self.store_pidfile(config.pidfile_suffix)

# ################################################################################################################################

    def check_prereqs_ready(self):
        """ Subclasses with has_prereqs=True override this to verify their prerequisites.
        """
        return True

# ################################################################################################################################

    def get_prereqs_not_ready_message(self):
        """ Subclasses override this with an actionable error message.
        """
        return '<default-not-set-prereqs-not-ready-message>'

# ################################################################################################################################

    def set_up_logging(self, config):
        """ Configures a rotating file handler plus stdout, using the dictConfig-style input.
        """
        logger_conf = config['loggers']['zato_{}'.format(self.conn_type)]
        handler_conf = config['handlers'][self.conn_type]

        # These keys are not valid RotatingFileHandler constructor arguments
        del handler_conf['formatter']
        handler_conf.pop('class', False)
        formatter_conf = config['formatters']['default']['format']

        self.logger = getLogger(logger_conf['qualname'])
        self.logger.setLevel(getattr(logging, logger_conf['level']))

        formatter = Formatter(formatter_conf)

        # Resolve the (possibly relative) log file path against our base directory
        handler_conf['filename'] = path.abspath(path.join(self.base_dir, handler_conf['filename']))
        handler = RotatingFileHandler(**handler_conf)
        handler.setFormatter(formatter)

        stdout_handler = StreamHandler(sys.stdout)
        stdout_handler.setFormatter(formatter)

        self.logger.addHandler(handler)
        self.logger.addHandler(stdout_handler)

# ################################################################################################################################

    def store_pidfile(self, suffix):
        """ Writes out our PID to a suffix-specific pidfile under base_dir.
        """
        pidfile = os.path.join(self.base_dir, '{}-{}'.format(MISC.PIDFILE, suffix))
        with open_w(pidfile) as f:
            f.write(str(os.getpid()))

# ################################################################################################################################

    def _post(self, msg, _post=requests_post):
        """ POSTs a JSON message back to the parent server's callback address.
        Exceptions are logged, not propagated.
        """
        self.logger.info('POST to `%s` (%s), msg:`%s`', self.server_address, self.username, msg)

        # The callback speaks JSON - decode any bytes values up front
        for k, v in msg.items():
            if isinstance(v, bytes):
                msg[k] = v.decode('utf8')

        try:
            _post(self.server_address, data=dumps(msg), auth=self.server_auth)
        except Exception as e:
            self.logger.warning('Exception in BaseConnectionContainer._post: `%s`', e.args[0])

# ################################################################################################################################

    def on_mq_message_received(self, msg_ctx):
        """ Forwards a message received from a queue to the parent server.
        """
        return self._post({
            'msg': msg_ctx.mq_msg.to_dict(),
            'channel_id': msg_ctx.channel_id,
            'queue_name': msg_ctx.queue_name,
            'service_name': msg_ctx.service_name,
            'data_format': msg_ctx.data_format,
        })

# ################################################################################################################################

    def _create_definition(self, msg, needs_connect=True):
        """ A low-level method to create connection definitions. Must be called with self.lock held.
        """
        msg.pop('cluster_id', None)
        msg['needs_jms'] = msg.pop('use_jms', False)
        msg.pop('_encryption_needed', False)
        msg.pop('_encrypted_in_odb', False)
        id = msg.pop('id') if self.remove_id_from_def_msg else msg['id']

        if self.remove_name_from_def_msg:
            msg.pop('name')
            msg.pop('old_name', None)

        # We always create and add a connection ..
        conn = self.connection_class(self.logger, **msg)
        self.connections[id] = conn

        # .. because even if it fails here, it will be eventually established during one of .send or .receive,
        # however, it is possible that our caller already knows that the connection will fail so we need
        # to take it into account too.
        if needs_connect:
            conn.connect()

        return conn

# ################################################################################################################################

    def _create_outconn(self, msg):
        """ A low-level method to create an outgoing connection. Must be called with self.lock held.
        """
        # Not all outgoing connections have their parent definitions
        def_id = msg.get('def_id')

        if def_id:

            # Just to be on the safe side, make sure that our connection exists
            if not msg.def_id in self.connections:
                return Response(_http_503, 'Could not find def_id among {}'.format(self.connections.keys()), 'text/plain')

            # Map outconn to its definition
            self.outconn_id_to_def_id[msg.id] = msg.def_id

        # Create the outconn now
        self.outconns[msg.id] = msg

        # Maps outconn name to its ID
        self.outconn_name_to_id[msg.name] = msg.id

        self.logger.info('Added connection `%s`, self.outconns -> `%s`', msg.name, self.outconns)

        # Everything OK
        return Response()

# ################################################################################################################################

    @ensure_id_exists('outconns')
    @ensure_prereqs_ready
    def _delete_outconn(self, msg, outconn_name=None):
        """ A low-level implementation of outconn deletion. Must be called with self.lock held.
        """
        outconn_name = outconn_name if outconn_name else self.outconns[msg.id].name
        del self.outconns[msg.id]
        del self.outconn_id_to_def_id[msg.id]
        del self.outconn_name_to_id[outconn_name]

# ################################################################################################################################

    def _on_send_exception(self):
        """ Builds a 503 response out of the currently-handled exception.
        """
        msg = 'Exception in _on_OUTGOING_SEND (2) `{}`'.format(format_exc())
        self.logger.warning(msg)
        return Response(_http_503, msg)

# ################################################################################################################################

    def handle_http_request(self, path, msg, ok=b'OK'):
        """ Dispatches incoming HTTP requests - either reconfigures the connector or puts messages to queues.
        """
        self.logger.info('MSG received %s %s', path, msg)

        if path == _path_ping:
            return Response()
        else:
            msg = msg.decode('utf8')
            msg = loads(msg)
            msg = bunchify(msg)

            # Delete what handlers don't need
            msg.pop('msg_type', None) # Optional if message was sent by a server that is starting up vs. API call
            action = msg.pop('action')

            # Resolve the handler by the symbolic name of the broker action
            handler = getattr(self, '_on_{}'.format(code_to_name[action]))
            return handler(msg)

# ################################################################################################################################

    def check_credentials(self, auth):
        """ Checks incoming username/password and returns True only if they were valid and as expected.
        """
        username, password = parse_basic_auth(auth)

        if username != self.username:
            self.logger.warning('Invalid username or password')
            return

        elif password != self.password:
            self.logger.warning('Invalid username or password')
            return
        else:
            # All good, we let the request in
            return True

# ################################################################################################################################

    def on_wsgi_request(self, environ, start_response):
        """ WSGI entry point - authenticates the caller, dispatches the request
        and always attempts to send back some response.
        """
        # Default values to use in case of any internal errors
        status = _http_406
        content_type = 'text/plain'

        try:
            content_length = environ.get('CONTENT_LENGTH')
            if not content_length:
                status = _http_400
                data = 'Missing content'
                content_type = 'text/plain'
            else:
                data = environ['wsgi.input'].read(int(content_length))
                if self.check_credentials(environ.get('HTTP_AUTHORIZATION')):
                    response = self.handle_http_request(environ['PATH_INFO'], data)
                    status = response.status
                    data = response.data
                    content_type = response.content_type
                else:
                    status = _http_403
                    data = 'You are not allowed to access this resource'
                    content_type = 'text/plain'

        except Exception:
            self.logger.warning(format_exc())
            content_type = 'text/plain'
            status = _http_400
            data = format_exc()
        finally:
            try:
                headers = [('Content-type', content_type)]

                if not isinstance(data, bytes):
                    data = data.encode('utf8')

                start_response(status, headers)

                return [data]
            except Exception:
                exc_formatted = format_exc()
                self.logger.warning('Exception in finally block `%s`', exc_formatted)

# ################################################################################################################################

    @ensure_id_exists('channels')
    @ensure_prereqs_ready
    def on_channel_delete(self, msg):
        """ Stops and deletes an existing channel.
        """
        with self.lock:
            channel = self.channels[msg.id]
            channel.keep_running = False

            del self.channels[channel.id]
            del self.channel_id_to_def_id[channel.id]

# ################################################################################################################################

    @ensure_prereqs_ready
    def on_channel_create(self, msg):
        """ Creates a new channel listening for messages from a given endpoint.
        """
        with self.lock:
            conn = self.connections[msg.def_id]
            channel = self._create_channel_impl(conn, msg)
            channel.start()
            self.channels[channel.id] = channel
            self.channel_id_to_def_id[channel.id] = msg.def_id
            return Response()

# ################################################################################################################################

    @ensure_prereqs_ready
    def on_outgoing_edit(self, msg):
        """ Updates an existing outconn by deleting and creating it again with latest configuration.
        """
        with self.lock:
            self._delete_outconn(msg, msg.old_name)
            return self._create_outconn(msg)

# ################################################################################################################################

    @ensure_prereqs_ready
    def on_outgoing_create(self, msg):
        """ Creates a new outgoing connection using an already existing definition.
        """
        with self.lock:
            return self._create_outconn(msg)

# ################################################################################################################################

    @ensure_prereqs_ready
    def on_outgoing_delete(self, msg):
        """ Deletes an existing outgoing connection.
        """
        with self.lock:
            self._delete_outconn(msg)
            return Response()

# ################################################################################################################################

    @ensure_prereqs_ready
    @ensure_id_exists('connections')
    def on_definition_ping(self, msg):
        """ Pings a remote endpoint.
        """
        try:
            self.connections[msg.id].ping()
        except Exception as e:
            return Response(_http_503, str(e.args[0]), 'text/plain')
        else:
            return Response()

# ################################################################################################################################

    @ensure_id_exists('connections')
    @ensure_prereqs_ready
    def on_definition_change_password(self, msg):
        """ Changes the password of an existing definition and reconnects to the remote end.
        """
        with self.lock:
            try:
                conn = self.connections[msg.id]
                conn.close()
                conn.password = str(msg.password)
                conn.connect()
            except Exception as e:
                self.logger.warning(format_exc())
                return Response(_http_503, str(e.args[0]), 'text/plain')
            else:
                return Response()

# ################################################################################################################################

    @ensure_id_exists('connections')
    @ensure_prereqs_ready
    def on_definition_delete(self, msg):
        """ Deletes a definition along with its associated outconns and channels.
        """
        with self.lock:
            def_id = msg.id
            delete_id = None
            delete_name = None

            # Stop all connections ..
            try:
                conn = self.connections[def_id]
                delete_id = conn.id
                delete_name = conn.name
                self.connections[def_id].close()
            except Exception:
                self.logger.warning(format_exc())
            finally:
                try:
                    del self.connections[def_id]
                except Exception:
                    self.logger.warning(format_exc())

                # .. continue to delete outconns regardless of errors above ..
                # (iterate over a snapshot - deleting from a dict while iterating
                # its live .items() view raises RuntimeError in Python 3)
                for outconn_id, outconn_def_id in list(self.outconn_id_to_def_id.items()):
                    if outconn_def_id == def_id:
                        del self.outconn_id_to_def_id[outconn_id]
                        del self.outconns[outconn_id]

                # .. delete channels too (snapshot for the same reason).
                for channel_id, channel_def_id in list(self.channel_id_to_def_id.items()):
                    if channel_def_id == def_id:
                        del self.channel_id_to_def_id[channel_id]
                        del self.channels[channel_id]

            if delete_id:
                self.logger.info('Deleted `%s` (%s)', delete_name, delete_id)

            return Response()

# ################################################################################################################################

    @ensure_id_exists('connections')
    @ensure_prereqs_ready
    def on_definition_edit(self, msg):
        """ Updates an existing definition - close the current one, including channels and outconns,
        and creates a new one in its place.
        """
        with self.lock:
            def_id = msg.id
            old_conn = self.connections[def_id]

            # Edit messages don't carry passwords
            msg.password = old_conn.password

            # It's possible that we are editing a connection that has not connected yet,
            # e.g. if password was invalid, so this needs to be guarded by an if.
            if old_conn.is_connected:
                self.connections[def_id].close()

            # Overwrites the previous connection object
            new_conn = self._create_definition(msg, old_conn.is_connected)

            # Stop and start all channels using this definition.
            for channel_id, _def_id in self.channel_id_to_def_id.items():
                if def_id == _def_id:
                    channel = self.channels[channel_id]
                    channel.stop()
                    channel.conn = new_conn
                    channel.start()

            return Response()

# ################################################################################################################################

    @ensure_prereqs_ready
    def on_definition_create(self, msg):
        """ Creates a new definition from the input message.
        """
        with self.lock:
            try:
                self._create_definition(msg)
            except Exception as e:
                self.logger.warning(format_exc())
                return Response(_http_503, str(e.args[0]))
            else:
                return Response()

# ################################################################################################################################

    def _create_channel_impl(self, *args, **kwargs):
        """ Must be overridden in subclasses to build a concrete channel object.
        """
        raise NotImplementedError('Should be overridden in subclasses')

# ################################################################################################################################

    def make_server(self):
        """ Builds the WSGI server this container runs.
        """
        return wsgiref_make_server(self.host, self.port, self.on_wsgi_request)

# ################################################################################################################################

    def run(self):
        """ Serves requests until interrupted, then cleans up and terminates the process.
        """
        server = self.make_server()
        try:
            server.serve_forever()
        except KeyboardInterrupt:

            try:
                # Attempt to clean up, if possible
                server.shutdown()
                for conn in self.connections.values():
                    conn.close()
            except Exception:
                # Log exception if cleanup was not possible
                self.logger.warning('Exception in shutdown procedure `%s`', format_exc())
            finally:
                # Anything happens, we need to shut down the process
                os.kill(os.getpid(), signal.SIGTERM)
# ################################################################################################################################
# ################################################################################################################################
| 27,715
|
Python
|
.py
| 557
| 39.879713
| 130
| 0.477097
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,099
|
ibm_mq.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/subprocess_/impl/ibm_mq.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
Copyright 2006-2008 SpringSource (http://springsource.com), All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# stdlib
import logging
from logging import DEBUG
from http.client import BAD_REQUEST, FORBIDDEN, INTERNAL_SERVER_ERROR, NOT_ACCEPTABLE, OK, responses, SERVICE_UNAVAILABLE
from time import sleep
from traceback import format_exc
# Python 2/3 compatibility
from zato.common.py23_ import start_new_thread
# Zato
from zato.common.json_internal import dumps
from zato.server.connection.jms_wmq.jms import WebSphereMQException, NoMessageAvailableException
from zato.server.connection.jms_wmq.jms.connection import WebSphereMQConnection
from zato.server.connection.jms_wmq.jms.core import TextMessage
from zato.server.connection.connector.subprocess_.base import BaseConnectionContainer, Response
# ################################################################################################################################
# Logger whose output is directed to the main Zato server log
logger_zato = logging.getLogger('zato')

# ################################################################################################################################

# Pre-formatted HTTP status lines, e.g. '503 Service Unavailable', reused when building responses
_http_200 = '{} {}'.format(OK, responses[OK])
_http_400 = '{} {}'.format(BAD_REQUEST, responses[BAD_REQUEST])
_http_403 = '{} {}'.format(FORBIDDEN, responses[FORBIDDEN])
_http_406 = '{} {}'.format(NOT_ACCEPTABLE, responses[NOT_ACCEPTABLE])
_http_500 = '{} {}'.format(INTERNAL_SERVER_ERROR, responses[INTERNAL_SERVER_ERROR])
_http_503 = '{} {}'.format(SERVICE_UNAVAILABLE, responses[SERVICE_UNAVAILABLE])

# URL paths understood by this container's HTTP interface
_path_api = '/api'
_path_ping = '/ping'
_paths = (_path_api, _path_ping)

# MQ completion/reason codes hard-coded by value so that pymqi
# (an optional dependency) does not have to be imported at module level
_cc_failed = 2 # pymqi.CMQC.MQCC_FAILED
_rc_conn_broken = 2009 # pymqi.CMQC.MQRC_CONNECTION_BROKEN
_rc_not_authorized = 2035 # pymqi.CMQC.MQRC_NOT_AUTHORIZED
_rc_q_mgr_quiescing = 2161 # pymqi.CMQC.MQRC_Q_MGR_QUIESCING
_rc_host_not_available = 2538 # pymqi.CMQC.MQRC_HOST_NOT_AVAILABLE

# A list of reason codes upon which we will try to reconnect
_rc_reconnect_list = [_rc_conn_broken, _rc_q_mgr_quiescing, _rc_host_not_available]
# ################################################################################################################################
class _MessageCtx:
__slots__ = ('mq_msg', 'channel_id', 'queue_name', 'service_name', 'data_format')
def __init__(self, mq_msg, channel_id, queue_name, service_name, data_format):
self.mq_msg = mq_msg
self.channel_id = channel_id
self.queue_name = queue_name
self.service_name = service_name
self.data_format = data_format
# ################################################################################################################################
class IBMMQChannel:
    """ A background listener for messages arriving from an IBM MQ queue manager,
    invoking a configured callback for each message received from a given queue.
    """
    def __init__(self, conn, is_active, channel_id, queue_name, service_name, data_format, on_message_callback, logger):
        self.conn = conn
        self.is_active = is_active
        self.id = channel_id
        self.queue_name = queue_name
        self.service_name = service_name
        self.data_format = data_format
        self.on_message_callback = on_message_callback
        self.keep_running = True if is_active else False
        self.logger = logger
        self.has_debug = self.logger.isEnabledFor(DEBUG)

        # PyMQI is an optional dependency so let's import it here rather than on module level
        import pymqi
        self.pymqi = pymqi

# ################################################################################################################################

    def _get_destination_info(self):
        """ Returns a description of the queue and its underlying connection for use in log messages.
        """
        return 'destination:`%s`, %s' % (self.queue_name, self.conn.get_connection_info())

# ################################################################################################################################

    def start(self, sleep_on_error=3, _connection_closing='zato.connection.closing'):
        """ Runs a background queue listener in its own thread.

        sleep_on_error - how many seconds to sleep for after a low-level error before retrying
        _connection_closing - a sentinel message telling the listener to shut itself down
        """
        def _invoke_callback(msg_ctx):
            try:
                self.on_message_callback(msg_ctx)
            except Exception:
                self.logger.warning('Could not invoke message callback %s', format_exc())

        def _impl():
            while self.keep_running:
                try:
                    msg = self.conn.receive(self.queue_name, 100)
                    if self.has_debug:
                        # Fixed: previously `str(msg).decode('utf-8')`, which raises
                        # AttributeError under Python 3 - %s handles str and bytes alike.
                        self.logger.debug('Message received `%s`' % msg)

                    # A sentinel telling us to stop this listener
                    if msg == _connection_closing:
                        self.logger.info('Received request to quit, closing channel for queue `%s` (%s)',
                            self.queue_name, self.conn.get_connection_info())
                        self.keep_running = False
                        return

                    if msg:
                        # Hand the message over to the callback in a new thread
                        # so that this loop can keep receiving messages.
                        start_new_thread(_invoke_callback, (
                            _MessageCtx(msg, self.id, self.queue_name, self.service_name, self.data_format),))

                except NoMessageAvailableException:
                    if self.has_debug:
                        # Fixed: _get_destination_info takes no arguments -
                        # it reads self.queue_name itself.
                        self.logger.debug('Consumer for queue `%s` did not receive a message. `%s`' % (
                            self.queue_name, self._get_destination_info()))

                except self.pymqi.MQMIError as e:
                    if e.reason == self.pymqi.CMQC.MQRC_UNKNOWN_OBJECT_NAME:
                        self.logger.warning('No such queue `%s` found for %s', self.queue_name, self.conn.get_connection_info())
                    else:
                        self.logger.warning('%s in run, reason_code:`%s`, comp_code:`%s`' % (
                            e.__class__.__name__, e.reason, e.comp))

                    # In case of any low-level PyMQI error, sleep for some time
                    # because there is nothing we can do at this time about it.
                    self.logger.info('Sleeping for %ss', sleep_on_error)
                    sleep(sleep_on_error)

                except WebSphereMQException as e:
                    sleep(sleep_on_error)
                    conn_info = self.conn.get_connection_info()

                    # Try to reconnect if the reason code points to one that is of a transient nature
                    while self.keep_running and e.completion_code == _cc_failed and e.reason_code in _rc_reconnect_list:
                        try:
                            self.logger.warning('Reconnecting channel `%s` due to MQRC `%s` and MQCC `%s`',
                                conn_info, e.reason_code, e.completion_code)
                            self.conn.reconnect()
                            self.conn.ping()
                            break
                        except WebSphereMQException as exc:
                            # Reconnect failed with another transient error - keep the latest
                            # exception around and try again after a pause.
                            e = exc
                            sleep(sleep_on_error)
                        except Exception:
                            self.logger.error('Stopping channel `%s` due to `%s`', conn_info, format_exc())
                            raise
                    else:
                        # The while condition became False without a successful reconnect -
                        # this channel cannot continue.
                        self.logger.error(
                            'Stopped channel `%s` due to MQRC `%s` and MQCC `%s`',
                            conn_info, e.reason_code, e.completion_code)
                        raise

                except Exception as e:
                    self.logger.error('Exception in the main loop %r %s %s', e.args, type(e), format_exc())
                    sleep(sleep_on_error)

        # Start listener in a thread
        start_new_thread(_impl, ())

# ################################################################################################################################

    def stop(self):
        """ Tells the background listener loop to exit on its next iteration.
        """
        self.keep_running = False
# ################################################################################################################################
# ################################################################################################################################
class IBMMQConnectionContainer(BaseConnectionContainer):
    """ A subprocess-based container managing IBM MQ definitions, channels
    and outgoing connections on behalf of the main server process.
    Most _on_* handlers delegate to BaseConnectionContainer; the MQ-specific
    logic lives in _on_OUTGOING_WMQ_SEND and _on_CHANNEL_WMQ_EDIT.
    """

    # PyMQI is optional, so prerequisites must be checked before the container is used
    has_prereqs = True

    connection_class = WebSphereMQConnection
    ipc_name = 'ibm-mq'
    conn_type = 'ibm_mq'
    logging_file_name = 'ibm-mq'

    def __init__(self):

        # PyMQI is an optional dependency so let's import it here rather than on module level
        try:
            import pymqi
        except ImportError:
            self.pymqi = None
        else:
            self.pymqi = pymqi

        # Call our parent to initialize everything
        super().__init__()

# ################################################################################################################################

    def check_prereqs_ready(self):
        """ Returns True only if PyMQI was imported successfully in __init__.
        """
        return bool(self.pymqi)

# ################################################################################################################################

    def get_prereqs_not_ready_message(self):
        """ A user-facing message explaining why the container cannot be used.
        """
        return 'PyMQI library could not be imported. Is PyMQI installed? Is ibm_mq set to True in server.conf?'

# ################################################################################################################################

    def on_mq_message_received(self, msg_ctx):
        """ Callback invoked for each message taken off a queue - forwards the message
        and its channel metadata via self._post.
        """
        # NOTE(review): _post is defined in BaseConnectionContainer - presumably it
        # delivers the payload to the main server process; confirm in the parent class.
        return self._post({
            'msg': msg_ctx.mq_msg.to_dict(),
            'channel_id': msg_ctx.channel_id,
            'queue_name': msg_ctx.queue_name,
            'service_name': msg_ctx.service_name,
            'data_format': msg_ctx.data_format,
        })

# ################################################################################################################################

    def _on_DEFINITION_WMQ_CREATE(self, msg):
        """ Creates a new connection to IBM MQ.
        """
        # Require that PyMQI be available
        if not self.pymqi:
            return Response(_http_503, 'Could not find pymqi module, IBM MQ connections will not start')

        # Call our parent which will actually create the definition
        return super().on_definition_create(msg)

# ################################################################################################################################

    def _on_DEFINITION_WMQ_EDIT(self, msg):
        """ Updates an existing definition - delegates to the parent class.
        """
        return super().on_definition_edit(msg)

# ################################################################################################################################

    def _on_DEFINITION_WMQ_DELETE(self, msg):
        """ Deletes a definition - delegates to the parent class.
        """
        return super().on_definition_delete(msg)

# ################################################################################################################################

    def _on_DEFINITION_WMQ_CHANGE_PASSWORD(self, msg):
        """ Changes a definition's password - delegates to the parent class.
        """
        return super().on_definition_change_password(msg)

# ################################################################################################################################

    def _on_DEFINITION_WMQ_PING(self, msg):
        """ Pings a queue manager - delegates to the parent class.
        """
        return super().on_definition_ping(msg)

# ################################################################################################################################

    def _on_OUTGOING_WMQ_DELETE(self, msg):
        """ Deletes an outgoing connection - delegates to the parent class.
        """
        return super().on_outgoing_delete(msg)

# ################################################################################################################################

    def _on_OUTGOING_WMQ_CREATE(self, msg):
        """ Creates an outgoing connection - delegates to the parent class.
        """
        return super().on_outgoing_create(msg)

# ################################################################################################################################

    def _on_OUTGOING_WMQ_EDIT(self, msg):
        """ Updates an outgoing connection - delegates to the parent class.
        """
        return super().on_outgoing_edit(msg)

# ################################################################################################################################

    def _on_CHANNEL_WMQ_CREATE(self, msg):
        """ Creates a channel - delegates to the parent class.
        """
        return super().on_channel_create(msg)

# ################################################################################################################################

    def _on_CHANNEL_WMQ_EDIT(self, msg):
        """ Updates an IBM MQ MQ channel by stopping it and starting again with a new configuration.
        """
        with self.lock:
            channel = self.channels[msg.id]

            # Stop the listener loop before swapping in the new configuration
            channel.stop()
            channel.queue_name = msg.queue.encode('utf8')
            channel.service_name = msg.service_name
            channel.data_format = msg.data_format
            channel.keep_running = True if msg.is_active else False

            # Starts a fresh background listener thread with the updated settings
            channel.start()

            return Response()

# ################################################################################################################################

    def _on_CHANNEL_WMQ_DELETE(self, msg):
        """ Deletes a channel - delegates to the parent class.
        """
        return super().on_channel_delete(msg)

# ################################################################################################################################

    def _on_OUTGOING_WMQ_SEND(self, msg, is_reconnect=False):
        """ Sends a message to a remote IBM MQ queue - note that the functionality is specific to IBM MQ
        and, consequently, it does not make use of any method in the parent class unlike, e.g. _on_CHANNEL_WMQ_DELETE.

        is_reconnect - True only on the single retry issued after MQRC_CONNECTION_BROKEN,
        which prevents an endless reconnect/resubmit loop.
        """
        # NOTE(review): the lock is held for the whole send, including ping and network I/O
        with self.lock:
            outconn_id = msg.get('id') or self.outconn_name_to_id[msg.outconn_name]
            outconn = self.outconns[outconn_id]

        if not outconn.is_active:
            return Response(_http_406, 'Cannot send messages through an inactive connection', 'text/plain')
        else:
            def_id = self.outconn_id_to_def_id[outconn_id]
            conn = self.connections[def_id]
            conn.ping()

            try:

                # Each property falls back to the outgoing connection's default when the message does not set it
                delivery_mode = msg.delivery_mode or outconn.delivery_mode
                priority = msg.priority or outconn.priority
                expiration = msg.expiration or outconn.expiration

                jms_correlation_id = msg.get('correlation_id', '')
                jms_message_id = msg.get('msg_id', '')
                jms_reply_to = msg.get('reply_to', '')

                # MQ headers are expected as bytes - encode any str values
                if isinstance(jms_correlation_id, str):
                    jms_correlation_id = jms_correlation_id.encode('utf8')

                if isinstance(jms_message_id, str):
                    jms_message_id = jms_message_id.encode('utf8')

                if isinstance(jms_reply_to, str):
                    jms_reply_to = jms_reply_to.encode('utf8')

                text_msg = TextMessage(
                    text = msg.data,
                    jms_delivery_mode = delivery_mode,
                    jms_priority = priority,
                    jms_expiration = expiration,
                    jms_correlation_id = jms_correlation_id,
                    jms_message_id = jms_message_id,
                    jms_reply_to = jms_reply_to,
                )

                conn.send(text_msg, msg.queue_name)
                return Response(data=dumps(text_msg.to_dict(False)))

            except(self.pymqi.MQMIError, WebSphereMQException) as e:

                # Normalize completion/reason codes across the two exception types
                if isinstance(e, self.pymqi.MQMIError):
                    cc_code = e.comp
                    reason_code = e.reason
                else:
                    cc_code = e.completion_code
                    reason_code = e.reason_code

                # Try to reconnect if the connection is broken but only if we have not tried to already
                if (not is_reconnect) and cc_code == _cc_failed and reason_code == _rc_conn_broken:
                    self.logger.warning('Caught MQRC_CONNECTION_BROKEN in send, will try to reconnect connection to %s ',
                        conn.get_connection_info())

                    # Sleep for a while before reconnecting
                    sleep(1)

                    # Try to reconnect
                    conn.reconnect()

                    # Confirm it by pinging the queue manager
                    conn.ping()

                    # Resubmit the request
                    return self._on_OUTGOING_WMQ_SEND(msg, is_reconnect=True)
                else:
                    return self._on_send_exception()

            except Exception:
                return self._on_send_exception()

# ################################################################################################################################

    def _create_channel_impl(self, conn, msg):
        """ Builds an IBMMQChannel for the given connection and channel-creation message.
        """
        return IBMMQChannel(conn, msg.is_active, msg.id, msg.queue.encode('utf8'), msg.service_name, msg.data_format,
            self.on_mq_message_received, self.logger)
# ################################################################################################################################
if __name__ == '__main__':

    # Run the IBM MQ connection container as a standalone process
    container = IBMMQConnectionContainer()
    container.run()
# ################################################################################################################################
| 17,657
|
Python
|
.py
| 300
| 47.43
| 130
| 0.476094
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|