id
int64 0
458k
| file_name
stringlengths 4
119
| file_path
stringlengths 14
227
| content
stringlengths 24
9.96M
| size
int64 24
9.96M
| language
stringclasses 1
value | extension
stringclasses 14
values | total_lines
int64 1
219k
| avg_line_length
float64 2.52
4.63M
| max_line_length
int64 5
9.91M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 7
101
| repo_stars
int64 100
139k
| repo_forks
int64 0
26.4k
| repo_open_issues
int64 0
2.27k
| repo_license
stringclasses 12
values | repo_extraction_date
stringclasses 433
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
10,300
|
xpath.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/message/xpath.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,301
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/generic/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.util.sql import get_instance_by_id, get_instance_by_name
from zato.server.service.internal import AdminService
# ################################################################################################################################
class _BaseService(AdminService):
    """ Base class for generic-object admin services - thin wrappers
    around the shared SQL lookup helpers from zato.common.util.sql.
    """

# ################################################################################################################################

    def _get_instance_by_id(self, session, model_class, id):
        """ Looks up a model instance by its numeric ID. """
        instance = get_instance_by_id(session, model_class, id)
        return instance

# ################################################################################################################################

    def _get_instance_by_name(self, session, model_class, type_, name):
        """ Looks up a model instance by its type and name. """
        instance = get_instance_by_name(session, model_class, type_, name)
        return instance
# ################################################################################################################################
| 1,119
|
Python
|
.py
| 17
| 62.823529
| 130
| 0.381868
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,302
|
connection.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/generic/connection.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from copy import deepcopy
from datetime import datetime
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.common.api import GENERIC as COMMON_GENERIC, generic_attrs, SEC_DEF_TYPE, SEC_DEF_TYPE_NAME, ZATO_NONE
from zato.common.broker_message import GENERIC
from zato.common.json_internal import dumps, loads
from zato.common.odb.model import GenericConn as ModelGenericConn
from zato.common.odb.query.generic import connection_list
from zato.common.typing_ import cast_
from zato.common.util.api import parse_simple_type
from zato.common.util.config import replace_query_string_items_in_dict
from zato.server.generic.connection import GenericConnection
from zato.server.service import AsIs, Bool, Int
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
from zato.server.service.internal.generic import _BaseService
from zato.server.service.meta import DeleteMeta
# Python 2/3 compatibility
from six import add_metaclass
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.typing_ import any_, anydict, anylist, strdict
from zato.server.service import Service
anylist = anylist
Bunch = Bunch
Service = Service
# ################################################################################################################################
# Metadata consumed by the meta-classes below (e.g. DeleteMeta) to generate services.
elem = 'generic_connection'
model = ModelGenericConn
label = 'a generic connection'
broker_message = GENERIC
broker_message_prefix = 'CONNECTION_'
list_func = None
extra_delete_attrs = ['type_']

# ################################################################################################################################

# Optional per-connection-type hooks invoked by _CreateEdit before committing changes; currently none are registered.
hook = {}

# ################################################################################################################################

# Maps ID attributes of file-transfer sources to the names of their outgoing connection config dicts.
config_dict_id_name_outconnn = {
    'ftp_source': 'out_ftp',
    'sftp_source': 'out_sftp',
}

# Separator used in composite security IDs given on input, e.g. 'oauth/17'.
sec_def_sep = '/'

# ################################################################################################################################

# Extra keys whose values are secrets and must always be stored encrypted.
extra_secret_keys = (

    #
    # Dropbox
    #
    'oauth2_access_token',

    # Salesforce
    'consumer_key',
    'consumer_secret',
)

# Note that this is a set, unlike extra_secret_keys, because we do not make it part of SIO.
extra_simple_type = {
    'is_active',
}

# These keys should be left as they are given on input, without trying to parse them into non-string types.
skip_simple_type = {
    'api_version',
}
# ################################################################################################################################
# Values of these generic attributes should be converted to ints
int_attrs = ['pool_size', 'ping_interval', 'pings_missed_threshold', 'socket_read_timeout', 'socket_write_timeout']

# ################################################################################################################################

def ensure_ints(data:'strdict') -> 'None':
    """ Converts, in place, the values of known integer attributes to int.

    Falsy values (None, '', 0) and values that cannot be parsed as integers are left as they are.
    Keys absent from the input dict are never added - previously, a missing key would be
    inserted with a value of None, which later became part of the connection's definition.
    """
    for name in int_attrs:

        # Do not insert keys that were not given on input
        if name not in data:
            continue

        value = data[name]
        try:
            value = int(value) if value else value
        except (TypeError, ValueError):
            pass # Not an integer
        else:
            data[name] = value
# ################################################################################################################################
class _CreateEditSIO(AdminSIO):
    """ SimpleIO definition shared by the Create and Edit actions for generic connections. """
    input_required = ('name', 'type_', 'is_active', 'is_internal', 'is_channel', 'is_outconn', Int('pool_size'),
        Bool('sec_use_rbac'))
    input_optional = ('cluster_id', 'id', Int('cache_expiry'), 'address', Int('port'), Int('timeout'), 'data_format', 'version',
        'extra', 'username', 'username_type', 'secret', 'secret_type', 'conn_def_id', 'cache_id', AsIs('tenant_id'),
        AsIs('client_id'), AsIs('security_id'), AsIs('sec_tls_ca_cert_id')) + extra_secret_keys + generic_attrs
    # Keep optional elements in the request even when they are empty
    force_empty_keys = True
# ################################################################################################################################
# ################################################################################################################################
class _CreateEdit(_BaseService):
    """ Creates a new or updates an existing generic connection in ODB.
    """
    # Set by the Create and Edit subclasses to select the code path in handle
    is_create: 'bool'
    is_edit: 'bool'

    class SimpleIO(_CreateEditSIO):
        output_required = ('id', 'name')
        default_value = None
        response_elem = None

# ################################################################################################################################

    def handle(self) -> 'None':

        data = deepcopy(self.request.input)

        # Build a reusable flag indicating that a secret was sent on input.
        secret = data.get('secret', ZATO_NONE)

        if (secret is None) or (secret == ZATO_NONE):
            has_input_secret = False
            input_secret = ''
        else:
            has_input_secret = True
            input_secret = secret
            # Encrypt the input secret up front - it is stored and published encrypted
            if input_secret:
                input_secret = self.crypto.encrypt(input_secret)
                input_secret = input_secret.decode('utf8')

        raw_request = self.request.raw_request
        if isinstance(raw_request, (str, bytes)):
            raw_request = loads(raw_request)

        # Merge into data any keys from the raw request that SIO did not already populate
        for key, value in raw_request.items():
            if key not in data:

                # Convert string values to simple types, unless the key is explicitly exempt
                if key not in skip_simple_type:
                    value = parse_simple_type(value)
                value = self._sio.eval_(key, value, self.server.encrypt)

                # Extra secrets are always stored encrypted; generate one if none was given
                if key in extra_secret_keys:
                    if value is None:
                        value = 'auto.generic.{}'.format(uuid4().hex)
                    value = self.crypto.encrypt(value)
                    value = value.decode('utf8')

                if key in extra_simple_type:
                    value = parse_simple_type(value)

                data[key] = value

        # If the is_active flag does exist but it is None, it should be treated as though it was set to False,
        # which is needed because None would be treated as NULL by the SQL database.
        if 'is_active' in data:
            if data['is_active'] is None:
                data['is_active'] = False

        # Make sure that specific keys are integers
        ensure_ints(data)

        # Break down security definitions into components
        security_id = data.get('security_id') or ''

        # A composite value such as 'basic_auth/17' - split it into type and numeric ID
        if sec_def_sep in security_id:

            # Extract the components ..
            sec_def_type, security_id = security_id.split(sec_def_sep)
            sec_def_type_name = SEC_DEF_TYPE_NAME[sec_def_type]
            security_id = int(security_id)

            # .. look up the security name by its ID ..
            if sec_def_type == SEC_DEF_TYPE.BASIC_AUTH:
                func = self.server.worker_store.basic_auth_get_by_id
            elif sec_def_type == SEC_DEF_TYPE.OAUTH:
                func = self.server.worker_store.oauth_get_by_id
            else:
                func = None

            if func:
                sec_def = func(security_id)
                security_name = sec_def.name
            else:
                security_name = 'unset'

            # .. potentially overwrites the security type with what we have here ..
            data['auth_type'] = sec_def_type

            # .. turns the ID into an integer but also remove the sec_type prefix,
            # .. e.g. 17 instead of 'oauth/17'.
            data['security_id'] = int(security_id)

            # .. and store everything else now.
            data['sec_def_type_name'] = sec_def_type_name
            data['security_name'] = security_name

        conn = GenericConnection.from_dict(data)

        with closing(self.server.odb.session()) as session:

            # If this is the edit action, we need to find our instance in the database
            # and we need to make sure that we publish its encrypted secret for other layers ..
            if self.is_edit:
                model = self._get_instance_by_id(session, ModelGenericConn, data.id)

                # Use the secret that was given on input because it may be a new one.
                # Otherwise, if no secret is given on input, it means that we are not changing it
                # so we can reuse the same secret that the model already uses.
                if has_input_secret:
                    secret = input_secret
                else:
                    secret = model.secret

                secret = self.server.decrypt(secret)
                conn.secret = secret
                data.secret = secret # We need to set it here because we also publish this message to other servers

            # .. but if it is the create action, we need to create a new instance
            # .. and ensure that its secret is auto-generated.
            else:
                model = self._new_zato_instance_with_cluster(ModelGenericConn)
                secret = self.crypto.generate_secret().decode('utf8')
                secret = self.server.encrypt('auto.generated.{}'.format(secret))
                secret = cast_('str', secret)
                conn.secret = secret

            conn_dict = conn.to_sql_dict()

            # This will be needed in case this is a rename
            old_name = model.name

            for key, value in sorted(conn_dict.items()):

                # If we are merely creating this connection, do not set the field unless a secret was sent on input.
                # If it is an edit, then we will have the secret either from the input or from the model,
                # which is why we do not skip the secret field in that case.
                if self.is_create:
                    if key == 'secret' and not (has_input_secret):
                        continue
                setattr(model, key, value)

            # Run an optional per-connection-type hook before committing, if one is registered
            hook_func = hook.get(data.type_)
            if hook_func:
                hook_func(self, data, model, old_name)

            session.add(model)
            session.commit()

            # Re-read the instance so its server-assigned ID can be returned to the caller
            instance = self._get_instance_by_name(session, ModelGenericConn, data.type_, data.name)
            self.response.payload.id = instance.id
            self.response.payload.name = instance.name

            # Notify the rest of the cluster about the change
            data['old_name'] = old_name
            data['action'] = GENERIC.CONNECTION_EDIT.value if self.is_edit else GENERIC.CONNECTION_CREATE.value
            data['id'] = instance.id
            self.broker_client.publish(data)
# ################################################################################################################################
# ################################################################################################################################
class Create(_CreateEdit):
    """ Creates a new generic connection.
    """
    # Selects the create code path in _CreateEdit.handle
    is_create = True
    is_edit = False
# ################################################################################################################################
# ################################################################################################################################
class Edit(_CreateEdit):
    """ Updates an existing generic connection.
    """
    # Selects the edit code path in _CreateEdit.handle
    is_create = False
    is_edit = True
# ################################################################################################################################
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes a generic connection.
    """
    # The implementation is generated by DeleteMeta from the module-level
    # elem/model/label/broker_message* attributes defined above.
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of generic connections by their type; includes pagination.
    """
    _filter_by = ModelGenericConn.name,

    class SimpleIO(GetListAdminSIO):
        input_required = ('cluster_id',)
        input_optional = GetListAdminSIO.input_optional + ('type_',)

# ################################################################################################################################

    def get_data(self, session:'any_') -> 'any_':
        """ Runs the paginated search query, filtered by the connection type from input. """
        cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id
        data = self._search(connection_list, session, cluster_id, self.request.input.type_, False)
        return data

# ################################################################################################################################

    def _add_custom_conn_dict_fields(self, conn_dict:'anydict') -> 'None':
        """ A hook for subclasses to add custom, non-database fields to each connection dict. """
        pass

# ################################################################################################################################

    def _enrich_conn_dict(self, conn_dict:'anydict') -> 'None':
        """ Enriches, in place, a connection's dict with human-friendly fields,
        e.g. service and outgoing connection names resolved from their IDs,
        and masks out secrets embedded in query strings.
        """
        # Local aliases
        cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id

        # New items that will be potentially added to conn_dict once the iteration completes.
        # They must not be assigned while it is in progress - adding a key to a dict
        # during iteration raises a RuntimeError.
        to_add = {}

        # Mask out all the relevant attributes
        replace_query_string_items_in_dict(self.server, conn_dict)

        # Process all the items found in the database.
        for key, value in conn_dict.items():
            if value:
                if key.endswith('_service_id'):
                    prefix = key.split('_service_id')[0]
                    service_attr = prefix + '_service_name'
                    try:
                        # NOTE(review): the response element key below says 'get_by_name' while the service
                        # invoked is get-by-id - confirm it matches the target service's response_elem.
                        service_name = self.invoke('zato.service.get-by-id', {
                            'cluster_id': cluster_id,
                            'id': value,
                        })['zato_service_get_by_name_response']['name']
                    except Exception:
                        pass # Best-effort - the name simply will not be resolved
                    else:
                        # Collect for later - assigning to conn_dict here would change
                        # the dict's size while it is being iterated over.
                        to_add[service_attr] = service_name
                else:
                    for id_name_base, out_name in config_dict_id_name_outconnn.items():
                        item_id = '{}_id'.format(id_name_base)
                        if key == item_id:
                            config_dict = self.server.config.get_config_by_item_id(out_name, value)
                            item_name = '{}_name'.format(id_name_base)
                            to_add[item_name] = config_dict['name']

        # .. add custom fields that do not exist in the database ..
        self._add_custom_conn_dict_fields(conn_dict)

        # .. and hand the final result back to our caller.
        if to_add:
            conn_dict.update(to_add)

# ################################################################################################################################

    def handle(self) -> 'None':

        out = {'_meta':{}, 'response':[]}
        _meta = cast_('anydict', out['_meta'])

        with closing(self.odb.session()) as session:
            search_result = self.get_data(session)
            _meta.update(search_result.to_dict())

            for item in search_result:
                conn = GenericConnection.from_model(item)
                conn_dict = conn.to_dict()
                self._enrich_conn_dict(conn_dict)
                cast_('anylist', out['response']).append(conn_dict)

        # Results are already included in the list of out['response'] elements
        _ = _meta.pop('result', None)

        self.response.payload = dumps(out)
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the secret (password) of a generic connection.
    """
    # The new secret is optional - some connection types derive it differently
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        response_elem = None

# ################################################################################################################################

    def _run_pre_handle_tasks_CLOUD_MICROSOFT_365(self, session:'any_', instance:'any_') -> 'None':
        """ Exchanges a Microsoft 365 authorization URL for an OAuth token and stores it on the instance.

        Disabled, no longer in use - everything below the return statement
        is intentionally unreachable and kept for reference only.
        """
        return

        # stdlib
        from json import dumps, loads
        from urllib.parse import parse_qs, urlsplit

        # office-365
        from O365 import Account

        # The authorization URL arrives encrypted in the password1 field
        auth_url = self.request.input.password1
        auth_url = self.server.decrypt(auth_url)

        # Extract the OAuth state parameter from the URL's query string
        query = urlsplit(auth_url).query
        parsed = parse_qs(query)
        state = parsed['state']
        state = state[0]

        # Client credentials are kept in the instance's opaque attributes
        opaque1 = instance.opaque1
        opaque1 = loads(opaque1)

        client_id = opaque1['client_id']
        secret_value = opaque1.get('secret_value') or opaque1.get('secret') or opaque1['password']

        # Request a token from Microsoft 365 ..
        credentials = (client_id, secret_value)
        account = Account(credentials)
        _ = account.con.request_token(authorization_url=auth_url, state=state)

        # .. and persist it with the connection's definition.
        opaque1['token'] = account.con.token_backend.token
        opaque1 = dumps(opaque1)

        instance.opaque1 = opaque1
        session.add(instance)
        session.commit()

# ################################################################################################################################

    def _run_pre_handle_tasks(self, session:'any_', instance:'any_') -> 'None':
        """ Dispatches to type-specific tasks that need to run before the secret is changed. """
        conn_type = self.request.input.get('type_')
        if conn_type == COMMON_GENERIC.CONNECTION.TYPE.CLOUD_MICROSOFT_365:
            self._run_pre_handle_tasks_CLOUD_MICROSOFT_365(session, instance)

# ################################################################################################################################

    def handle(self) -> 'None':

        def _auth(instance:'any_', secret:'str | bytes') -> 'None':
            if secret:
                # Always encrypt the secret given on input
                instance.secret = self.server.encrypt(secret)

        # Find the connection's ID - either given directly on input
        # or looked up by the connection's type and name.
        if self.request.input.id:
            instance_id = self.request.input.id
        else:
            with closing(self.odb.session()) as session:
                instance_id = session.query(ModelGenericConn).\
                    filter(ModelGenericConn.name==self.request.input.name).\
                    filter(ModelGenericConn.type_==self.request.input.type_).\
                    one().id

        with closing(self.odb.session()) as session:
            query = session.query(ModelGenericConn)
            query = query.filter(ModelGenericConn.id==instance_id)
            instance = query.one()

            # This step runs optional pre-handle tasks that some types of connections may require.
            self._run_pre_handle_tasks(session, instance)

        # This step updates the secret.
        self._handle(ModelGenericConn, _auth, GENERIC.CONNECTION_CHANGE_PASSWORD.value, instance_id=instance_id,
            publish_instance_attrs=['type_'])
# ################################################################################################################################
# ################################################################################################################################
class Ping(_BaseService):
    """ Pings a generic connection.
    """
    class SimpleIO(AdminSIO):
        input_required = 'id'
        output_required = 'info'
        output_optional = 'is_success'
        response_elem = None

    def handle(self) -> 'None':

        conn_id = self.request.input.id

        with closing(self.odb.session()) as session:

            # To ensure that the input ID is correct
            instance = self._get_instance_by_id(session, ModelGenericConn, conn_id)

            # Some connection types, like SFTP, require a dedicated ping implementation;
            # everything else goes through the generic one.
            ping_func_by_type = {
                COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_SFTP: self.server.connector_sftp.ping_sftp
            }
            ping_func = ping_func_by_type.get(instance.type_, self.server.worker_store.ping_generic_connection)

            started = datetime.utcnow()

            try:
                _ = ping_func(conn_id)
            except Exception:
                error_details = format_exc()
                self.logger.warning(error_details)
                self.response.payload.info = error_details
                self.response.payload.is_success = False
            else:
                elapsed = datetime.utcnow() - started
                info = 'Connection pinged; response time: {}'.format(elapsed)
                self.logger.info(info)
                self.response.payload.info = info
                self.response.payload.is_success = True
# ################################################################################################################################
# ################################################################################################################################
class Invoke(AdminService):
    """ Invokes a generic connection by its name.
    """
    class SimpleIO:
        input_required = 'conn_type', 'conn_name'
        input_optional = 'request_data'
        output_optional = 'response_data'
        response_elem = None

    def handle(self) -> 'None':

        inp = self.request.input
        result = None

        # Maps all known connection types to their implementation ..
        container_by_type = {
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_HL7_FHIR: self.out.hl7.fhir,
            COMMON_GENERIC.CONNECTION.TYPE.OUTCONN_HL7_MLLP: self.out.hl7.mllp
        }

        # .. get the actual implementation ..
        container = container_by_type[inp.conn_type]

        # .. and invoke it, returning either its response or the exception details.
        with container[inp.conn_name].conn.client() as client:
            try:
                result = client.invoke(inp.request_data)
            except Exception:
                result = format_exc()
                self.logger.warning(result)
            finally:
                self.response.payload.response_data = result
# ################################################################################################################################
# ################################################################################################################################
| 22,986
|
Python
|
.py
| 429
| 43.825175
| 130
| 0.485789
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,303
|
rest_wrapper.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/generic/rest_wrapper.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from json import dumps
# Zato
from zato.common.api import CONNECTION, URL_TYPE
from zato.common.broker_message import OUTGOING
from zato.common.odb.model import HTTPSOAP
from zato.common.util.sql import parse_instance_opaque_attr, set_instance_opaque_attrs
from zato.server.service import Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import stranydict
# ################################################################################################################################
# ################################################################################################################################
def _replace_suffix_from_dict_name(data:'stranydict', wrapper_type:'str') -> 'str':
_prefix = wrapper_type + '.'
_name = data['name'] # type: str
_name = _name.replace(_prefix, '', 1)
return _name
# ################################################################################################################################
# ################################################################################################################################
class GetList(Service):
    """ Returns all REST connections that are wrappers, optionally restricted to one wrapper type. """
    name = 'zato.generic.rest-wrapper.get-list'

    def handle(self) -> 'None':

        # Wrapper type to filter by, if one was given on input at all
        requested_type = ''
        if isinstance(self.request.raw_request, dict):
            requested_type = self.request.raw_request.get('wrapper_type', '') # type: str

        # This response has all the REST connections possible ..
        items = self.invoke('zato.http-soap.get-list', {
            'include_wrapper': True,
            'cluster_id': self.server.cluster_id,
            'connection': CONNECTION.OUTGOING,
            'transport': URL_TYPE.PLAIN_HTTP,
            'paginate': False,
        }, skip_response_elem=True)

        # Our response to produce
        out = []

        # .. keep only the wrappers, of the requested type if one was given ..
        for item in items:

            if not item.get('is_wrapper'):
                continue

            item_wrapper_type = item.get('wrapper_type') or ''

            if requested_type and requested_type != item_wrapper_type:
                continue

            # .. enmasse will not send any wrapper type which is why we fall back to the item's own ..
            name_prefix_type = requested_type or item_wrapper_type

            # .. strip the wrapper-type prefix from the name ..
            item['name'] = _replace_suffix_from_dict_name(item, name_prefix_type)

            # .. and include the item in the result.
            out.append(item)

        self.response.payload = dumps(out)
# ################################################################################################################################
# ################################################################################################################################
class _WrapperBase(Service):
    """ Base class for services that manage REST connections acting as wrappers;
    each subclass delegates to the matching zato.http-soap.* service.
    """
    # Suffix of the zato.http-soap.* service that implements the actual action
    _wrapper_impl_suffix = None

    # Whether this action receives a connection name that needs wrapper-type prefix handling
    _uses_name = False

    # SimpleIO
    output = '-id', '-name', '-info', '-is_success'

# ################################################################################################################################

    def _handle(self, initial:'stranydict') -> 'None':

        # Our service to invoke
        service_name = 'zato.http-soap.' + self._wrapper_impl_suffix # type: ignore

        # Base request to create a new wrapper ..
        request = {
            'is_wrapper': True,
            'cluster_id': self.server.cluster_id,
            'connection': CONNECTION.OUTGOING,
            'transport': URL_TYPE.PLAIN_HTTP,
            'url_path': r'{_zato_path}'
        }

        # .. extend it with our own extra input ..
        request.update(initial)

        # .. prepend a prefix to the name given that this is a wrapper ..
        # .. but note that the Delete action does not use a name so this block is optional ..
        if self._uses_name:
            _orig_name = request['name']
            _name = _orig_name
            _wrapper_type = request.get('wrapper_type')
            if _wrapper_type:
                name = f'{_wrapper_type }.{_name}'
            else:
                name = _name
            request['name'] = name

        # .. and send it to the service.
        response = self.invoke(service_name, request, skip_response_elem=True)

        # This is used by Create and Edit actions - the caller gets back the name without the prefix
        if self._uses_name:
            self.response.payload.name = _orig_name # type: ignore

        # These are optional as well
        self.response.payload.id = response.get('id')
        self.response.payload.info = response.get('info')
        self.response.payload.is_success = response.get('is_success')

# ################################################################################################################################

    def handle(self):
        # Pass the raw request through unchanged - subclasses may override handle entirely
        self._handle(self.request.raw_request)
# ################################################################################################################################
# ################################################################################################################################
class Create(_WrapperBase):
    """ Creates a new REST wrapper connection. """
    name = 'zato.generic.rest-wrapper.create'
    response_elem = None
    _wrapper_impl_suffix = 'create'
    _uses_name = True
# ################################################################################################################################
# ################################################################################################################################
class Edit(_WrapperBase):
    """ Updates an existing REST wrapper connection. """
    name = 'zato.generic.rest-wrapper.edit'
    response_elem = None
    _wrapper_impl_suffix = 'edit'
    _uses_name = True
# ################################################################################################################################
# ################################################################################################################################
class Delete(_WrapperBase):
    """ Deletes a REST wrapper connection by its ID. """
    name = 'zato.generic.rest-wrapper.delete'
    _wrapper_impl_suffix = 'delete'
    _uses_name = False
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(_WrapperBase):
    """ Changes the password of a REST wrapper connection and notifies the cluster. """
    name = 'zato.generic.rest-wrapper.change-password'
    _wrapper_impl_suffix = 'edit'
    _uses_name = False

    def handle(self) -> 'None':

        # Reusable
        request = self.request.raw_request

        # This must always exist
        id = request['id']

        # This is optional
        # NOTE(review): if no password is given, an encrypted empty string is still stored - confirm this is intended.
        password = request.get('password') or request.get('password1') or ''
        password = self.server.encrypt(password)

        # Store the encrypted password among the connection's opaque attributes
        with closing(self.odb.session()) as session:
            item = session.query(HTTPSOAP).filter_by(id=id).one()
            opaque = parse_instance_opaque_attr(item)
            opaque['password'] = password
            set_instance_opaque_attrs(item, opaque)
            session.add(item)
            session.commit()

        # Notify all the members of the cluster of the change
        self.broker_client.publish({
            'action': OUTGOING.REST_WRAPPER_CHANGE_PASSWORD.value,
            'id': id,
            'password': password,
        })
# ################################################################################################################################
# ################################################################################################################################
class Ping(_WrapperBase):
    """ Pings a REST wrapper connection by delegating to zato.http-soap.ping. """
    name = 'zato.generic.rest-wrapper.ping'
    _wrapper_impl_suffix = 'ping'
    _uses_name = False
# ################################################################################################################################
# ################################################################################################################################
| 8,773
|
Python
|
.py
| 164
| 45.853659
| 130
| 0.407932
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,304
|
aws.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/aws.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import Cluster, AWSSecurity
from zato.common.odb.query import aws_security_list
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
class GetList(AdminService):
    """ Returns a list of AWS definitions available.
    """
    _filter_by = AWSSecurity.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_security_aws_get_list_request'
        response_elem = 'zato_security_aws_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'username')

    def get_data(self, session):
        """ Runs the filtered AWS security list query for the input cluster. """
        result = self._search(aws_security_list, session, self.request.input.cluster_id, False)
        return result

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
class Create(AdminService):
    """ Creates a new AWS definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_aws_create_request'
        response_elem = 'zato_security_aws_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'username')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input

        # No password is accepted on input - generate a random initial one
        input.password = uuid4().hex

        with closing(self.odb.session()) as session:
            try:
                cluster = session.query(Cluster).filter_by(id=input.cluster_id).first()

                # Let's see if we already have a definition of that name before committing
                # any stuff into the database.
                existing_one = session.query(AWSSecurity).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(AWSSecurity.name==input.name).first()

                if existing_one:
                    raise Exception('AWS definition [{0}] already exists on this cluster'.format(input.name))

                auth = AWSSecurity(None, input.name, input.is_active, input.username, input.password, cluster)

                session.add(auth)
                session.commit()

            except Exception:
                # Use %s so stdlib logging interpolates the traceback - a '{}' placeholder
                # is not rendered by the logger and the details would be lost.
                self.logger.error('Could not create an AWS definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify the rest of the cluster about the new definition
                input.id = auth.id
                input.action = SECURITY.AWS_CREATE.value
                input.sec_type = SEC_DEF_TYPE.AWS
                self.broker_client.publish(input)

        self.response.payload.id = auth.id
        self.response.payload.name = auth.name
class Edit(AdminService):
    """ Updates an AWS definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_aws_edit_request'
        response_elem = 'zato_security_aws_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'is_active', 'username')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            try:
                # Make sure the new name does not collide with another definition
                # on this cluster before making any changes.
                existing_one = session.query(AWSSecurity).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(AWSSecurity.name==input.name).\
                    filter(AWSSecurity.id!=input.id).\
                    first()

                if existing_one:
                    raise Exception('AWS definition [{0}] already exists on this cluster'.format(input.name))

                definition = session.query(AWSSecurity).filter_by(id=input.id).one()
                old_name = definition.name

                definition.name = input.name
                definition.is_active = input.is_active
                definition.username = input.username

                session.add(definition)
                session.commit()

            except Exception:
                # Fix: the logging module uses %-style interpolation, so the original
                # '{}' placeholder was never filled in and the extra argument caused
                # a string-formatting error inside the logging machinery.
                self.logger.error('Could not update the AWS definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers - old_name lets them evict the previous entry.
                input.action = SECURITY.AWS_EDIT.value
                input.old_name = old_name
                input.sec_type = SEC_DEF_TYPE.AWS
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an AWS definition.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_security_aws_change_password_request'
        response_elem = 'zato_security_aws_change_password_response'

    def handle(self):

        # Assigns the new password to the ODB instance in place.
        def _update_password(instance, password):
            instance.password = password

        return self._handle(AWSSecurity, _update_password, SECURITY.AWS_CHANGE_PASSWORD.value)
class Delete(AdminService):
    """ Deletes an AWS definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_aws_delete_request'
        response_elem = 'zato_security_aws_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                auth = session.query(AWSSecurity).\
                    filter(AWSSecurity.id==self.request.input.id).\
                    one()

                session.delete(auth)
                session.commit()
            except Exception:
                # Fix: the logging module uses %-style interpolation, so the original
                # '{}' placeholder was never filled in and the extra argument caused
                # a string-formatting error inside the logging machinery.
                self.logger.error('Could not delete the AWS definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers so they drop the definition from their caches.
                self.request.input.action = SECURITY.AWS_DELETE.value
                self.request.input.name = auth.name
                self.broker_client.publish(self.request.input)
| 6,226
|
Python
|
.py
| 136
| 34.691176
| 110
| 0.611001
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,305
|
ntlm.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/ntlm.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import Cluster, NTLM
from zato.common.odb.query import ntlm_list
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of NTLM definitions available.
    """
    _filter_by = NTLM.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_security_ntlm_get_list_request'
        response_elem = 'zato_security_ntlm_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'username')

    def get_data(self, session):
        # Delegate to the shared, paginated search helper over NTLM definitions.
        cluster_id = self.request.input.cluster_id
        return self._search(ntlm_list, session, cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            self.response.payload[:] = data
# ################################################################################################################################
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new NTLM definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_ntlm_create_request'
        response_elem = 'zato_security_ntlm_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'username')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input

        # A password is always auto-generated on creation.
        input.password = uuid4().hex

        with closing(self.odb.session()) as session:
            try:
                cluster = session.query(Cluster).filter_by(id=input.cluster_id).first()

                # Reject duplicates before touching the database any further.
                existing_one = session.query(NTLM).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(NTLM.name==input.name).first()

                if existing_one:
                    raise Exception('NTLM definition [{0}] already exists on this cluster'.format(input.name))

                auth = NTLM(None, input.name, input.is_active, input.username, input.password, cluster)

                session.add(auth)
                session.commit()

            except Exception:
                self.logger.error('Could not create an NTLM definition, e:`{}`'.format(format_exc()))
                session.rollback()
                raise
            else:
                # Notify all servers so they pick the new definition up.
                input.id = auth.id
                input.action = SECURITY.NTLM_CREATE.value
                input.sec_type = SEC_DEF_TYPE.NTLM
                self.broker_client.publish(input)

                self.response.payload.id = auth.id
                self.response.payload.name = auth.name

        # Make sure the object has been created
        _:'any_' = self.server.worker_store.wait_for_ntlm(input.name)
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an NTLM definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_ntlm_edit_request'
        response_elem = 'zato_security_ntlm_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'is_active', 'username')
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            try:
                # Make sure the new name is not already taken by another definition.
                existing_one = session.query(NTLM).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(NTLM.name==input.name).\
                    filter(NTLM.id!=input.id).\
                    first()

                if existing_one:
                    raise Exception('NTLM definition [{0}] already exists on this cluster'.format(input.name))

                definition = session.query(NTLM).filter_by(id=input.id).one()
                old_name = definition.name

                definition.name = input.name
                definition.is_active = input.is_active
                definition.username = input.username

                session.add(definition)
                session.commit()

            except Exception:
                self.logger.error('Could not update the NTLM definition, e:`{}`'.format(format_exc()))
                session.rollback()
                raise
            else:
                # Notify all servers - old_name lets them evict the previous entry.
                input.action = SECURITY.NTLM_EDIT.value
                input.old_name = old_name
                input.sec_type = SEC_DEF_TYPE.NTLM
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an NTLM definition.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_security_ntlm_change_password_request'
        response_elem = 'zato_security_ntlm_change_password_response'

    def handle(self):

        # Assigns the new password to the ODB instance in place.
        def _update_password(instance, password):
            instance.password = password

        return self._handle(NTLM, _update_password, SECURITY.NTLM_CHANGE_PASSWORD.value)
# ################################################################################################################################
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an NTLM definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_ntlm_delete_request'
        response_elem = 'zato_security_ntlm_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(NTLM).\
                    filter(NTLM.id==self.request.input.id).\
                    one()

                session.delete(item)
                session.commit()
            except Exception:
                self.logger.error('Could not delete the NTLM definition, e:`{}`'.format(format_exc()))
                session.rollback()
                raise
            else:
                # Notify all servers so they drop the definition from their caches.
                self.request.input.action = SECURITY.NTLM_DELETE.value
                self.request.input.name = item.name
                self.broker_client.publish(self.request.input)
# ################################################################################################################################
# ################################################################################################################################
| 8,175
|
Python
|
.py
| 156
| 42.121795
| 130
| 0.464451
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,306
|
oauth.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/oauth.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import Cluster, OAuth
from zato.common.odb.query import oauth_list
from zato.common.util.sql import elems_with_opaque, set_instance_opaque_attrs
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anylist
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of Bearer token definitions available.
    """
    _filter_by = OAuth.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_security_oauth_get_list_request'
        response_elem = 'zato_security_oauth_get_list_response'
        input_required = 'cluster_id'
        output_required = 'id', 'name', 'is_active', 'username', 'client_id_field', 'client_secret_field', 'grant_type'
        output_optional = 'auth_server_url', 'scopes', 'extra_fields', 'data_format'

    def get_data(self, session:'any_') -> 'anylist':
        # Run the paginated search and expand each row's opaque attributes.
        search_result = self._search(oauth_list, session, self.request.input.cluster_id, False)
        return elems_with_opaque(search_result) # type: ignore

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
# ################################################################################################################################
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new Bearer token definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_oauth_create_request'
        response_elem = 'zato_security_oauth_create_response'
        input_required = 'cluster_id', 'name', 'is_active', 'username', 'client_id_field', \
            'client_secret_field', 'grant_type', 'data_format'
        input_optional = 'auth_server_url', 'scopes', 'extra_fields'
        output_required = 'id', 'name'

    def handle(self):
        input = self.request.input

        # A password is always auto-generated on creation.
        input.password = uuid4().hex

        with closing(self.odb.session()) as session:
            try:
                cluster = session.query(Cluster).filter_by(id=input.cluster_id).first()

                # Reject duplicates before touching the database any further.
                existing_one = session.query(OAuth).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(OAuth.name==input.name).first()

                if existing_one:
                    raise Exception('Bearer token definition `{}` already exists in this cluster'.format(input.name))

                definition = OAuth()
                definition.name = input.name
                definition.is_active = input.is_active
                definition.username = input.username
                definition.cluster = cluster # type: ignore

                # Legacy OAuth 1.0 columns that Bearer tokens do not use.
                definition.proto_version = 'not-used' # type: ignore
                definition.sig_method = 'not-used' # type: ignore
                definition.max_nonce_log = 0 # type: ignore

                # Store the remaining input in the definition's opaque attributes.
                set_instance_opaque_attrs(definition, input)

                session.add(definition)
                session.commit()

            except Exception:
                self.logger.error('Bearer token definition could not be created, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers so they pick the new definition up.
                input.id = definition.id
                input.action = SECURITY.OAUTH_CREATE.value
                input.sec_type = SEC_DEF_TYPE.OAUTH
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name

        # Make sure the object has been created
        _:'any_' = self.server.worker_store.wait_for_oauth(input.name)
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an Bearer token definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_oauth_edit_request'
        response_elem = 'zato_security_oauth_edit_response'
        input_required = 'id', 'cluster_id', 'name', 'is_active', 'username', 'client_id_field', \
            'client_secret_field', 'grant_type', 'data_format'
        input_optional = 'auth_server_url', 'scopes', 'extra_fields'
        output_required = 'id', 'name'

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            try:
                # Make sure the new name is not already taken by another definition.
                existing_one = session.query(OAuth).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(OAuth.name==input.name).\
                    filter(OAuth.id!=input.id).\
                    first()

                if existing_one:
                    raise Exception('Bearer token definition `{}` already exists in this cluster'.format(input.name))

                definition = session.query(OAuth).filter_by(id=input.id).one()
                old_name = definition.name

                definition.name = input.name
                definition.is_active = input.is_active
                definition.username = input.username

                # Store the remaining input in the definition's opaque attributes.
                set_instance_opaque_attrs(definition, input)

                session.add(definition)
                session.commit()

            except Exception:
                self.logger.error('Bearer token definition could not be updated, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers - old_name lets them evict the previous entry.
                input.action = SECURITY.OAUTH_EDIT.value
                input.old_name = old_name
                input.sec_type = SEC_DEF_TYPE.OAUTH
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an Bearer token definition.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_security_oauth_change_password_request'
        response_elem = 'zato_security_oauth_change_password_response'

    def handle(self):

        # Assigns the new password to the ODB instance in place.
        def _update_password(instance:'any_', password:'str'):
            instance.password = password

        return self._handle(OAuth, _update_password, SECURITY.OAUTH_CHANGE_PASSWORD.value)
# ################################################################################################################################
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an Bearer token definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_oauth_delete_request'
        response_elem = 'zato_security_oauth_delete_response'
        input_required = 'id'

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(OAuth).\
                    filter(OAuth.id==self.request.input.id).\
                    one()

                session.delete(item)
                session.commit()
            except Exception:
                self.logger.error('Bearer token definition could not be deleted, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers so they drop the definition from their caches.
                self.request.input.action = SECURITY.OAUTH_DELETE.value
                self.request.input.name = item.name
                self.broker_client.publish(self.request.input)
# ################################################################################################################################
# ################################################################################################################################
| 9,363
|
Python
|
.py
| 172
| 43.819767
| 130
| 0.48737
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,307
|
basic_auth.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/basic_auth.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import Cluster, HTTPBasicAuth
from zato.common.odb.query import basic_auth_list
from zato.common.rate_limiting import DefinitionParser
from zato.common.util.sql import elems_with_opaque, set_instance_opaque_attrs
from zato.server.service import Boolean
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of HTTP Basic Auth definitions available.
    """
    _filter_by = HTTPBasicAuth.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_security_basic_auth_get_list_request'
        response_elem = 'zato_security_basic_auth_get_list_response'
        input_required = 'cluster_id',
        output_required = 'id', 'name', 'is_active', 'username', 'realm'
        output_optional = 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', Boolean('rate_limit_check_parent_def')

    def get_data(self, session): # type: ignore
        # Run the paginated search and expand each row's opaque attributes.
        search_result = self._search(basic_auth_list, session, self.request.input.cluster_id, None, False)
        return elems_with_opaque(search_result)

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            self.response.payload[:] = data
# ################################################################################################################################
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new HTTP Basic Auth definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_basic_auth_create_request'
        response_elem = 'zato_security_basic_auth_create_response'
        input_required = 'name', 'is_active', 'username', 'realm'
        input_optional = 'cluster_id', 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', \
            Boolean('rate_limit_check_parent_def')
        output_required = 'id', 'name'

    def handle(self):

        # If we have a rate limiting definition, let's check it upfront
        DefinitionParser.check_definition_from_input(self.request.input)

        input = self.request.input

        # A password is always auto-generated on creation.
        input.password = uuid4().hex

        # Fall back to this server's cluster if none was given on input.
        cluster_id = input.get('cluster_id') or self.server.cluster_id

        with closing(self.odb.session()) as session:
            try:
                cluster = session.query(Cluster).filter_by(id=cluster_id).first()

                # Reject duplicates before touching the database any further.
                existing_one = session.query(HTTPBasicAuth).\
                    filter(Cluster.id==cluster_id).\
                    filter(HTTPBasicAuth.name==input.name).first()

                if existing_one:
                    raise Exception('HTTP Basic Auth definition `{}` already exists in this cluster'.format(input.name))

                auth = HTTPBasicAuth(None, input.name, input.is_active, input.username,
                    input.realm or None, input.password, cluster)
                set_instance_opaque_attrs(auth, input)

                session.add(auth)
                session.commit()

            except Exception:
                self.logger.error('Could not create an HTTP Basic Auth definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers so they pick the new definition up.
                input.id = auth.id
                input.action = SECURITY.BASIC_AUTH_CREATE.value
                input.sec_type = SEC_DEF_TYPE.BASIC_AUTH
                self.broker_client.publish(input)

                self.response.payload.id = auth.id
                self.response.payload.name = auth.name

        # Make sure the object has been created
        _:'any_' = self.server.worker_store.wait_for_basic_auth(input.name)
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an HTTP Basic Auth definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_basic_auth_edit_request'
        response_elem = 'zato_security_basic_auth_edit_response'
        input_required = 'name', 'is_active', 'username', 'realm'
        input_optional = 'id', 'cluster_id', 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', \
            Boolean('rate_limit_check_parent_def')
        output_required = 'id', 'name'

    def handle(self):

        # If we have a rate limiting definition, let's check it upfront
        DefinitionParser.check_definition_from_input(self.request.input)

        # Local aliases
        input = self.request.input
        input_id = input.get('id')
        cluster_id = input.get('cluster_id') or self.server.cluster_id

        with closing(self.odb.session()) as session: # type: ignore
            try:
                # Duplicate-name check - when an ID was given on input, exclude
                # that very definition from the check so renaming to the same
                # name is not reported as a conflict.
                existing_one = session.query(HTTPBasicAuth).\
                    filter(Cluster.id==cluster_id).\
                    filter(HTTPBasicAuth.name==input.name)

                if input_id:
                    existing_one = existing_one.filter(HTTPBasicAuth.id!=input.id)

                existing_one = existing_one.first()

                if existing_one:
                    raise Exception('HTTP Basic Auth definition `{}` already exists on this cluster'.format(input.name))

                # Look the definition up either by ID (when given) or by name.
                definition = session.query(HTTPBasicAuth)

                if input_id:
                    definition = definition.filter_by(id=input.id)
                else:
                    definition = definition.filter_by(name=input.name)

                definition = definition.one()
                old_name = definition.name

                # Store rate-limiting and other extra input in the opaque attributes.
                set_instance_opaque_attrs(definition, input)

                definition.name = input.name
                definition.is_active = input.is_active
                definition.username = input.username
                definition.realm = input.realm or None

                session.add(definition)
                session.commit()

            except Exception:
                self.logger.error('Could not update HTTP Basic Auth definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers - old_name lets them evict the previous entry.
                input.action = SECURITY.BASIC_AUTH_EDIT.value
                input.old_name = old_name
                input.sec_type = SEC_DEF_TYPE.BASIC_AUTH
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an HTTP Basic Auth definition.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_security_basic_auth_change_password_request'
        response_elem = 'zato_security_basic_auth_change_password_response'

    def handle(self):

        # Assigns the new password to the ODB instance in place.
        def _update_password(instance, password): # type: ignore
            instance.password = password

        return self._handle(HTTPBasicAuth, _update_password, SECURITY.BASIC_AUTH_CHANGE_PASSWORD.value)
# ################################################################################################################################
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an HTTP Basic Auth definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_basic_auth_delete_request'
        response_elem = 'zato_security_basic_auth_delete_response'
        input_required = 'id',

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(HTTPBasicAuth).\
                    filter(HTTPBasicAuth.id==self.request.input.id).\
                    one()

                session.delete(item)
                session.commit()
            except Exception:
                self.logger.error('Could not delete HTTP Basic Auth definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers so they drop the definition from their caches.
                self.request.input.action = SECURITY.BASIC_AUTH_DELETE.value
                self.request.input.name = item.name
                self.broker_client.publish(self.request.input)
# ################################################################################################################################
# ################################################################################################################################
| 10,060
|
Python
|
.py
| 180
| 45.433333
| 130
| 0.509872
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,308
|
apikey.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/apikey.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import Cluster, APIKeySecurity
from zato.common.odb.query import apikey_security_list
from zato.common.rate_limiting import DefinitionParser
from zato.common.util.sql import elems_with_opaque, parse_instance_opaque_attr, set_instance_opaque_attrs
from zato.server.service import Boolean
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm import Session as SASession
from zato.common.typing_ import any_, anytuple
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of API keys available.
    """
    _filter_by = APIKeySecurity.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_security_apikey_get_list_request'
        response_elem = 'zato_security_apikey_get_list_response'
        input_required = 'cluster_id',
        output_required = 'id', 'name', 'is_active', 'username'
        output_optional:'anytuple' = 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', \
            Boolean('rate_limit_check_parent_def'), 'header'

    def get_data(self, session:'SASession') -> 'any_':
        # Run the paginated search and expand each row's opaque attributes.
        cluster_id = self.request.input.cluster_id
        search_result = self._search(apikey_security_list, session, cluster_id, False)
        return elems_with_opaque(search_result) # type: ignore

    def handle(self) -> 'None':
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
# ################################################################################################################################
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new API key.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_apikey_create_request'
        response_elem = 'zato_security_apikey_create_response'
        input_required = 'name', 'is_active'
        input_optional:'anytuple' = 'cluster_id', 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', \
            Boolean('rate_limit_check_parent_def'), 'header'
        output_required = 'id', 'name', 'header'

    def handle(self) -> 'None':

        # If we have a rate limiting definition, let's check it upfront
        DefinitionParser.check_definition_from_input(self.request.input)

        input = self.request.input

        # API keys do not use usernames but the underlying model requires one,
        # hence a unique placeholder is generated.
        input.username = 'Zato-Not-Used-' + uuid4().hex

        # The key itself is auto-generated and stored encrypted.
        input.password = uuid4().hex
        input.password = self.server.encrypt(input.password)

        # Use the server-wide default header unless one was given on input.
        input.header = input.header or self.server.api_key_header

        cluster_id = input.get('cluster_id') or self.server.cluster_id

        with closing(self.odb.session()) as session:
            try:
                cluster = session.query(Cluster).filter_by(id=cluster_id).first()

                # Let's see if we already have a definition of that name before committing
                # any stuff into the database.
                existing_one = session.query(APIKeySecurity).\
                    filter(Cluster.id==cluster_id).\
                    filter(APIKeySecurity.name==input.name).first()

                if existing_one:
                    raise Exception('API key `{}` already exists in this cluster'.format(input.name))

                auth = APIKeySecurity(None, input.name, input.is_active, input.username, input.password, cluster)
                set_instance_opaque_attrs(auth, input)

                session.add(auth)
                session.commit()

            except Exception:
                # Fix: the logging module uses %-style interpolation, so the original
                # '{}' placeholder was never filled in and the extra argument caused
                # a string-formatting error inside the logging machinery.
                self.logger.error('API key could not be created, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers so they pick the new definition up.
                input.id = auth.id
                input.action = SECURITY.APIKEY_CREATE.value
                input.sec_type = SEC_DEF_TYPE.APIKEY
                self.broker_client.publish(input)

                self.response.payload.id = auth.id
                self.response.payload.name = auth.name
                self.response.payload.header = input.header

        # Make sure the object has been created
        _:'any_' = self.server.worker_store.wait_for_apikey(input.name)
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an API key.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_apikey_edit_request'
        response_elem = 'zato_security_apikey_edit_response'
        input_required = 'id', 'name', 'is_active'
        input_optional:'anytuple' = 'cluster_id', 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', \
            Boolean('rate_limit_check_parent_def'), 'header'
        output_required = 'id', 'name', 'header'

    def handle(self) -> 'None':
        input = self.request.input

        # Use the server-wide default header unless one was given on input.
        input.header = input.header or self.server.api_key_header

        cluster_id = input.get('cluster_id') or self.server.cluster_id

        # If we have a rate limiting definition, let's check it upfront
        DefinitionParser.check_definition_from_input(input)

        with closing(self.odb.session()) as session:
            try:
                # Make sure the new name does not collide with another API key.
                existing_one = session.query(APIKeySecurity).\
                    filter(Cluster.id==cluster_id).\
                    filter(APIKeySecurity.name==input.name).\
                    filter(APIKeySecurity.id!=input.id).\
                    first()

                if existing_one:
                    raise Exception('API key `{}` already exists in this cluster'.format(input.name))

                definition = session.query(APIKeySecurity).filter_by(id=input.id).one()

                # NOTE(review): the opaque attributes are parsed *before* the update
                # below is applied, so the header returned in the response is the
                # pre-edit one - confirm whether input.header should be returned instead.
                opaque = parse_instance_opaque_attr(definition)

                set_instance_opaque_attrs(definition, input)

                old_name = definition.name
                definition.name = input.name
                definition.is_active = input.is_active

                session.add(definition)
                session.commit()

            except Exception:
                # Fix: the logging module uses %-style interpolation, so the original
                # '{}' placeholder was never filled in and the extra argument caused
                # a string-formatting error inside the logging machinery.
                self.logger.error('API key could not be updated, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers - old_name lets them evict the previous entry.
                input.action = SECURITY.APIKEY_EDIT.value
                input.old_name = old_name
                input.username = definition.username
                input.sec_type = SEC_DEF_TYPE.APIKEY
                self.broker_client.publish(input)

                self.response.payload.id = definition.id
                self.response.payload.name = definition.name
                self.response.payload.header = opaque.header
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an API key.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_security_apikey_change_password_request'
        response_elem = 'zato_security_apikey_change_password_response'

    def handle(self) -> 'None':

        # Callback that stores the new password on the ODB instance in place
        def _update_password(instance:'any_', password:'str') -> 'None':
            instance.password = password

        return self._handle(APIKeySecurity, _update_password, SECURITY.APIKEY_CHANGE_PASSWORD.value)
# ################################################################################################################################
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an API key.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_apikey_delete_request'
        response_elem = 'zato_security_apikey_delete_response'
        input_required = 'id'

    def handle(self) -> 'None':
        with closing(self.odb.session()) as session:
            try:
                auth = session.query(APIKeySecurity).\
                    filter(APIKeySecurity.id==self.request.input.id).\
                    one()

                session.delete(auth)
                session.commit()
            except Exception:
                # Fix: self.logger formats lazily with %-style arguments,
                # so the original '{}' placeholder was never substituted.
                self.logger.error('API key could not be deleted, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers that the definition is gone
                self.request.input.action = SECURITY.APIKEY_DELETE.value
                self.request.input.name = auth.name
                self.broker_client.publish(self.request.input)
# ################################################################################################################################
# ################################################################################################################################
| 10,054
|
Python
|
.py
| 180
| 45.633333
| 130
| 0.508756
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,309
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.const import ServiceConst
from zato.common.odb import query
from zato.common.odb.model import SecurityBase
from zato.server.service import Boolean, Integer, List
from zato.server.service.internal import AdminService, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
# Common SimpleIO output shared by the security services below.
output_required = 'id', 'name', 'is_active', 'sec_type'

# NOTE(review): 'reject_empty_nonce_creat' looks like a truncated element name -- it is a
# wire-level SIO element, so it is kept as-is; confirm against the dashboard before renaming.
output_optional:'any_' = 'username', 'realm', 'password_type', Boolean('reject_empty_nonce_creat'), \
    Boolean('reject_stale_tokens'), Integer('reject_expiry_limit'), Integer('nonce_freshness_time'), 'proto_version', \
    'sig_method', Integer('max_nonce_log')
# ################################################################################################################################
# ################################################################################################################################
class GetByID(AdminService):
    """ Returns a single security definition by its ID.
    """
    class SimpleIO(GetListAdminSIO):
        response_elem = None
        input_required = 'cluster_id', 'id'
        output_required = output_required
        output_optional = output_optional

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            result = query.sec_base(session, input.cluster_id, input.id)
            self.response.payload = result
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of all security definitions available.
    """
    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_security_get_list_request'
        response_elem = 'zato_security_get_list_response'

        # NOTE(review): this first assignment is immediately overwritten by the line below,
        # so it is dead code -- 'cluster_id' reaches the SIO only if GetListAdminSIO
        # already includes it; confirm.
        input_optional = 'cluster_id'
        input_optional:'any_' = GetListAdminSIO.input_optional + (List('sec_type'), Boolean('needs_internal', default=True))
        output_required = output_required
        output_optional = output_optional
        output_repeated = True

    def handle(self):

        # Fall back to our own cluster if none was given on input
        _cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id

        # True if the 'needs_internal' flag was provided at all (empty string = not given)
        _needs_internal = self.request.input.get('needs_internal') != ''

        # Usernames of internal definitions that are filtered out unless explicitly requested
        _internal = {ServiceConst.API_Admin_Invoke_Username}

        if _needs_internal:
            # The flag was given -> include internal definitions only if it is literally True
            needs_internal = True if self.request.input.get('needs_internal') is True else False
        else:
            # The flag was absent -> include internal definitions by default
            needs_internal = True

        with closing(self.odb.session()) as session:

            # Maps each security type to the ODB query returning its definitions
            pairs:'any_' = (
                (SEC_DEF_TYPE.APIKEY, query.apikey_security_list),
                (SEC_DEF_TYPE.AWS, query.aws_security_list),
                (SEC_DEF_TYPE.BASIC_AUTH, query.basic_auth_list),
                (SEC_DEF_TYPE.JWT, query.jwt_list),
                (SEC_DEF_TYPE.NTLM, query.ntlm_list),
                (SEC_DEF_TYPE.OAUTH, query.oauth_list),
                (SEC_DEF_TYPE.VAULT, query.vault_connection_list),
                (SEC_DEF_TYPE.TLS_CHANNEL_SEC, query.tls_channel_sec_list),
                (SEC_DEF_TYPE.TLS_KEY_CERT, query.tls_key_cert_list),
            )

            for def_type, func in pairs:

                # Skip types the caller did not ask for, if any were given
                filter_by = self.request.input.get('sec_type', [])
                if filter_by and def_type not in filter_by:
                    continue

                # basic_auth_list has one extra positional parameter (realm)
                if func is query.basic_auth_list:
                    args = session, _cluster_id, None, False
                else:
                    args = session, _cluster_id, False

                # By default, we have nothing to filter by ..
                kwargs = {}

                # .. unless there is a query on input ..
                if query_criteria := self.request.input.get('query'):
                    kwargs['filter_by'] = SecurityBase.name
                    kwargs['query'] = query_criteria

                for definition in func(*args, **kwargs):

                    # Definitions named zato* or listed in _internal are internal ones
                    if definition.name.startswith('zato') or definition.name in _internal:
                        if not needs_internal:
                            continue
                    self.response.payload.append(definition)
# ################################################################################################################################
# ################################################################################################################################
| 5,290
|
Python
|
.py
| 91
| 48.67033
| 130
| 0.463947
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,310
|
wss.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/wss.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,311
|
jwt.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/jwt.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from http.client import BAD_REQUEST
from traceback import format_exc
from uuid import uuid4
# Cryptography
from cryptography.fernet import Fernet
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import Cluster, JWT
from zato.common.odb.query import jwt_list
from zato.common.rate_limiting import DefinitionParser
from zato.common.util.sql import elems_with_opaque, set_instance_opaque_attrs
from zato.server.connection.http_soap import Unauthorized
from zato.server.jwt_ import JWT as JWTBackend
from zato.server.service import Boolean, Integer, Service
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns the list of JWT definitions available.
    """
    _filter_by = JWT.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_security_jwt_get_list_request'
        response_elem = 'zato_security_jwt_get_list_response'
        input_required = 'cluster_id',
        output_required = 'id', 'name', 'is_active', 'username', Integer('ttl')
        output_optional = 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', Boolean('rate_limit_check_parent_def')

    def get_data(self, session):
        # Run the paginated search and resolve opaque attributes on each row
        search_result = self._search(jwt_list, session, self.request.input.cluster_id, None, False)
        return elems_with_opaque(search_result)

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            self.response.payload[:] = data
# ################################################################################################################################
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new JWT definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_jwt_create_request'
        response_elem = 'zato_security_jwt_create_response'
        input_required = 'cluster_id', 'name', 'is_active', 'username', Integer('ttl')
        input_optional = 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', Boolean('rate_limit_check_parent_def')
        output_required = 'id', 'name'

    def handle(self):

        # If we have a rate limiting definition, let's check it upfront
        DefinitionParser.check_definition_from_input(self.request.input)

        input = self.request.input

        # A random initial password and a Fernet key used for the tokens
        input.password = uuid4().hex
        input.secret = Fernet.generate_key()

        with closing(self.odb.session()) as session:
            try:
                # Let's see if we already have a definition of that name before committing
                # any stuff into the database.
                existing_one = session.query(JWT).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(JWT.name==input.name).first()

                if existing_one:
                    raise Exception('JWT definition `{}` already exists on this cluster'.format(input.name))

                item = self._new_zato_instance_with_cluster(JWT)
                item.name = input.name
                item.is_active = input.is_active
                item.username = input.username
                item.password = input.password
                item.secret = input.secret
                item.ttl = input.ttl
                item.cluster_id = input.cluster_id

                # Store any opaque attributes, e.g. rate limiting configuration
                set_instance_opaque_attrs(item, input)

                session.add(item)
                session.commit()
            except Exception:
                self.logger.error('Could not create a JWT definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers about the new definition
                input.id = item.id
                input.action = SECURITY.JWT_CREATE.value
                input.sec_type = SEC_DEF_TYPE.JWT
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

        # Make sure the object has been created
        _:'any_' = self.server.worker_store.wait_for_jwt(input.name)
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
    """ Updates a JWT definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_jwt_edit_request'
        response_elem = 'zato_security_jwt_edit_response'
        input_required = 'id', 'cluster_id', 'name', 'is_active', 'username', Integer('ttl')
        input_optional = 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', Boolean('rate_limit_check_parent_def')
        output_required = 'id', 'name'

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            try:
                # Reject the new name if another definition already uses it in this cluster
                existing_one = session.query(JWT).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(JWT.name==input.name).\
                    filter(JWT.id!=input.id).\
                    first()

                if existing_one:
                    raise Exception('JWT definition `{}` already exists on this cluster'.format(input.name))

                item = session.query(JWT).filter_by(id=input.id).one()

                # Keep the previous name so subscribers can find the definition to rename
                old_name = item.name

                item.name = input.name
                item.is_active = input.is_active
                item.username = input.username
                item.ttl = input.ttl
                item.cluster_id = input.cluster_id

                # Store any opaque attributes, e.g. rate limiting configuration
                set_instance_opaque_attrs(item, input)

                session.add(item)
                session.commit()
            except Exception:
                self.logger.error('Could not update the JWT definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers about the change
                input.action = SECURITY.JWT_EDIT.value
                input.old_name = old_name
                input.sec_type = SEC_DEF_TYPE.JWT
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name
# ################################################################################################################################
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of a JWT definition.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_security_jwt_change_password_request'
        response_elem = 'zato_security_jwt_change_password_response'

    def handle(self):

        # Callback that stores the new password on the ODB instance in place
        def _update_password(instance, password):
            instance.password = password

        return self._handle(JWT, _update_password, SECURITY.JWT_CHANGE_PASSWORD.value)
# ################################################################################################################################
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes a JWT definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_jwt_delete_request'
        response_elem = 'zato_security_jwt_delete_response'
        input_required = 'id',

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                definition = session.query(JWT).\
                    filter(JWT.id==self.request.input.id).\
                    one()
                session.delete(definition)
                session.commit()
            except Exception:
                self.logger.error('Could not delete the JWT definition, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers that the definition is gone
                self.request.input.action = SECURITY.JWT_DELETE.value
                self.request.input.name = definition.name
                self.broker_client.publish(self.request.input)
# ################################################################################################################################
# ################################################################################################################################
class LogIn(Service):
    """ Logs user into using JWT-backed credentials and returns a new token if credentials were correct.
    """
    class SimpleIO:
        input_required = 'username', 'password'
        input_optional = 'totp_code'
        output_optional = 'token',

# ################################################################################################################################

    def _raise_unathorized(self):
        # NOTE(review): method name carries a typo ('unathorized') -- kept as-is
        # because it is referenced below; rename both together if fixing.
        raise Unauthorized(self.cid, 'Invalid credentials', 'jwt')

# ################################################################################################################################

    def handle(self, _sec_type=SEC_DEF_TYPE.JWT):
        try:
            # The incoming password is decrypted before being handed to the JWT backend
            auth_info = JWTBackend(self.odb, self.server.decrypt, self.server.jwt_secret).authenticate(
                self.request.input.username, self.server.decrypt(self.request.input.password))

            if auth_info:
                token = auth_info.token

                # Checks if there is an SSO user related to that JWT account
                # and logs that person in to SSO or resumes his or her session.
                self.server.sso_tool.on_external_auth(
                    _sec_type, auth_info.sec_def_id, auth_info.sec_def_username, self.cid, self.wsgi_environ,
                    token, self.request.input.totp_code)

                self.response.payload = {'token': auth_info.token}
                self.response.headers['Authorization'] = auth_info.token
            else:
                self._raise_unathorized()
        except Exception:
            # Log the details server-side but return a generic 401 so no details leak to callers
            self.logger.warning(format_exc())
            self._raise_unathorized()
# ################################################################################################################################
# ################################################################################################################################
class LogOut(Service):
    """ Logs a user out of an existing JWT token.
    """
    class SimpleIO(AdminSIO):
        response_elem = None
        output_optional = 'result',
        skip_empty_keys = True

    def handle(self):

        # Extract the bare token from the Authorization header
        token = self.wsgi_environ.get('HTTP_AUTHORIZATION', '').replace('Bearer ', '')
        if isinstance(token, unicode):
            token = token.encode('utf8')

        if not token:
            self.response.status_code = BAD_REQUEST
            self.response.payload.result = 'No JWT found'

            # Fix: previously execution fell through and attempted to delete an empty token,
            # which could overwrite the 'No JWT found' result with a deletion error.
            return

        try:
            JWTBackend(self.odb, self.server.decrypt, self.server.jwt_secret).delete(token)
        except Exception:
            self.logger.warning(format_exc())
            self.response.status_code = BAD_REQUEST
            self.response.payload.result = 'Token could not be deleted'
# ################################################################################################################################
# ################################################################################################################################
| 12,490
|
Python
|
.py
| 232
| 43.818966
| 130
| 0.485234
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,312
|
xpath.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/xpath.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
from uuid import uuid4
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import Cluster, XPathSecurity
from zato.common.odb.query import xpath_sec_list
from zato.common.util.api import validate_xpath
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase, GetListAdminSIO
class GetList(AdminService):
    """ Returns a list of XPath-based security definitions available.
    """
    _filter_by = XPathSecurity.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_security_xpath_get_list_request'
        response_elem = 'zato_security_xpath_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'username', 'username_expr')
        output_optional = ('password_expr',)

    def get_data(self, session):
        cluster_id = self.request.input.cluster_id
        return self._search(xpath_sec_list, session, cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            self.response.payload[:] = data
class _CreateEdit(AdminService):
    """ A common class for both Create and Edit.
    """
    def validate_input(self):
        # Both expressions must be well-formed XPath; password_expr is optional,
        # hence the fallback to a trivially valid expression.
        for expression in (self.request.input.username_expr, self.request.input.get('password_expr') or '/'):
            validate_xpath(expression)
class Create(_CreateEdit):
    """ Creates a new XPath-based security definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_xpath_create_request'
        response_elem = 'zato_security_xpath_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'username', 'username_expr')
        input_optional = ('password_expr',)
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input

        # Assign an initial random password
        input.password = uuid4().hex

        with closing(self.odb.session()) as session:
            try:
                cluster = session.query(Cluster).filter_by(id=input.cluster_id).first()

                # Let's see if we already have a definition of that name before committing
                # any stuff into the database.
                existing_one = session.query(XPathSecurity).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(XPathSecurity.name==input.name).first()

                if existing_one:
                    raise Exception('XPath security definition [{0}] already exists on this cluster'.format(input.name))

                auth = self._new_zato_instance_with_cluster(XPathSecurity)
                auth.name = input.name
                auth.is_active = input.is_active
                auth.username = input.username
                auth.password = input.password
                auth.username_expr = input.username_expr
                auth.password_expr = input.get('password_expr')
                auth.cluster_id = cluster.id

                session.add(auth)
                session.commit()
            except Exception:
                # Fix: the message used a '{}' placeholder (with a missing closing backtick)
                # that self.logger's %-style lazy formatting never substituted.
                self.logger.error('XPath security definition could not be created, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers about the new definition
                input.id = auth.id
                input.action = SECURITY.XPATH_SEC_CREATE.value
                input.sec_type = SEC_DEF_TYPE.XPATH_SEC
                self.broker_client.publish(input)

                self.response.payload.id = auth.id
                self.response.payload.name = auth.name
class Edit(_CreateEdit):
    """ Updates an XPath-based security definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_xpath_edit_request'
        response_elem = 'zato_security_xpath_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'is_active', 'username', 'username_expr')
        input_optional = ('password_expr',)
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:
            try:
                # Reject the new name if another definition already uses it in this cluster
                existing_one = session.query(XPathSecurity).\
                    filter(Cluster.id==input.cluster_id).\
                    filter(XPathSecurity.name==input.name).\
                    filter(XPathSecurity.id!=input.id).\
                    first()

                if existing_one:
                    raise Exception('XPath security definition [{0}] already exists on this cluster'.format(input.name))

                auth = session.query(XPathSecurity).filter_by(id=input.id).one()

                # Keep the previous name so subscribers can find the definition to rename
                old_name = auth.name

                auth.name = input.name
                auth.is_active = input.is_active
                auth.username = input.username
                auth.username_expr = input.username_expr
                auth.password_expr = input.get('password_expr')

                session.add(auth)
                session.commit()
            except Exception:
                # Fix: the message used a '{}' placeholder (with a missing closing backtick)
                # that self.logger's %-style lazy formatting never substituted.
                self.logger.error('XPath security definition could not be updated, e:`%s`', format_exc())
                session.rollback()
                raise
            else:
                # Notify all servers about the change
                input.action = SECURITY.XPATH_SEC_EDIT.value
                input.old_name = old_name
                input.sec_type = SEC_DEF_TYPE.XPATH_SEC
                self.broker_client.publish(input)

                self.response.payload.id = auth.id
                self.response.payload.name = auth.name
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an XPath-based security definition.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_security_xpath_change_password_request'
        response_elem = 'zato_security_xpath_change_password_response'

    def handle(self):

        # Callback that stores the new password on the ODB instance in place
        def _update_password(instance, password):
            instance.password = password

        return self._handle(XPathSecurity, _update_password, SECURITY.XPATH_SEC_CHANGE_PASSWORD.value)
class Delete(AdminService):
    """ Deletes an XPath-based security definition.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_xpath_delete_request'
        response_elem = 'zato_security_xpath_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                definition = session.query(XPathSecurity).\
                    filter(XPathSecurity.id==self.request.input.id).\
                    one()
                session.delete(definition)
                session.commit()
            except Exception:
                msg = 'Could not delete the XPath security definition, e:`{}`'.format(format_exc())
                self.logger.error(msg)
                session.rollback()
                raise
            else:
                # Notify all servers that the definition is gone
                self.request.input.action = SECURITY.XPATH_SEC_DELETE.value
                self.request.input.name = definition.name
                self.broker_client.publish(self.request.input)
| 7,334
|
Python
|
.py
| 156
| 35.852564
| 120
| 0.61532
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,313
|
policy.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/vault/policy.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,314
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/vault/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,315
|
connection.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/vault/connection.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from uuid import uuid4
# SQLAlchemy
from sqlalchemy.orm.exc import NoResultFound
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import VAULT
from zato.common.odb.model import VaultConnection
from zato.common.odb.query import service, vault_connection_list
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# Metadata consumed by the service metaclasses (GetListMeta, CreateEditMeta, DeleteMeta)
# that generate the GetList/Create/Edit/Delete services below.
elem = 'security_vault_connection'
model = VaultConnection
label = 'a Vault connection'
get_list_docs = 'Vault connections'
broker_message = VAULT
broker_message_prefix = 'CONNECTION_'
list_func = vault_connection_list
output_optional_extra = ['service_id', 'service_name']
extra_delete_attrs = ['cluster_id', 'service_id']
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Invoked by the metaclass-generated services before the instance is saved. """
    # Normalize optional TLS foreign keys -- empty values are stored as NULL
    instance.tls_key_cert_id = instance.tls_key_cert_id or None
    instance.tls_ca_cert_id = instance.tls_ca_cert_id or None

    # NOTE(review): a fresh random username is assigned on every invocation, i.e. on edit
    # as well as on create -- confirm the regeneration on edit is intentional.
    instance.username = uuid4().hex
# ################################################################################################################################
def broker_message_hook(self, input, instance, attrs, service_type):
    # Resolve the optional service's name so broker subscribers do not have to look it up
    if not input.service_id:
        return
    with closing(self.odb.session()) as session:
        try:
            matching_service = service(session, input.cluster_id, input.service_id)
        except NoResultFound:
            input.service_name = '' # That is fine, service is optional for Vault connections
        else:
            input.service_name = matching_service.name
# ################################################################################################################################
# The classes below are filled in entirely by their metaclasses,
# driven by the module-level configuration attributes defined above.

@add_metaclass(GetListMeta)
class GetList(AdminService):
    # Column the list can be filtered by
    _filter_by = VaultConnection.name,

@add_metaclass(CreateEditMeta)
class Create(AdminService):
    pass

@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    pass

@add_metaclass(DeleteMeta)
class Delete(AdminService):
    pass
# ################################################################################################################################
| 2,870
|
Python
|
.py
| 58
| 46.448276
| 130
| 0.514337
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,316
|
role.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/rbac/role.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import RBAC
from zato.common.odb.model import RBACRole
from zato.common.odb.query import rbac_role, rbac_role_list
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.service import Service
Bunch = Bunch
Service = Service
# ################################################################################################################################
# Metadata consumed by the service metaclasses (GetListMeta, CreateEditMeta, DeleteMeta)
# that generate the GetList/Create/Edit/Delete services below.
elem = 'security_rbac_role'
model = RBACRole
label = 'an RBAC role'
get_list_docs = 'RBAC roles'
broker_message = RBAC
broker_message_prefix = 'ROLE_'
list_func = rbac_role_list
input_optional_extra = ['parent_name']
output_optional_extra = ['parent_id', 'parent_name']
create_edit_rewrite = ['id']
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    # type: (Service, Bunch, RBACRole, Bunch)
    if attrs.is_create_edit:

        # The built-in root role can be neither created nor renamed
        is_root = input.name.lower() == 'root'
        if is_root:
            raise ValueError('Root role cannot be changed')

        # Resolve the parent role, given by ID or by name, to a concrete ID
        with closing(self.odb.session()) as session:
            parent = rbac_role(session, self.server.cluster_id, input.parent_id, input.parent_name)
            input.parent_id = parent.id

        # NOTE(review): indentation reconstructed from a whitespace-mangled source --
        # the self-parent check is taken to apply within create/edit only; confirm upstream.
        if input.parent_id == instance.id:
            raise ValueError('A role cannot be its own parent')
        else:
            instance.parent_id = input.parent_id
# ################################################################################################################################
def response_hook(self, input, instance, attrs, service_type):
    # type: (Service, Bunch, RBACRole, Bunch, str)
    if service_type == 'create_edit':

        # Return the parent's ID and, if there is one, its human-readable name too
        self.response.payload.parent_id = instance.parent_id

        if instance.parent_id:
            with closing(self.odb.session()) as session:
                parent = session.query(attrs.model).\
                    filter(attrs.model.id==instance.parent_id).one()
                self.response.payload.parent_name = parent.name
# ################################################################################################################################
def broker_message_hook(self, input, instance, attrs, service_type):
    # Make sure the broker message carries the instance's database ID
    input.id = instance.id
# ################################################################################################################################
# Implementation is generated by GetListMeta from this module's attributes
# (elem, model, list_func and friends).
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of RBAC roles.
    """
    _filter_by = RBACRole.name,  # Column the list query can filter on
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates an RBAC role. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an RBAC role. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes an RBAC role. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
| 3,859
|
Python
|
.py
| 77
| 45.714286
| 130
| 0.465582
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,317
|
role_permission.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/rbac/role_permission.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import RBAC
from zato.common.odb.model import RBACRole, RBACPermission, RBACRolePermission, Service
from zato.common.odb.query import rbac_role, rbac_permission, rbac_role_permission_list, service
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.odb.model import RBACClientRole
Bunch = Bunch
RBACClientRole = RBACClientRole
# ################################################################################################################################
elem = 'security_rbac_role_permission'
model = RBACRolePermission
label = 'an RBAC role permission'
get_list_docs = 'RBAC role permissions'
broker_message = RBAC
broker_message_prefix = 'ROLE_PERMISSION_'
list_func = rbac_role_permission_list
input_optional_extra = ['role_name', 'service_name', 'perm_name']
output_optional_extra = ['role_name', 'service_name', 'perm_name']
create_edit_rewrite = ['id']
skip_input_params = ['name']
extra_delete_attrs = ['role_id', 'service_id', 'perm_id']
skip_create_integrity_error = True
check_existing_one = False
# ################################################################################################################################
def get_extra(service, role_id, service_id, perm_id):
    """ Looks up the role, service and permission rows by ID and returns a tuple
    of (composite_name, role_name, service_name, perm_name).
    """
    # type: (Service, int, int, int) -> str
    with closing(service.odb.session()) as session:
        query = session.query
        role_name = query(RBACRole).filter(RBACRole.id==role_id).one().name
        service_name = query(Service).filter(Service.id==service_id).one().name
        perm_name = query(RBACPermission).filter(RBACPermission.id==perm_id).one().name

    composite_name = '{}:::{}::{}'.format(role_name, service_name, perm_name)
    return composite_name, role_name, service_name, perm_name
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Before create/edit, resolves the role, service and permission (each may
    be given by ID or by name) and builds the mapping's composite name.
    """
    # type: (Service, Bunch, RBACRolePermission, Bunch)

    if attrs.is_create_edit:

        cluster_id = self.server.cluster_id

        with closing(self.odb.session()) as session:

            # Each lookup accepts either an ID or a name
            role = rbac_role(session, cluster_id, input.role_id, input.role_name)
            _service = service(session, cluster_id, input.service_id, input.service_name)
            perm = rbac_permission(session, cluster_id, input.perm_id, input.perm_name)

            # Store the resolved IDs both on the instance and back on input,
            # so later hooks see the resolved values as well
            instance.role_id = role.id
            instance.service_id = _service.id
            instance.perm_id = perm.id

            input.role_id = role.id
            input.service_id = _service.id
            input.perm_id = perm.id

            instance.name = '{}:::{}::{}'.format(role.name, _service.name, perm.name)
# ################################################################################################################################
def response_hook(self, input, instance, attrs, service_type):
    """ Enriches responses with human-readable role, service and permission names.
    """
    # type: (Service, Bunch, RBACClientRole, Bunch, str)

    if service_type == 'get_list':

        # Note that get_extra opens its own ODB session for each item
        for item in self.response.payload:
            item_name, role_name, service_name, perm_name = get_extra(self, item.role_id, item.service_id, item.perm_id)
            item.name = item_name
            item.role_name = role_name
            item.service_name = service_name
            item.perm_name = perm_name

    elif service_type == 'create_edit':
        cluster_id = self.server.cluster_id

        with closing(self.odb.session()) as session:
            # The same ID-or-name lookups that instance_hook uses
            role = rbac_role(session, cluster_id, input.role_id, input.role_name)
            _service = service(session, cluster_id, input.service_id, input.service_name)
            perm = rbac_permission(session, cluster_id, input.perm_id, input.perm_name)

            self.response.payload.role_name = role.name
            self.response.payload.service_name = _service.name
            self.response.payload.perm_name = perm.name
# ################################################################################################################################
# Implementation is generated by GetListMeta from this module's attributes.
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of RBAC role permissions.
    """
    _filter_by = RBACRole.name,  # Filtering is done by the role's name
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates an RBAC role permission. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
class Edit(AdminService):
    """ This service is a no-op added only for API completeness.
    """
    def handle(self):
        # Intentionally empty - see the class docstring
        pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes an RBAC role permission. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
| 5,494
|
Python
|
.py
| 102
| 48.294118
| 130
| 0.523569
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,318
|
client_role.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/rbac/client_role.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import RBAC
from zato.common.odb.model import RBACClientRole, RBACRole
from zato.common.odb.query import rbac_client_role_list, rbac_role
from zato.server.service.internal import AdminService, AdminSIO
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.service import Service
Bunch = Bunch
Service = Service
# ################################################################################################################################
elem = 'security_rbac_client_role'
model = RBACClientRole
label = 'an RBAC client role'
get_list_docs = 'RBAC client roles'
broker_message = RBAC
broker_message_prefix = 'CLIENT_ROLE_'
list_func = rbac_client_role_list
input_optional_extra = ['role_name']
output_optional_extra = ['client_name', 'role_name']
create_edit_rewrite = ['id']
skip_input_params = ['name']
extra_delete_attrs = ['client_def', 'role_id']
skip_create_integrity_error = True
check_existing_one = False
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Before create/edit, resolves the role (by ID or name) and builds the
    mapping's composite name out of the client definition and the role name.
    """
    # type: (Service, Bunch, RBACClientRole, Bunch)

    if attrs.is_create_edit:
        with closing(self.odb.session()) as session:
            role = rbac_role(session, self.server.cluster_id, input.role_id, input.role_name)
            instance.role_id = role.id
            instance.name = '{}:::{}'.format(instance.client_def, role.name)
# ################################################################################################################################
def response_hook(self, input, instance, attrs, service_type):
    """ Enriches responses with human-readable client and role names.

    For get_list, a single ODB session is reused for all rows - the original
    code opened a brand-new session for every item in the response, which is
    needlessly expensive for long lists.
    """
    # type: (Service, Bunch, RBACClientRole, Bunch, str)
    if service_type == 'get_list':
        with closing(self.odb.session()) as session:
            for item in self.response.payload:
                role = session.query(RBACRole).\
                    filter(RBACRole.id==item.role_id).one()
                item.client_name = item.client_def
                item.role_name = role.name

    elif service_type == 'create_edit':
        with closing(self.odb.session()) as session:
            role = session.query(RBACRole).\
                filter(RBACRole.id==instance.role_id).one()
            self.response.payload.client_name = instance.client_def
            self.response.payload.role_name = role.name

        # Do not return until internal structures have been populated
        self.server.worker_store.rbac.wait_for_client_role(instance.role_id)
# ################################################################################################################################
def broker_message_hook(self, input, instance, attrs, service_type):
    """ For create/edit actions, copies the role ID resolved earlier onto the
    outgoing broker message. Other action types are left untouched.
    """
    if service_type != 'create_edit':
        return
    input.role_id = instance.role_id
# ################################################################################################################################
# Implementation is generated by GetListMeta from this module's attributes.
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of RBAC client roles.
    """
    _filter_by = RBACClientRole.name,  # Column the list query can filter on
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates an RBAC client role. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes an RBAC client role. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
class GetClientDefList(AdminService):
    """ Returns a list of client definitions - Zato's built-in security mechanisms as well as custom ones, as defined by users.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_security_rbac_client_role_get_client_def_list_request'
        response_elem = 'zato_security_rbac_client_role_get_client_def_list_response'
        input_required = ('cluster_id',)
        output_required = ('client_def', 'client_name')

    def get_data(self, session):
        # Note: the session argument is currently unused - the data comes from
        # service invocations rather than direct DB queries.
        out = []
        service = 'zato.security.get-list'
        request = {'cluster_id':self.request.input.cluster_id, 'needs_internal':False}

        # Built-in definitions - build 'sec_def:::<sec_type>:::<name>' entries
        for item in self.invoke(service, request, as_bunch=True)['zato_security_get_list_response']:
            client_name = '{}:::{}'.format(item.sec_type, item.name)
            out.append({'client_def':'sec_def:::{}'.format(client_name), 'client_name':client_name})

        # It's possible users defined their own security definitions outside of Zato
        # and they also need to be taken into account.
        custom_auth_list_service = self.server.fs_server_config.rbac.custom_auth_list_service
        if custom_auth_list_service:
            out.extend(self.invoke(custom_auth_list_service, {})['response']['items'])

        return out

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
# ################################################################################################################################
| 5,766
|
Python
|
.py
| 106
| 48.90566
| 130
| 0.535943
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,319
|
permission.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/rbac/permission.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import RBAC
from zato.common.odb.model import RBACPermission
from zato.common.odb.query import rbac_permission_list
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.server.service import Service
Bunch = Bunch
Service = Service
# ################################################################################################################################
elem = 'security_rbac_permission'
model = RBACPermission
label = 'an RBAC permission'
get_list_docs = 'RBAC permissions'
broker_message = RBAC
broker_message_prefix = 'PERMISSION_'
list_func = rbac_permission_list
# ################################################################################################################################
def broker_message_hook(service, input, instance, attrs, service_type):
    """ For create/edit actions, copies the database-assigned permission ID
    onto the outgoing broker message. Other action types are left untouched.
    """
    if service_type != 'create_edit':
        return
    input.id = instance.id
# ################################################################################################################################
# Implementation is generated by GetListMeta from this module's attributes.
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of RBAC permissions.
    """
    _filter_by = RBACPermission.name,  # Column the list query can filter on
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates an RBAC permission. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an RBAC permission. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes an RBAC permission. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
| 2,471
|
Python
|
.py
| 50
| 47.02
| 130
| 0.441851
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,320
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/rbac/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from . import client_role, permission, role, role_permission
# So pyflakes doesn't complain
client_role
permission
role
role_permission
| 383
|
Python
|
.py
| 13
| 28.076923
| 82
| 0.769863
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,321
|
channel.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/tls/channel.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import TLSChannelSecurity
from zato.common.odb.query import tls_channel_sec_list
from zato.common.util.api import parse_tls_channel_security_definition
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
elem = 'security_tls_channel'
model = TLSChannelSecurity
label = 'a TLS channel security definition'
get_list_docs = 'TLS channel security definitions'
broker_message = SECURITY
broker_message_prefix = 'TLS_CHANNEL_SEC_'
list_func = tls_channel_sec_list
create_edit_input_required_extra = ['value']
skip_input_params = ['sec_type']
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Validates the definition and fills in model-required fields before a
    TLS channel security definition is created or updated.
    """
    if attrs.is_create_edit:

        # Parsing returns a generator which we exhaust by converting it into a list.
        # An exception is raised on any parsing error.
        list(parse_tls_channel_security_definition(self.request.input.value))

        # So that username, an artificial and inherited field, is not an empty string.
        instance.username = input.username = input.name
        instance.value = (input.get('value') or '').encode('utf8')
# ################################################################################################################################
def broker_message_hook(self, input, instance, attrs, service_type):
    """ Adds this definition's security type to every outgoing broker message.
    """
    input.sec_type = SEC_DEF_TYPE.TLS_CHANNEL_SEC
# ################################################################################################################################
# Implementation is generated by GetListMeta from this module's attributes.
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of TLS channel security definitions.
    """
    _filter_by = TLSChannelSecurity.name,  # Column the list query can filter on
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a TLS channel security definition. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates a TLS channel security definition. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes a TLS channel security definition. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
| 2,807
|
Python
|
.py
| 53
| 50.207547
| 130
| 0.514819
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,322
|
ca_cert.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/tls/ca_cert.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import SECURITY
from zato.common.odb.model import TLSCACert
from zato.common.odb.query import tls_ca_cert_list
from zato.common.util.api import delete_tls_material_from_fs, get_tls_ca_cert_full_path, get_tls_from_payload, store_tls
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
elem = 'security_tls_ca_cert'
model = TLSCACert
label = 'a TLS CA cert'
get_list_docs = 'TLS CA certs'
broker_message = SECURITY
broker_message_prefix = 'TLS_CA_CERT_'
list_func = tls_ca_cert_list
skip_input_params = ['info']
output_optional_extra = ['info']
# ################################################################################################################################
def instance_hook(service, input, instance, attrs):
    """ Fills in model-required fields and stores the CA certificate on the
    local filesystem. Skipped entirely for deletes.
    """
    if attrs.is_delete:
        return

    instance.username = service.cid # Required by model
    instance.info = get_tls_from_payload(input.value).encode('utf8')
    instance.value = instance.value.encode('utf8')

    # Serialize the filesystem write under the service's lock
    with service.lock():
        full_path = store_tls(service.server.tls_dir, service.request.input.value)
        service.logger.info('CA certificate saved under `%s`', full_path)
# ################################################################################################################################
def response_hook(service, input, instance, attrs, service_type):
    """ After a successful create/edit, returns the certificate's metadata
    (instance.info) to the caller. Other service types are left alone.
    """
    if service_type != 'create_edit':
        return
    service.response.payload.info = instance.info
# ################################################################################################################################
def broker_message_hook(service, input, instance, attrs, service_type):
    """ On delete, attaches the certificate payload to the broker message.
    """
    if service_type != 'delete':
        return
    input.value = instance.value
# ################################################################################################################################
def delete_hook(service, input, instance, attrs):
    """ Removes the deleted CA certificate's file from the local filesystem.
    """
    delete_tls_material_from_fs(service.server, instance.info, get_tls_ca_cert_full_path)
# ################################################################################################################################
# Implementation is generated by GetListMeta from this module's attributes.
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of TLS CA certs.
    """
    _filter_by = TLSCACert.name,  # Column the list query can filter on
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a TLS CA cert. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates a TLS CA cert. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes a TLS CA cert. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
| 3,335
|
Python
|
.py
| 62
| 50.951613
| 130
| 0.484755
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,323
|
key_cert.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/tls/key_cert.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
from six import add_metaclass
# Zato
from zato.common.api import SEC_DEF_TYPE
from zato.common.broker_message import SECURITY
from zato.common.odb.model import TLSKeyCertSecurity
from zato.common.odb.query import tls_key_cert_list
from zato.common.util.api import delete_tls_material_from_fs, get_tls_ca_cert_full_path, get_tls_from_payload, store_tls
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
elem = 'security_tls_key_cert'
model = TLSKeyCertSecurity
label = 'a TLS key/cert pair'
get_list_docs = 'TLS key/cert pairs'
broker_message = SECURITY
broker_message_prefix = 'TLS_KEY_CERT_'
list_func = tls_key_cert_list
create_edit_input_required_extra = ['auth_data']
skip_input_params = ['info', 'sec_type']
output_optional_extra = ['info']
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Decrypts and validates the key/cert pair, fills in model-required
    fields and stores the material on disk. Skipped entirely for deletes.
    """
    if attrs.is_delete:
        return

    # auth_data arrives encrypted - the decrypted form is what gets parsed
    # and written out to disk while the database keeps the encrypted one.
    decrypted = self.server.decrypt(input.auth_data).encode('utf8')

    instance.username = self.cid # Required by model
    instance.sec_type = SEC_DEF_TYPE.TLS_KEY_CERT
    instance.info = get_tls_from_payload(decrypted, True).encode('utf8')
    instance.auth_data = input.auth_data.encode('utf8')

    # Serialize the filesystem write under the service's lock
    with self.lock():
        full_path = store_tls(self.server.tls_dir, decrypted, True)
        self.logger.info('Key/cert pair saved under `%s`', full_path)
# ################################################################################################################################
def response_hook(self, input, instance, attrs, service_type):
    """ Returns cert metadata after create/edit and decrypts auth_data for
    each item returned by get_list.
    """
    if service_type == 'create_edit':
        self.response.payload.info = instance.info

    elif service_type == 'get_list':
        for elem in self.response.payload:
            # Stored values may be bytes - decode to text before decrypting
            if not isinstance(elem.auth_data, unicode):
                elem.auth_data = elem.auth_data.decode('utf8')
            elem.auth_data = self.server.decrypt(elem.auth_data)
# ################################################################################################################################
def broker_message_hook(self, input, instance, attrs, service_type):
    """ Adds the security type to each outgoing broker message; on delete,
    the encrypted key/cert material is attached as well.
    """
    input.sec_type = SEC_DEF_TYPE.TLS_KEY_CERT
    if service_type == 'delete':
        input.auth_data = instance.auth_data
# ################################################################################################################################
def delete_hook(self, input, instance, attrs):
    """ Removes the deleted pair's material from the local filesystem.
    """
    # NOTE(review): this passes get_tls_ca_cert_full_path - the CA-cert path
    # helper - for a key/cert pair; confirm this is the intended path builder.
    delete_tls_material_from_fs(self.server, instance.info, get_tls_ca_cert_full_path)
# ################################################################################################################################
# Implementation is generated by GetListMeta from this module's attributes.
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of TLS key/cert pairs.
    """
    _filter_by = TLSKeyCertSecurity.name,  # Column the list query can filter on
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a TLS key/cert pair. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates a TLS key/cert pair. Implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes a TLS key/cert pair. Implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
| 4,069
|
Python
|
.py
| 74
| 51.540541
| 130
| 0.498739
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,324
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/security/tls/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,325
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/connector/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,326
|
amqp_.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/connector/amqp_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.server.service import Service
# ################################################################################################################################
class Start(Service):
    """ Starts an AMQP connection.
    """
    # We assign the name explicitly because otherwise it is turned into zato.connector.amqp-.start (note - instead of _).
    name = 'zato.connector.amqp_.start'

    class SimpleIO:
        input_required = ('cluster_id', 'frame_max', 'heartbeat', 'host', 'id', 'name', 'port', 'username', 'vhost', 'password')
        input_optional = ('old_name',)
        request_elem = 'zato_connector_amqp_start_request'
        response_elem = 'zato_connector_amqp_start_response'

    def handle(self):
        # Delegates to the worker store, which owns AMQP connection objects
        self.server.worker_store.amqp_connection_create(self.request.input)
# ################################################################################################################################
| 1,186
|
Python
|
.py
| 22
| 49.590909
| 130
| 0.535931
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,327
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/kvdb/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import DEBUG, getLogger
from traceback import format_exc
# gevent
from gevent import sleep
# Redis
from redis.sentinel import MasterNotFoundError
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# Zato
from zato.common.exception import ZatoException
from zato.common.kvdb.parsing import redis_grammar
from zato.common.util.kvdb import has_redis_sentinels
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
kvdb_logger = getLogger('zato_kvdb')
# ################################################################################################################################
class ExecuteCommand(AdminService):
    """ Executes a command against the key/value DB and returns its result
    as a string in the response's `result` element.
    """
    name = 'zato.kvdb.remote-command.execute'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_remote_command_execute_request'
        response_elem = 'zato_kvdb_remote_command_execute_response'
        input_required = ('command',)
        output_optional = ('result',)

    def _fixup_parameters(self, parameters):
        """ Fix up quotes so stuff like [SISMEMBER key member] and [SISMEMBER key "member"] is treated the same
        (brackets used here for clarity only to separate commands).
        """
        if parameters:
            has_one = len(parameters) == 1
            first_elem_idx = 0 if has_one else 1

            # Strip the double quotes only when both the opening and the closing one are present
            if parameters[first_elem_idx][0] == '"' and parameters[-1][-1] == '"':
                parameters[first_elem_idx] = parameters[first_elem_idx][1:]
                parameters[-1] = parameters[-1][:-1]

        return parameters

# ################################################################################################################################

    def handle(self):
        input_command = self.request.input.command or ''

        if not input_command:
            msg = 'No command sent'
            raise ZatoException(self.cid, msg)

        try:
            parse_result = redis_grammar.parseString(input_command)

            options = {}
            command = parse_result.command
            parameters = parse_result.parameters if parse_result.parameters else []
            parameters = self._fixup_parameters(parameters)

            # CONFIG and OBJECT subcommands are handed to redis-py via options
            if command == 'CONFIG':
                options['parse'] = parameters[0]
            elif command == 'OBJECT':
                options['infotype'] = parameters[0]

            response = self.server.kvdb.conn.execute_command(command, *parameters, **options) or ''

            # Normalize certain responses to strings for the response payload
            if response and command in ('KEYS', 'HKEYS', 'HVALS'):
                response = unicode(response).encode('utf-8')

            elif command in ('HLEN', 'LLEN', 'LRANGE', 'SMEMBERS', 'HGETALL'):
                response = str(response)

            elif command == 'DUMP':
                response = repr(response)

            self.response.payload.result = response or '(None)'

        except Exception as e:
            # Format with `e` itself rather than e.args[0] - exceptions raised
            # without arguments would otherwise trigger an IndexError here.
            msg = 'Command parsing error, command:`{}`, e:`{}`'.format(input_command, e)
            self.logger.error('msg:`%s`, e:`%s`', msg, format_exc())
            self.response.payload.result = msg
# ################################################################################################################################
class LogConnectionInfo(AdminService):
    """ Writes outs to logs information regarding current connections to KVDB.
    """
    def handle(self):
        # Both the sleep interval and the sentinel configuration come from
        # the server's kvdb config stanza.
        config = self.server.fs_server_config.kvdb
        sleep_time = float(config.log_connection_info_sleep_time)
        has_sentinels = has_redis_sentinels(config)

        # Only loop at all if the dedicated KVDB logger is at DEBUG level -
        # note that once entered this loop never exits.
        if kvdb_logger.isEnabledFor(DEBUG):
            while True:
                if has_sentinels:
                    try:
                        master_address = self.kvdb.conn.connection_pool.connection_kwargs['connection_pool'].get_master_address()
                        kvdb_logger.debug(
                            'Uses sentinels: `%s %r`, master: `%r`', has_sentinels, config.redis_sentinels, master_address)
                    except MasterNotFoundError:
                        # Log the traceback to both the service and the KVDB logger
                        tb = format_exc()
                        self.logger.warning(tb)
                        kvdb_logger.warning(tb)
                else:
                    kvdb_logger.debug(
                        'Uses sentinels: `%s`, conn:`%r`', has_sentinels, self.kvdb.conn)

                sleep(sleep_time)
# ################################################################################################################################
| 4,908
|
Python
|
.py
| 95
| 41.726316
| 130
| 0.532636
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,328
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/kvdb/data_dict/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
from zato.common.ext.future.utils import iteritems
# Zato
from zato.common.api import KVDB
from zato.common.exception import ZatoException
from zato.common.util.api import multikeysort, translation_name
from zato.server.service.internal import AdminService
class DataDictService(AdminService):
    """ Base class for services operating on KVDB-backed data dictionaries and translations.
    """
    def __init__(self, *args, **kwargs):
        super(DataDictService, self).__init__(*args, **kwargs)

        # Per-invocation cache of formatted dictionary items, filled lazily by _get_dict_items
        self._dict_items = []

    def _name(self, system1, key1, value1, system2, key2):
        """ Returns the KVDB hash name of a translation between two system/key pairs.
        """
        return translation_name(system1, key1, value1, system2, key2)

    def _get_dict_item(self, id):
        """ Returns a dictionary entry by its ID.
        """
        for item in self._get_dict_items():
            if item['id'] == str(id):
                return item
        else:
            # for-else: reached only if no entry matched
            msg = 'Could not find the dictionary by its ID:`{}`'.format(id)
            raise ZatoException(self.cid, msg)

    def _get_dict_items_raw(self):
        """ Yields dictionary items without formatting them into Python dictionaries.
        """
        conn = self.server.kvdb.conn
        if conn:
            for id, item in iteritems(conn.hgetall(KVDB.DICTIONARY_ITEM)):
                yield id, item

    def _get_dict_items(self):
        """ Yields nicely formatted dictionary items defined in the KVDB.
        """
        # Populate the cache on first use only
        if not self._dict_items:
            conn = self.server.kvdb.conn
            if conn:
                for id, item in iteritems(conn.hgetall(KVDB.DICTIONARY_ITEM)):
                    # Redis may hand us bytes - normalize to text first
                    item = item if isinstance(item, unicode) else item.decode('utf8')
                    system, key, value = item.split(KVDB.SEPARATOR)
                    self._dict_items.append({'id':str(id), 'system':system, 'key':key, 'value':value})
                self._dict_items = multikeysort(self._dict_items, ['system', 'key', 'value'])

        for item in self._dict_items:
            yield item

    def _get_dict_item_id(self, system, key, value):
        """ Returns a dictionary entry ID by its system, key and value, or None if not found.
        """
        for item in self._get_dict_items():
            if item['system'] == system and item['key'] == key and item['value'] == value:
                return item['id']

    def _get_translations(self):
        """ Yields nicely formatted translations defined in the KVDB.
        """
        conn = self.server.kvdb.conn
        if conn:
            for item in conn.keys(KVDB.TRANSLATION + KVDB.SEPARATOR + '*'):
                vals = conn.hgetall(item)

                # The key itself encodes system1/key1/value1/system2/key2 - split it apart
                item = item if isinstance(item, unicode) else item.decode('utf8')
                item = item.split(KVDB.SEPARATOR)

                value2 = vals.get('value2')
                value2 = value2 if isinstance(value2, unicode) else value2.decode('utf-8')

                yield {'system1':item[1], 'key1':item[2], 'value1':item[3], 'system2':item[4],
                    'key2':item[5], 'id':str(vals.get('id')), 'value2':value2,
                    'id1':str(vals.get('id1')), 'id2':str(vals.get('id2')),}
| 3,255
|
Python
|
.py
| 68
| 37.970588
| 102
| 0.600567
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,329
|
impexp.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/kvdb/data_dict/impexp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import bz2
from base64 import b64decode
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems, iterkeys
from zato.common.py23_.past.builtins import unicode
# Zato
from zato.common.api import KVDB
from zato.common.json_internal import loads
from zato.common.util.api import dict_item_name, translation_name
from zato.server.service.internal import AdminSIO
from zato.server.service.internal.kvdb.data_dict import DataDictService
class Import(DataDictService):
    """ Imports a bz2-compressed JSON document containing data dictionaries replacing any other existing ones.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_impexp_import_request'
        response_elem = 'zato_kvdb_data_dict_impexp_import_response'
        input_optional = 'data'

    def handle(self):
        if not self.server.kvdb.conn:
            return

        # Input arrives as base64-encoded, bz2-compressed JSON
        data = self.request.input.data
        data = b64decode(data)
        if isinstance(data, unicode):
            data = data.encode('utf8')
        data = bz2.decompress(data)
        data = loads(data)

        # All deletes and writes go through one pipeline so they are applied atomically on execute()
        with self.server.kvdb.conn.pipeline() as p:

            # First queue up deletion of all existing dictionary/translation state ..
            p.delete(KVDB.DICTIONARY_ITEM_ID)
            p.delete(KVDB.DICTIONARY_ITEM)
            p.delete(KVDB.TRANSLATION_ID)

            for item in self._get_translations():
                key = translation_name(item['system1'], item['key1'], item['value1'], item['system2'], item['key2'])
                p.delete(key)

            # Another proof software engineering and philosophy have /a lot/ in common!
            data = data['data']

            # .. then recreate the counters, dictionary items and translations from the imported document.
            p.set(KVDB.DICTIONARY_ITEM_ID, data['last_dict_id'])
            if data['last_translation_id']:
                p.set(KVDB.TRANSLATION_ID, data['last_translation_id'])

            for item in data['dict_list']:
                p.hset(KVDB.DICTIONARY_ITEM, item['id'], dict_item_name(item['system'], item['key'], item['value']))

            for item in data['translation_list']:
                # Each element is a one-key dict - the key is the translation's hash name
                key = list(iterkeys(item))[0]
                for value_key, value in iteritems(item[key]):
                    p.hset(key, value_key, value)

            p.execute()
| 2,342
|
Python
|
.py
| 52
| 36.596154
| 116
| 0.649053
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,330
|
dictionary.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/kvdb/data_dict/dictionary.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import re
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# Zato
from zato.common.api import KVDB
from zato.common.exception import ZatoException
from zato.common.util.api import dict_item_name
from zato.server.service import Int
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
from zato.server.service.internal.kvdb.data_dict import DataDictService
class GetList(DataDictService):
    """ Returns a list of dictionary items.
    """
    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_kvdb_data_dict_dictionary_get_list_request'
        response_elem = 'zato_kvdb_data_dict_dictionary_get_list_response'
        output_required = 'id', 'system', 'key', 'value'

    def get_data(self):
        # Delegate to the base class helper which yields formatted entries
        return self._get_dict_items()

    def handle(self):
        data = self.get_data()
        self.response.payload[:] = data
class _CreateEdit(DataDictService):
    """ Base class for creating and updating dictionary entries - validates input
    and persists the (system, key, value) triple in the KVDB.
    """
    # System and key may consist only of letters, digits and underscores.
    # Raw string instead of '\w+' with a noqa marker - '\w' is not a valid string escape.
    NAME_PATTERN = r'\w+'
    NAME_RE = re.compile(NAME_PATTERN)

    class SimpleIO(AdminSIO):
        input_required = 'system', 'key', 'value'
        input_optional = 'id'
        output_optional = 'id'

    def _validate_entry(self, validate_item, id=None):
        """ Confirms that system/key match NAME_PATTERN in full and that the
        (system, key, value) triple does not duplicate an existing entry
        (other than the one being edited, identified by id).
        """
        for elem in('system', 'key'):
            name = self.request.input[elem]
            match = self.NAME_RE.match(name)
            # match.group() == name ensures the whole input matched, not just a prefix
            if match and match.group() == name:
                continue
            else:
                msg = 'System and key may contain only letters, digits and an underscore, failed to validate `{}` ' + \
                    'against the regular expression {}'
                msg = msg.format(name, self.NAME_PATTERN)
                raise ZatoException(self.cid, msg)

        for item in self._get_dict_items():
            joined = KVDB.SEPARATOR.join((item['system'], item['key'], item['value']))
            if validate_item == joined and id != item['id']:
                msg = 'The triple of system:`{}`, key:`{}`, value:`{}` already exists'.format(
                    item['system'], item['key'], item['value'])
                raise ZatoException(self.cid, msg)

        return True

    def _get_item_name(self):
        """ Returns the KVDB-encoded name of the entry from the current input.
        """
        return dict_item_name(self.request.input.system, self.request.input.key, self.request.input.value)

    def handle(self):
        if not self.server.kvdb.conn:
            return

        item = self._get_item_name()

        # Reuse the ID on Edit, allocate a fresh one on Create
        if self.request.input.get('id'):
            id = self.request.input.id
        else:
            id = self.server.kvdb.conn.incr(KVDB.DICTIONARY_ITEM_ID)
        id = str(id)

        if self._validate_entry(item, id):
            self._handle(id)

        self.server.kvdb.conn.hset(KVDB.DICTIONARY_ITEM, id, item)

        self.response.payload.id = id

    def _handle(self, *args, **kwargs):
        # Subclasses provide create- or edit-specific processing
        raise NotImplementedError('Must be implemented by a subclass')
class Create(_CreateEdit):
    """ Creates a new dictionary entry.
    """
    class SimpleIO(_CreateEdit.SimpleIO):
        request_elem = 'zato_kvdb_data_dict_dictionary_create_request'
        response_elem = 'zato_kvdb_data_dict_dictionary_create_response'

    def _handle(self, *ignored_args, **ignored_kwargs):
        # Creating an entry needs no processing beyond what _CreateEdit.handle already does
        pass
class Edit(_CreateEdit):
    """ Updates a dictionary entry.
    """
    class SimpleIO(_CreateEdit.SimpleIO):
        request_elem = 'zato_kvdb_data_dict_dictionary_edit_request'
        response_elem = 'zato_kvdb_data_dict_dictionary_edit_response'

    def _handle(self, id):
        """ Renames/updates any translations that reference the dictionary entry being edited.
        """
        if not self.server.kvdb.conn:
            return

        for item in self._get_translations():
            if item['id1'] == id or item['id2'] == id:
                existing_name = self._name(item['system1'], item['key1'], item['value1'], item['system2'], item['key2'])

                # Recompute the translation's hash name with the updated entry
                # substituted on whichever side references it
                if item['id1'] == id:
                    hash_name = self._name(
                        self.request.input.system, self.request.input.key, self.request.input.value,
                        item['system2'], item['key2'])
                else:
                    hash_name = self._name(
                        item['system1'], item['key1'], item['value1'], self.request.input.system, self.request.input.key)

                # Nothing changed for this translation - skip it. Fixed: the original compared
                # item['value2'] to the whole request.input object, which could never be equal,
                # so this short-circuit never fired; compare the actual input value instead.
                if existing_name == hash_name and item['value2'] == self.request.input.value:
                    continue

                if existing_name != hash_name:
                    self.server.kvdb.conn.renamenx(existing_name, hash_name)

                if item['id2'] == id:
                    self.server.kvdb.conn.hset(hash_name, 'value2', self.request.input.value)
class Delete(DataDictService):
    """ Deletes a dictionary entry by its ID, along with every translation referencing it.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_dictionary_delete_request'
        response_elem = 'zato_kvdb_data_dict_dictionary_delete_response'
        input_required = 'id'
        output_optional = 'id'

    def handle(self):
        conn = self.server.kvdb.conn
        if not conn:
            return

        entry_id = str(self.request.input.id)
        conn.hdel(KVDB.DICTIONARY_ITEM, entry_id)

        # Drop any translation that points at the deleted entry on either side
        for translation in self._get_translations():
            if entry_id in (translation['id1'], translation['id2']):
                conn.delete(
                    self._name(translation['system1'], translation['key1'], translation['value1'],
                        translation['system2'], translation['key2']))

        self.response.payload.id = self.request.input.id
class _DictionaryEntryService(DataDictService):
    """ Base class for returning a list of systems, keys and values.
    """
    def get_data(self, needs_systems=False, by_system=None, by_key=None):
        """ Yields systems, keys or values depending on which filters are given.
        """
        conn = self.server.kvdb.conn
        if not conn:
            return

        for triple in conn.hvals(KVDB.DICTIONARY_ITEM):

            # Normalize bytes from Redis to text before splitting
            if not isinstance(triple, unicode):
                triple = triple.decode('utf-8')
            system, key, value = triple.split(KVDB.SEPARATOR)

            if needs_systems:
                yield system
            elif by_system:
                if by_key:
                    if system == by_system and key == by_key:
                        yield value
                elif system == by_system:
                    yield key
class GetSystemList(_DictionaryEntryService):
    """ Returns a list of systems used in dictionaries.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_dictionary_get_system_list_request'
        response_elem = 'zato_kvdb_data_dict_dictionary_get_system_list_response'
        output_required = ('name',)

    def handle(self):
        # De-duplicate and sort before returning
        systems = sorted(set(self.get_data(True)))
        self.response.payload[:] = [{'name': system} for system in systems]
class GetKeyList(_DictionaryEntryService):
    """ Returns a list of keys used in a system's dictionary.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_dictionary_get_key_list_request'
        response_elem = 'zato_kvdb_data_dict_dictionary_get_key_list_response'
        input_required = ('system',)
        output_required = ('name',)

    def handle(self):
        # De-duplicate and sort the keys belonging to the input system
        keys = sorted(set(self.get_data(False, self.request.input.system)))
        self.response.payload[:] = [{'name': key} for key in keys]
class GetValueList(_DictionaryEntryService):
    """ Returns a list of values used in a system dictionary's key.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_dictionary_get_value_list_request'
        response_elem = 'zato_kvdb_data_dict_dictionary_get_value_list_response'
        input_required = 'system', 'key'
        output_required = 'name'

    def handle(self):
        # De-duplicate and sort the values belonging to the input system/key pair
        values = sorted(set(self.get_data(False, self.request.input.system, self.request.input.key)))
        self.response.payload[:] = [{'name': value} for value in values]
class GetLastID(AdminService):
    """ Returns the value of the last dictionary's ID or nothing at all if the key for holding its value doesn't exist.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_dictionary_get_last_id_request'
        response_elem = 'zato_kvdb_data_dict_dictionary_get_last_id_response'
        output_optional = Int('value')

    def handle(self):
        conn = self.server.kvdb.conn
        if not conn:
            return

        # Fall back to an empty string when the counter key does not exist yet
        last_id = conn.get(KVDB.DICTIONARY_ITEM_ID)
        self.response.payload.value = last_id or ''
| 8,408
|
Python
|
.py
| 178
| 37.848315
| 123
| 0.625428
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,331
|
translation.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/kvdb/data_dict/translation.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from hashlib import sha1, sha256
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# Zato
from zato.common.api import KVDB
from zato.common.exception import ZatoException
from zato.common.util.api import hexlify, multikeysort
from zato.server.service import Int
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
from zato.server.service.internal.kvdb.data_dict import DataDictService
class _DeletingService(DataDictService):
    """ Subclasses of this class know how to delete a translation.
    """
    def delete(self, id):
        """ Removes the translation whose numeric ID equals the one given on input.
        """
        conn = self.server.kvdb.conn
        if not conn:
            return

        for translation in self._get_translations():
            if int(translation['id']) == id:
                # Rebuild the full KVDB key of the translation and remove it
                key_parts = (KVDB.TRANSLATION, translation['system1'], translation['key1'],
                    translation['value1'], translation['system2'], translation['key2'])
                conn.delete(KVDB.SEPARATOR.join(key_parts))
class GetList(DataDictService):
    """ Returns a list of translations.
    """
    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_kvdb_data_dict_translation_get_list_request'
        response_elem = 'zato_kvdb_data_dict_translation_get_list_response'
        output_required = ('id', 'system1', 'key1', 'value1', 'system2', 'key2', 'value2', 'id1', 'id2')

    def get_data(self):
        # Sort translations by every component for a stable, readable listing
        sort_keys = ['system1', 'key1', 'value1', 'system2', 'key2', 'value2']
        return multikeysort(self._get_translations(), sort_keys)

    def handle(self):
        self.response.payload[:] = self.get_data()
class _CreateEdit(DataDictService):
    """ A base class for both Create and Edit actions.
    """
    def _validate_name(self, name, system1, key1, value1, system2, key2, id):
        """ Makes sure the translation doesn't already exist. Returns True when
        validation passes, raises ZatoException on a duplicate.
        """
        if not self.server.kvdb.conn:
            return

        def _exception():
            msg = 'A mapping between system1:[{}], key1:[{}], value1:[{}] and system2:[{}], key2:[{}] already exists'.format(
                system1, key1, value1, system2, key2)
            self.logger.error(msg)
            raise ZatoException(self.cid, msg)

        if self.server.kvdb.conn.exists(name):

            # No ID means it's a Create so it's a genuine match of an existing mapping
            if not id:
                _exception()

            # We've got an ID so it's an Edit and we need ignore it if we're
            # editing ourself.
            existing_id = self.server.kvdb.conn.hget(name, 'id')
            if not str(existing_id) == str(id):
                _exception()

        return True

    def _get_item_ids(self):
        """ Returns IDs of the dictionary entries used in the translation.
        """
        item_ids = {'id1':None, 'id2':None}

        # Resolve both sides of the translation to their dictionary entry IDs
        for idx in('1', '2'):
            system = self.request.input.get('system' + idx)
            key = self.request.input.get('key' + idx)
            value = self.request.input.get('value' + idx)
            item_ids['id' + idx] = self._get_dict_item_id(system, key, value)

        # This is a sanity check, in theory the input data can't possibly be outside
        # of what's in the KVDB.DICTIONARY_ITEM key
        for idx in('1', '2'):
            if not item_ids['id' + idx]:
                msg = 'Could not find the ID for system:[{}], key:[{}], value:[{}]'.format(
                    self.request.input.get('system' + idx), self.request.input.get('key' + idx),
                    self.request.input.get('value' + idx))
                raise ZatoException(self.cid, msg)

        return item_ids

    def handle(self):
        """ Validates input and defers the actual create/edit work to the subclass's _handle.
        """
        system1 = self.request.input.system1
        key1 = self.request.input.key1
        value1 = self.request.input.value1
        system2 = self.request.input.system2
        key2 = self.request.input.key2

        item_ids = self._get_item_ids()
        hash_name = self._name(system1, key1, value1, system2, key2)

        if self._validate_name(hash_name, system1, key1, value1, system2, key2, self.request.input.get('id')):
            self.response.payload.id = self._handle(hash_name, item_ids)

    def _handle(self, *args, **kwargs):
        # Subclasses implement the actual create/edit logic
        raise NotImplementedError('Must be implemented by a subclass')

    def _set_hash_fields(self, hash_name, item_ids):
        """ Stores the IDs of both dictionary entries along with value2 in the translation's hash.
        """
        if not self.server.kvdb.conn:
            return
        self.server.kvdb.conn.hset(hash_name, 'id1', item_ids['id1'])
        self.server.kvdb.conn.hset(hash_name, 'id2', item_ids['id2'])
        self.server.kvdb.conn.hset(hash_name, 'value2', self.request.input.value2)
class Create(_CreateEdit):
    """ Creates a translation between dictionary entries.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_translation_create_request'
        response_elem = 'zato_kvdb_data_dict_translation_create_response'
        input_required = ('system1', 'key1', 'value1', 'system2', 'key2', 'value2')
        output_required = ('id',)

    def _handle(self, hash_name, item_ids):
        conn = self.server.kvdb.conn
        if not conn:
            return

        # Allocate a fresh translation ID and persist it along with the hash fields
        new_id = conn.incr(KVDB.TRANSLATION_ID)
        conn.hset(hash_name, 'id', new_id)
        self._set_hash_fields(hash_name, item_ids)

        return new_id
class Edit(_CreateEdit):
    """ Updates a translation between dictionary entries.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_translation_edit_request'
        response_elem = 'zato_kvdb_data_dict_translation_edit_response'
        input_required = ('id', 'system1', 'key1', 'value1', 'system2', 'key2', 'value2')
        output_required = ('id',)

    def _handle(self, hash_name, item_ids):
        if not self.server.kvdb.conn:
            return

        for item in self._get_translations():
            if item['id'] == str(self.request.input.id):
                existing_name = self._name(item['system1'], item['key1'], item['value1'], item['system2'], item['key2'])

                # The name changed - rename the Redis key, but renamenx only succeeds
                # if the target key does not already exist
                if existing_name != hash_name:
                    self.server.kvdb.conn.renamenx(existing_name, hash_name)
                self._set_hash_fields(hash_name, item_ids)
                break

        return self.request.input.id
class Delete(_DeletingService):
    """ Deletes a translation between dictionary entries.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_translation_delete_request'
        response_elem = 'zato_kvdb_data_dict_translation_delete_response'
        input_required = ('id',)

    def handle(self):
        # All the work is done in the base class
        translation_id = self.request.input.id
        self.delete(translation_id)
class Translate(AdminService):
    """ Translates keys and values between systems.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_translation_translate_request'
        response_elem = 'zato_kvdb_data_dict_translation_translate_response'
        input_required = ('system1', 'key1', 'value1', 'system2', 'key2')
        output_optional = ('value2', 'repr', 'hex', 'sha1', 'sha256')

    def handle(self):
        # NOTE(review): self.translate is inherited from the service base/KVDB layer -
        # presumably it resolves value1 in system1/key1 to the matching value in
        # system2/key2; confirm against the base class.
        result = self.translate(self.request.input.system1, self.request.input.key1, self.request.input.value1,
            self.request.input.system2, self.request.input.key2)

        if result:
            result = result if isinstance(result, unicode) else result.decode('utf-8')
            result_bytes = result.encode('utf8')

            # Return the translated value along with several alternative representations
            self.response.payload.value2 = result
            self.response.payload.repr = repr(result)
            self.response.payload.hex = hexlify(result)
            self.response.payload.sha1 = sha1(result_bytes).hexdigest()
            self.response.payload.sha256 = sha256(result_bytes).hexdigest()
class GetLastID(AdminService):
    """ Returns the value of the last dictionary's ID or nothing at all if the key for holding its value doesn't exist.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_kvdb_data_dict_translation_get_last_id_request'
        response_elem = 'zato_kvdb_data_dict_translation_get_last_id_response'
        output_optional = (Int('value'),)

    def handle(self):
        conn = self.server.kvdb.conn
        if not conn:
            return

        # May be None if the counter key does not exist yet
        self.response.payload.value = conn.get(KVDB.TRANSLATION_ID)
| 8,346
|
Python
|
.py
| 169
| 40.597633
| 146
| 0.635436
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,332
|
file_transfer.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/file_transfer.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.api import FILE_TRANSFER
from zato.common.util.file_transfer import parse_extra_into_list
from zato.server.service import Service
# ################################################################################################################################
# ################################################################################################################################
class ChannelFileTransferHandler(Service):
    """ A no-op marker service used by file transfer channels.
    """
    name = FILE_TRANSFER.SCHEDULER_SERVICE

# ################################################################################################################################

    def handle(self):
        raw = self.request.raw_request
        if not raw:
            return

        # Turn the raw input into a list of channel (observer) IDs ..
        channel_id_list = parse_extra_into_list(raw)

        # .. and start a snapshot observer for each one.
        for channel_id in channel_id_list:
            self.server.worker_store.file_transfer_api.run_snapshot_observer(channel_id, 1)
# ################################################################################################################################
# ################################################################################################################################
| 1,528
|
Python
|
.py
| 27
| 51.888889
| 130
| 0.394896
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,333
|
json_rpc.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/json_rpc.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from traceback import format_exc
# Bunch
from bunch import bunchify
# Zato
from zato.common.api import CONNECTION, JSON_RPC, URL_TYPE
from zato.common. exception import Unauthorized
from zato.common.json_internal import dumps, loads
from zato.common.json_rpc import ErrorCtx, Forbidden, InternalError, ItemResponse, JSONRPCHandler, ParseError, \
RateLimitReached as JSONRPCRateLimitReached, RequestContext
from zato.common.json_schema import ValidationException as JSONSchemaValidationException
from zato.common.odb.model import HTTPSOAP
from zato.common.simpleio_ import drop_sio_elems
from zato.common.rate_limiting.common import AddressNotAllowed, RateLimitReached
from zato.server.service import Boolean, List
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
get_attrs_req = 'id', 'name', 'is_active', 'url_path', 'sec_type', 'sec_use_rbac', 'security_id'
attrs_opt = 'is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', Boolean('rate_limit_check_parent_def'), \
List('service_whitelist')
# ################################################################################################################################
# ################################################################################################################################
class _BaseSimpleIO(AdminSIO):
    # Common SimpleIO settings shared by the JSON-RPC admin services below
    skip_empty_keys = True
    response_elem = None
# ################################################################################################################################
# ################################################################################################################################
class _GetBase(AdminService):
    """ Common functionality for services returning JSON-RPC channel details.
    """
    def pre_process_item(self, item):
        # Strip the internal channel prefix from the user-visible name (first occurrence only)
        prefix = JSON_RPC.PREFIX.CHANNEL + '.'
        item['name'] = item['name'].replace(prefix, '', 1)
# ################################################################################################################################
# ################################################################################################################################
class GetList(_GetBase):
    """ Returns a list of JSON-RPC channels.
    """
    _filter_by = HTTPSOAP.name,

    class SimpleIO(GetListAdminSIO):
        input_required = 'cluster_id'
        output_required = get_attrs_req
        output_optional = attrs_opt
        output_repeated = True
        response_elem = None

    def handle(self):

        # All the plain HTTP channels defined in this cluster ..
        response = self.invoke('zato.http-soap.get-list', {
            'cluster_id': self.request.input.cluster_id,
            'connection': CONNECTION.CHANNEL,
            'transport': URL_TYPE.PLAIN_HTTP,
        }, skip_response_elem=True)

        # .. out of which only JSON-RPC ones are returned.
        out = []
        for item in response:
            if item['name'].startswith(JSON_RPC.PREFIX.CHANNEL):
                self.pre_process_item(item)
                out.append(item)

        self.response.payload[:] = out
# ################################################################################################################################
# ################################################################################################################################
class Get(AdminService):
    """ Returns details of a single JSON-RPC channel.
    """
    class SimpleIO(_BaseSimpleIO):
        input_required = 'cluster_id'
        input_optional = 'id', 'name'
        output_required = get_attrs_req
        output_optional = attrs_opt

    def handle(self):
        # Delegate to the generic HTTP channel lookup service
        response = self.invoke('zato.http-soap.get', self.request.input, skip_response_elem=True)
        self.response.payload = response
# ################################################################################################################################
# ################################################################################################################################
class _CreateEdit(AdminService):
    """ Base class for services creating or updating JSON-RPC channels.
    """
    target_service_suffix = '<undefined>'

    class SimpleIO(_BaseSimpleIO):
        input_required = 'cluster_id', 'name', 'is_active', 'url_path', 'security_id', List('service_whitelist')
        input_optional = drop_sio_elems(attrs_opt, 'service_whitelist')
        output_required = 'id', 'name'
        skip_empty_keys = True
        response_elem = None

    def handle(self):

        # Build the underlying plain-HTTP channel request out of our own input ..
        request = self.request.input.deepcopy()
        request.name = '{}.{}'.format(JSON_RPC.PREFIX.CHANNEL, request.name)

        # .. every JSON-RPC channel shares these fixed attributes ..
        request.is_internal = False
        request.connection = CONNECTION.CHANNEL
        request.transport = URL_TYPE.PLAIN_HTTP
        request.http_accept = '*/*'
        request.method = 'POST'
        request.service = 'pub.zato.channel.json-rpc.gateway'
        request.cache_expiry = 0

        # .. and hand it over to the create/edit HTTP channel service.
        service_name = 'zato.http-soap.{}'.format(self.target_service_suffix)
        response = self.invoke(service_name, request, skip_response_elem=True)

        self.response.payload.id = response['id']
        self.response.payload.name = response['name']
# ################################################################################################################################
# ################################################################################################################################
class Create(_CreateEdit):
    """ Creates a new JSON-RPC channel.
    """
    target_service_suffix = 'create'
# ################################################################################################################################
# ################################################################################################################################
class Edit(_CreateEdit):
    """ Updates an existing JSON-RPC channel.
    """
    target_service_suffix = 'edit'

    class SimpleIO(_CreateEdit.SimpleIO):
        # Editing additionally requires the ID of the channel being updated
        input_required = _CreateEdit.SimpleIO.input_required + ('id',)
# ################################################################################################################################
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes a JSON-RPC channel.
    """
    class SimpleIO(_BaseSimpleIO):
        input_required = 'cluster_id', 'id'

    def handle(self):
        # The underlying HTTP channel service does the actual deletion
        request = self.request.input
        self.invoke('zato.http-soap.delete', request)
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCGateway(AdminService):
    """ A gateway service via which JSON-RPC requests are accepted.
    """
    name = 'pub.zato.channel.json-rpc.gateway'

# ################################################################################################################################

    def handle(self):
        try:
            channel_config = self.server.worker_store.request_dispatcher.url_data.get_channel_by_name(self.channel.name)
            message = loads(self.request.payload.decode('utf8'))

            #
            # At this point we know that our own service can be invoked (the gateway itself),
            # but we also need to check security as it pertains to the JSON-RPC method itself
            # which is another Zato service. Note that since JSON-RPC is a kind of an HTTP-based channel
            # we can have security definitions of three types:
            #
            # a) No security defined
            # b) A specific security definition
            # c) Delegated to RBAC
            #
            # Case a) does not require anything
            # Case b) assumes that the very JSON-RPC and the service being invoked share the definition
            # Case c) requires an additional check because different RBAC permissions may be assigned
            #    to the gateway itself vs. the service that is to be invoked
            #
            channel_item = self.wsgi_environ['zato.channel_item'] # type: dict

            if channel_item['sec_use_rbac']:

                inner_channel_item = {}
                inner_channel_item['url_path'] = channel_item['url_path']
                inner_channel_item['service_id'] = self.server.service_store.get_service_id_by_name(message['method'])

                # NOTE(review): `self.chan` is not a documented Service attribute - presumably
                # this should be `self.channel`; confirm before relying on the RBAC path.
                self.server.worker_store.request_dispatcher.url_data.check_rbac_delegated_security(
                    self.chan.sec, self.cid, inner_channel_item, inner_channel_item['url_path'], self.request.raw_request,
                    self.wsgi_environ, self.request.http.POST, self.server.worker_store)

        except Exception as e:
            self.logger.warning('JSON-RPC error in `%s` (%s), e:`%s`', self.channel.name, self.cid, format_exc())

            error_ctx = ErrorCtx()
            error_ctx.cid = self.cid

            # A single if/elif chain - previously the ValueError branch was a separate `if`,
            # so the trailing `else` always overwrote ParseError with InternalError.

            # JSON parsing error
            if isinstance(e, ValueError):
                code = ParseError.code
                message = 'Parsing error'

            # Source address is not allowed to invoke the service
            elif isinstance(e, (AddressNotAllowed, Unauthorized)):
                code = Forbidden.code
                message = 'You are not allowed to access this resource'

            elif isinstance(e, RateLimitReached):
                code = JSONRPCRateLimitReached.code
                message = 'Rate limit reached'

            # Any other error
            else:
                code = InternalError.code
                message = 'Message could not be handled'

            error_ctx.code = code
            error_ctx.message = message

            out = ItemResponse()
            out.error = error_ctx

            response = out.to_dict()

        else:
            ctx = RequestContext()
            ctx.cid = self.cid
            ctx.message = message
            ctx.orig_message = self.request.raw_request

            handler = JSONRPCHandler(
                self.server.service_store, self.wsgi_environ, bunchify(channel_config), self.invoke, self.channel,
                JSONSchemaValidationException)
            response = handler.handle(ctx)

        self.response.content_type = 'application/json'
        self.response.payload = dumps(response)
# ################################################################################################################################
# ################################################################################################################################
| 10,631
|
Python
|
.py
| 179
| 51.134078
| 130
| 0.462959
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,334
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,335
|
jms_wmq.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/jms_wmq.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from base64 import b64decode
from binascii import unhexlify
from contextlib import closing
from traceback import format_exc
# Arrow
from arrow import get as arrow_get
# Python 2/3 compatibility
from zato.common.py23_ import pickle_loads
# Zato
from zato.common.api import CHANNEL
from zato.common.broker_message import CHANNEL as BROKER_MSG_CHANNEL
from zato.common.ccsid_ import CCSIDConfig
from zato.common.exception import ServiceMissingException
from zato.common.json_internal import loads
from zato.common.odb.model import ChannelWMQ, Cluster, ConnDefWMQ, Service as ModelService
from zato.common.odb.query import channel_wmq_list
from zato.common.util.api import payload_from_request
from zato.common.util.time_ import datetime_from_ms
from zato.server.service import Service
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
class GetList(AdminService):
    """ Returns the list of IBM MQ channels defined in a given cluster.
    """
    _filter_by = ChannelWMQ.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_channel_jms_wmq_get_list_request'
        response_elem = 'zato_channel_jms_wmq_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'def_id', 'def_name', 'queue', 'service_name')
        output_optional = ('data_format',)

    def get_data(self, session):
        # Delegate to the shared search helper so paging and filtering behave uniformly
        cluster_id = self.request.input.cluster_id
        return self._search(channel_wmq_list, session, cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as odb_session:
            self.response.payload[:] = self.get_data(odb_session)
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new IBM MQ channel.

    Checks that no channel of the same name exists in the cluster and that the
    target service exists, then inserts the row and notifies all servers via
    the broker. Raises Exception on a duplicate name and ServiceMissingException
    when the service cannot be found.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_jms_wmq_create_request'
        response_elem = 'zato_channel_jms_wmq_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'def_id', 'queue', 'service')
        input_optional = ('data_format',)
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # Let's see if we already have a channel of that name before committing
            # any stuff into the database.
            existing_one = session.query(ChannelWMQ.id).\
                filter(ConnDefWMQ.cluster_id==input.cluster_id).\
                filter(ChannelWMQ.def_id==ConnDefWMQ.id).\
                filter(ChannelWMQ.name==input.name).\
                first()

            if existing_one:
                raise Exception('A IBM MQ channel `{}` already exists on this cluster'.format(input.name))

            # Is the service's name correct?
            service = session.query(ModelService).\
                filter(Cluster.id==input.cluster_id).\
                filter(ModelService.cluster_id==Cluster.id).\
                filter(ModelService.name==input.service).first()

            if not service:
                msg = 'Service `{}` does not exist in this cluster'.format(input.service)
                self.logger.info(msg)
                raise ServiceMissingException(msg)

            try:
                item = ChannelWMQ()
                item.name = input.name
                item.is_active = input.is_active
                item.queue = input.queue
                item.def_id = input.def_id
                item.service = service
                item.data_format = input.data_format

                session.add(item)
                session.commit()

                # Notify servers only after the transaction is committed so they
                # never pick up configuration that may yet be rolled back.
                input.id = item.id
                input.service_name = service.name
                input.action = BROKER_MSG_CHANNEL.WMQ_CREATE.value
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not create an IBM MQ MQ channel, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an IBM MQ channel.

    Validates that the new name is unique and that the target service exists,
    then updates the row and publishes a WMQ_EDIT broker message so all servers
    reconfigure the running channel.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_jms_wmq_edit_request'
        response_elem = 'zato_channel_jms_wmq_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'is_active', 'queue', 'service')
        input_optional = ('data_format', 'def_id', 'def_name')
        output_optional = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # Let's see if we already have an account of that name before committing
            # any stuff into the database.
            existing_one = session.query(ChannelWMQ.id).\
                filter(ConnDefWMQ.cluster_id==input.cluster_id).\
                filter(ChannelWMQ.def_id==ConnDefWMQ.id).\
                filter(ChannelWMQ.name==input.name).\
                filter(ChannelWMQ.id!=input.id).\
                first()

            if existing_one:
                raise Exception('A IBM MQ channel `{}` already exists on this cluster'.format(input.name))

            # Is the service's name correct?
            service = session.query(ModelService).\
                filter(Cluster.id==input.cluster_id).\
                filter(ModelService.cluster_id==Cluster.id).\
                filter(ModelService.name==input.service).first()

            if not service:
                msg = 'Service `{}` does not exist in this cluster'.format(input.service)
                self.logger.info(msg)
                raise Exception(msg)

            try:
                # We will have def_id if the request comes through Dashboard
                # but not if is coming through enmasse.
                def_id = input.def_id
                if not def_id:
                    def_id = session.query(ConnDefWMQ.id).\
                        filter(ConnDefWMQ.cluster_id==input.cluster_id).\
                        filter(ConnDefWMQ.name==input.def_name).\
                        one_or_none()

                    # NOTE(review): one_or_none may return None here if def_name
                    # does not match any definition, in which case the subscript
                    # below raises TypeError - confirm this is acceptable.
                    def_id = def_id[0]

                item = session.query(ChannelWMQ).filter_by(id=input.id).one()
                item.name = input.name
                item.is_active = input.is_active
                item.queue = input.queue
                item.def_id = def_id
                item.service = service
                item.data_format = input.data_format

                session.add(item)
                session.commit()

                # Notify servers only after a successful commit
                input.id = item.id
                input.service_name = service.name
                input.action = BROKER_MSG_CHANNEL.WMQ_EDIT.value
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not update IBM MQ definition, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an IBM MQ channel.

    Removes the row from the ODB and publishes a WMQ_DELETE broker message
    so all servers stop the running channel.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_jms_wmq_delete_request'
        response_elem = 'zato_channel_jms_wmq_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                def_ = session.query(ChannelWMQ).\
                    filter(ChannelWMQ.id==self.request.input.id).\
                    one()

                session.delete(def_)
                session.commit()

                # Notify servers only after the row is gone from the database
                self.broker_client.publish({
                    'action': BROKER_MSG_CHANNEL.WMQ_DELETE.value, 'id':def_.id
                })

            except Exception:
                session.rollback()
                self.logger.error('Could not delete IBM MQ channel, e:`%s`', format_exc())
                raise
# ################################################################################################################################
class OnMessageReceived(Service):
    """ A callback service invoked by WebSphere connectors for each message taken off a queue.

    Decodes MQ metadata (correlation ID, timestamps, MQMD), re-encodes the
    message body according to its CCSID, extracts the business payload and
    invokes the target service with MQ context attached.
    """
    def handle(self, _channel=CHANNEL.IBM_MQ, ts_format='YYYYMMDDHHmmssSS'):
        request = loads(self.request.raw_request)
        msg = request['msg']
        service_name = request['service_name']

        # Make MQ-level attributes easier to handle
        correlation_id = unhexlify(msg['correlation_id']) if msg['correlation_id'] else None
        expiration = datetime_from_ms(msg['expiration']) if msg['expiration'] else None

        # Combine MQ's separate date and time fields into one timezone-aware datetime
        timestamp = '{}{}'.format(msg['put_date'], msg['put_time'])
        timestamp = arrow_get(timestamp, ts_format).replace(tzinfo='UTC').datetime

        # Extract MQMD
        # NOTE(review): the MQMD arrives base64-encoded and pickled; pickle is
        # unsafe for untrusted input - confirm this channel is reachable only
        # by the internal MQ connector process.
        mqmd = msg['mqmd']
        mqmd = b64decode(mqmd)
        mqmd = pickle_loads(mqmd)

        # Find the message's CCSID
        request_ccsid = mqmd.CodedCharSetId

        # Try to find an encoding matching the CCSID,
        # if not found, use the default one.
        try:
            encoding = CCSIDConfig.encoding_map[request_ccsid]
        except KeyError:
            encoding = CCSIDConfig.default_encoding

        # Encode the input Unicode data into bytes
        msg['text'] = msg['text'].encode(encoding, errors='replace')

        # Extract the business payload
        data = payload_from_request(self.server.json_parser, self.cid, msg['text'], request['data_format'], None)

        # Invoke the target service
        self.invoke(service_name, data, _channel, wmq_ctx={
            'msg_id': unhexlify(msg['msg_id']),
            'correlation_id': correlation_id,
            'timestamp': timestamp,
            'put_time': msg['put_time'],
            'put_date': msg['put_date'],
            'expiration': expiration,
            'reply_to': msg['reply_to'],
            'data': data,
            'mqmd': mqmd
        })
# ################################################################################################################################
| 10,723
|
Python
|
.py
| 219
| 37.757991
| 130
| 0.552705
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,336
|
amqp_.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/amqp_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from traceback import format_exc
# Zato
from zato.common.broker_message import CHANNEL
from zato.common.exception import ServiceMissingException
from zato.common.odb.model import ChannelAMQP, Cluster, ConnDefAMQP, Service
from zato.common.odb.query import channel_amqp_list
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
class GetList(AdminService):
    """ Returns the list of AMQP channels defined in a given cluster.
    """
    name = 'zato.channel.amqp.get-list'
    _filter_by = ChannelAMQP.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_channel_amqp_get_list_request'
        response_elem = 'zato_channel_amqp_get_list_response'
        input_required = ('cluster_id',)
        output_required = ('id', 'name', 'is_active', 'queue', 'consumer_tag_prefix', 'def_name', 'def_id', 'service_name',
            'pool_size', 'ack_mode','prefetch_count')
        output_optional = ('data_format',)

    def get_data(self, session):
        # The shared search helper takes care of paging and filtering
        cluster_id = self.request.input.cluster_id
        return self._search(channel_amqp_list, session, cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as odb_session:
            self.response.payload[:] = self.get_data(odb_session)
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new AMQP channel.

    Ensures the name is unique within the cluster and the target service
    exists, inserts the row, then publishes an AMQP_CREATE broker message
    so servers start the channel.
    """
    name = 'zato.channel.amqp.create'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_amqp_create_request'
        response_elem = 'zato_channel_amqp_create_response'
        input_required = ('cluster_id', 'name', 'is_active', 'def_id', 'queue', 'consumer_tag_prefix', 'service', 'pool_size',
            'ack_mode','prefetch_count')
        input_optional = ('data_format',)
        output_required = ('id', 'name')

    def handle(self):
        with closing(self.odb.session()) as session:
            input = self.request.input

            # Let's see if we already have a channel of that name before committing
            # any stuff into the database.
            existing_one = session.query(ChannelAMQP.id).\
                filter(ConnDefAMQP.cluster_id==input.cluster_id).\
                filter(ChannelAMQP.def_id==ConnDefAMQP.id).\
                filter(ChannelAMQP.name==input.name).\
                first()

            if existing_one:
                raise Exception('An AMQP channel `{}` already exists on this cluster'.format(input.name))

            # Is the service's name correct?
            service = session.query(Service).\
                filter(Cluster.id==input.cluster_id).\
                filter(Service.cluster_id==Cluster.id).\
                filter(Service.name==input.service).\
                first()

            if not service:
                msg = 'Service `{}` does not exist in this cluster'.format(input.service)
                # NOTE(review): the IBM MQ counterpart raises ServiceMissingException
                # with the message only - confirm the (cid, msg) signature used here.
                raise ServiceMissingException(self.cid, msg)

            try:
                item = ChannelAMQP()
                item.name = input.name
                item.is_active = input.is_active
                item.queue = input.queue
                item.consumer_tag_prefix = input.consumer_tag_prefix
                item.def_id = input.def_id
                item.service = service
                item.pool_size = input.pool_size
                item.ack_mode = input.ack_mode
                item.prefetch_count = input.prefetch_count
                item.data_format = input.data_format

                session.add(item)
                session.commit()

                # Notify servers only after a successful commit
                input.action = CHANNEL.AMQP_CREATE.value
                input.def_name = item.def_.name
                input.id = item.id
                input.service_name = service.name
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not create an AMQP channel, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Edit(AdminService):
    """ Updates an AMQP channel.

    Validates name uniqueness and service existence, updates the row and
    publishes an AMQP_EDIT broker message (including the old name so servers
    can locate the running channel being renamed).
    """
    name = 'zato.channel.amqp.edit'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_amqp_edit_request'
        response_elem = 'zato_channel_amqp_edit_response'
        input_required = ('id', 'cluster_id', 'name', 'is_active', 'def_id', 'queue', 'consumer_tag_prefix', 'service',
            'pool_size', 'ack_mode','prefetch_count')
        input_optional = ('data_format',)
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input
        with closing(self.odb.session()) as session:

            # Let's see if we already have an account of that name before committing
            # any stuff into the database.
            existing_one = session.query(ChannelAMQP.id).\
                filter(ConnDefAMQP.cluster_id==input.cluster_id).\
                filter(ChannelAMQP.def_id==ConnDefAMQP.id).\
                filter(ChannelAMQP.name==input.name).\
                filter(ChannelAMQP.id!=input.id).\
                first()

            if existing_one:
                raise Exception('An AMQP channel `{}` already exists on this cluster'.format(input.name))

            # Is the service's name correct?
            service = session.query(Service).\
                filter(Cluster.id==input.cluster_id).\
                filter(Service.cluster_id==Cluster.id).\
                filter(Service.name==input.service).\
                first()

            if not service:
                msg = 'Service [{0}] does not exist in this cluster'.format(input.service)
                raise Exception(msg)

            try:
                item = session.query(ChannelAMQP).filter_by(id=input.id).one()

                # Remembered before the rename so the broker message can carry it
                old_name = item.name

                item.name = input.name
                item.is_active = input.is_active
                item.queue = input.queue
                item.consumer_tag_prefix = input.consumer_tag_prefix
                item.def_id = input.def_id
                item.service = service
                item.pool_size = input.pool_size
                item.ack_mode = input.ack_mode
                item.prefetch_count = input.prefetch_count
                item.data_format = input.data_format

                session.add(item)
                session.commit()

                # Notify servers only after a successful commit
                input.action = CHANNEL.AMQP_EDIT.value
                input.def_name = item.def_.name
                input.id = item.id
                input.old_name = old_name
                input.service_name = service.name
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('Could not update the AMQP definition, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes an AMQP channel.

    Removes the row from the ODB and publishes an AMQP_DELETE broker
    message so servers stop the running channel.
    """
    name = 'zato.channel.amqp.delete'

    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_amqp_delete_request'
        response_elem = 'zato_channel_amqp_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(ChannelAMQP).\
                    filter(ChannelAMQP.id==self.request.input.id).\
                    one()

                # Cache everything the broker message needs before the row is
                # deleted. Fix: item.name was previously read after delete+commit,
                # when the instance is expired and attribute access can fail.
                item_id = item.id
                item_name = item.name
                def_name = item.def_.name

                session.delete(item)
                session.commit()

                # Notify servers only after the row is gone from the database
                self.broker_client.publish({
                    'action': CHANNEL.AMQP_DELETE.value,
                    'name': item_name,
                    'id':item_id,
                    'def_name':def_name,
                })

            except Exception:
                session.rollback()
                self.logger.error('Could not delete the AMQP channel, e:`%s`', format_exc())
                raise
# ################################################################################################################################
| 8,809
|
Python
|
.py
| 181
| 36.381215
| 130
| 0.531873
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,337
|
zmq.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/zmq.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from traceback import format_exc
# Zato
from zato.common.broker_message import CHANNEL
from zato.common.odb.model import ChannelZMQ, Cluster, Service as ServiceModel
from zato.common.odb.query import channel_zmq_list
from zato.common.util.api import is_port_taken, require_tcp_port
from zato.common.simpleio_ import drop_sio_elems
from zato.server.service import Int, Service
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
class _UpdateSIO(AdminSIO):
    """ Common class for Create, Edit and Start SIO parameters.

    Shared by all ZeroMQ channel services so their wire-level contracts
    stay in sync; Start extends these tuples with its own elements.
    """
    input_required = ('cluster_id', 'name', 'is_active', 'address', 'socket_type', 'socket_method', 'pool_strategy', \
        'service_source', 'service')
    input_optional = ('id', 'sub_key', 'data_format', 'msg_source')
# ################################################################################################################################
class GetList(AdminService):
    """ Returns the list of ZeroMQ channels defined in a given cluster.
    """
    _filter_by = ChannelZMQ.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_channel_zmq_get_list_request'
        response_elem = 'zato_channel_zmq_get_list_response'
        input_required = ('cluster_id',)
        input_optional = GetListAdminSIO.input_optional + ('msg_source',)
        output_required = ('id', 'name', 'is_active', 'address', 'socket_type', 'socket_method',
            'service_name', 'pool_strategy', 'service_source', 'data_format')
        output_optional = ('sub_key',)

    def get_data(self, session):
        # The shared search helper takes care of paging and filtering
        cluster_id = self.request.input.cluster_id
        return self._search(channel_zmq_list, session, cluster_id, False)

    def handle(self):
        with closing(self.odb.session()) as odb_session:
            self.response.payload[:] = self.get_data(odb_session)
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new ZeroMQ channel.

    Validates the TCP port in the address, ensures the name is unique and the
    target service exists, inserts the row and publishes a ZMQ_CREATE broker
    message so servers start the channel.
    """
    class SimpleIO(_UpdateSIO):
        request_elem = 'zato_channel_zmq_create_request'
        response_elem = 'zato_channel_zmq_create_response'
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input

        # Reject addresses without a valid TCP port before touching the database
        require_tcp_port(input.address)

        with closing(self.odb.session()) as session:
            existing_one = session.query(ChannelZMQ.id).\
                filter(ChannelZMQ.cluster_id==input.cluster_id).\
                filter(ChannelZMQ.name==input.name).\
                first()

            if existing_one:
                raise Exception('A ZeroMQ channel `{}` already exists in this cluster'.format(input.name))

            # Is the service's name correct?
            service = session.query(ServiceModel).\
                filter(Cluster.id==input.cluster_id).\
                filter(ServiceModel.cluster_id==Cluster.id).\
                filter(ServiceModel.name==input.service).first()

            if not service:
                msg = 'Service `{}` does not exist in this cluster'.format(input.service)
                raise Exception(msg)

            try:
                # NOTE(review): the default is bytes (b'') while sub_key is presumably
                # a text column - confirm this round-trips correctly through the ODB.
                sub_key = input.get('sub_key', b'')

                item = ChannelZMQ()
                item.name = input.name
                item.is_active = input.is_active
                item.address = input.address
                item.socket_type = input.socket_type
                item.socket_method = input.socket_method
                item.sub_key = sub_key
                item.cluster_id = input.cluster_id
                item.service = service
                item.pool_strategy = input.pool_strategy
                item.service_source = input.service_source
                item.data_format = input.data_format

                session.add(item)
                session.commit()

                # Notify servers only after a successful commit
                input.action = CHANNEL.ZMQ_CREATE.value
                input.sub_key = sub_key
                input.service_name = service.name
                input.source_server = self.server.get_full_name()
                input.id = item.id
                input.config_cid = 'channel.zmq.create.{}.{}'.format(input.source_server, self.cid)
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                self.logger.error('ZeroMQ channel could not be created, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Edit(AdminService):
    """ Updates a ZeroMQ channel.

    The socket type of an existing channel cannot be changed - attempting to
    do so raises ValueError. On success, publishes a ZMQ_EDIT broker message
    (carrying the old name and socket type) so servers reconfigure the
    running channel.
    """
    class SimpleIO(_UpdateSIO):
        request_elem = 'zato_channel_zmq_edit_request'
        response_elem = 'zato_channel_zmq_edit_response'
        output_required = ('id', 'name')

    def handle(self):
        input = self.request.input

        # Reject addresses without a valid TCP port before touching the database
        require_tcp_port(input.address)

        with closing(self.odb.session()) as session:
            existing_one = session.query(ChannelZMQ.id).\
                filter(ChannelZMQ.cluster_id==input.cluster_id).\
                filter(ChannelZMQ.name==input.name).\
                filter(ChannelZMQ.id!=input.id).\
                first()

            if existing_one:
                raise Exception('A ZeroMQ channel `{}` already exists in this cluster'.format(input.name))

            # Is the service's name correct?
            service = session.query(ServiceModel).\
                filter(Cluster.id==input.cluster_id).\
                filter(ServiceModel.cluster_id==Cluster.id).\
                filter(ServiceModel.name==input.service).first()

            if not service:
                msg = 'Service `{}` does not exist in this cluster'.format(input.service)
                raise Exception(msg)

            try:
                item = session.query(ChannelZMQ).filter_by(id=input.id).one()

                if item.socket_type != input.socket_type:
                    raise ValueError('Cannot change a ZeroMQ channel\'s socket type')

                # Captured before the update so the broker message can tell
                # servers what the previous configuration was
                old_socket_type = item.socket_type
                old_name = item.name

                item.name = input.name
                item.is_active = input.is_active
                item.address = input.address
                item.socket_type = input.socket_type
                item.socket_method = input.socket_method
                item.sub_key = input.sub_key
                item.service = service
                item.pool_strategy = input.pool_strategy
                item.service_source = input.service_source
                item.data_format = input.data_format

                session.add(item)
                session.commit()

                # Notify servers only after a successful commit
                input.action = CHANNEL.ZMQ_EDIT.value
                input.sub_key = input.get('sub_key', b'')
                input.service_name = service.name
                input.source_server = self.server.get_full_name()
                input.id = item.id
                input.config_cid = 'channel.zmq.edit.{}.{}'.format(input.source_server, self.cid)
                input.old_socket_type = old_socket_type
                input.old_name = old_name
                self.broker_client.publish(input)

                self.response.payload.id = item.id
                self.response.payload.name = item.name

            except Exception:
                # Fix: this message previously said 'created' - it is an update operation
                self.logger.error('ZeroMQ channel could not be updated, e:`%s`', format_exc())
                session.rollback()
                raise
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes a ZeroMQ channel.

    Removes the row from the ODB and publishes a ZMQ_DELETE broker message
    (including the socket type and originating server) so all servers stop
    the running channel.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_zmq_delete_request'
        response_elem = 'zato_channel_zmq_delete_response'
        input_required = ('id',)
        input_optional = ('msg_source',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                item = session.query(ChannelZMQ).\
                    filter(ChannelZMQ.id==self.request.input.id).\
                    one()

                session.delete(item)
                session.commit()

                # Notify servers only after the row is gone from the database
                source_server = self.server.get_full_name()

                msg = {
                    'action': CHANNEL.ZMQ_DELETE.value,
                    'name': item.name,
                    'id':item.id,
                    'source_server': source_server,
                    'socket_type': item.socket_type,
                    'config_cid': 'channel.zmq.delete.{}.{}'.format(source_server, self.cid)
                }
                self.broker_client.publish(msg)

            except Exception:
                session.rollback()
                self.logger.error('ZeroMQ channel could not be deleted, e:`%s`', format_exc())
                raise
# ################################################################################################################################
class Start(Service):
    """ Starts a ZeroMQ channel, unless its bind port is already taken.
    """
    class SimpleIO(_UpdateSIO):
        input_required = _UpdateSIO.input_required + ('id', 'config_cid')
        input_optional = tuple(drop_sio_elems(_UpdateSIO.input_optional, 'id')) + (Int('bind_port'), 'service_name')
        request_elem = 'zato_channel_zmq_start_request'
        response_elem = 'zato_channel_zmq_start_response'

    def handle(self):
        config = self.request.input
        bind_port = config.bind_port

        # Refuse to start if something else already listens on our port
        if bind_port and is_port_taken(bind_port):
            self.logger.warning('Cannot bind Zero MQ channel `%s` to TCP port %s (already taken)', config.name, bind_port)
            return

        self.server.worker_store.zmq_channel_create(config)
# ################################################################################################################################
| 10,381
|
Python
|
.py
| 204
| 39
| 130
| 0.536067
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,338
|
client.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/web_socket/client.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from random import randint
from traceback import format_exc
# ciso8601
try:
from zato.common.util.api import parse_datetime
except ImportError:
from dateutil.parser import parse as parse_datetime
# Zato
from zato.common.broker_message import PUBSUB as BROKER_MSG_PUBSUB
from zato.common.odb.model import ChannelWebSocket, Cluster, WebSocketClient
from zato.common.odb.query import web_socket_client_by_pub_id, web_socket_clients_by_server_id
from zato.common.util.sql import set_instance_opaque_attrs
from zato.server.service import AsIs, List, Opaque
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
# Core-SQLAlchemy table object used for direct (non-ORM) statements against WebSocket client rows
_wsx_client_table = WebSocketClient.__table__
# ################################################################################################################################
class Create(AdminService):
    """ Stores in ODB information about an established connection of an authenticated WebSocket client.

    Looks up the channel the client connected through, creates a WebSocketClient
    row with connection metadata and returns its database ID.
    """
    class SimpleIO(AdminSIO):
        input_required = (AsIs('pub_client_id'), AsIs('ext_client_id'), 'is_internal', 'local_address', 'peer_address',
            'peer_fqdn', 'connection_time', 'last_seen', 'channel_name')
        input_optional = 'ext_client_name', 'peer_forwarded_for', 'peer_forwarded_for_fqdn'
        output_optional = 'ws_client_id'
        response_elem = None

    def handle(self):
        req = self.request.input

        with closing(self.odb.session()) as session:

            # Create the client itself
            client = self._new_zato_instance_with_cluster(WebSocketClient, self.server.cluster_id)

            # Resolve the channel the client connected through - .one() raises if it does not exist
            channel = session.query(ChannelWebSocket).\
                filter(Cluster.id==self.server.cluster_id).\
                filter(ChannelWebSocket.name==req.channel_name).\
                one()

            # NOTE(review): the primary key is assigned client-side at random rather
            # than from a database sequence - confirm collisions are acceptable here.
            client.id = randint(100_000, 2_000_000_000)

            client.is_internal = req.is_internal
            client.pub_client_id = req.pub_client_id
            client.ext_client_id = req.ext_client_id
            client.ext_client_name = req.get('ext_client_name', '')
            client.local_address = req.local_address
            client.peer_address = req.peer_address
            client.peer_fqdn = req.peer_fqdn

            # Timestamps arrive as strings and are parsed into datetime objects
            client.connection_time = parse_datetime(req.connection_time)
            client.last_seen = parse_datetime(req.last_seen)

            client.server_proc_pid = self.server.pid
            client.channel_id = channel.id
            client.server_id = self.server.id
            client.server_name = self.server.name

            # Opaque attributes
            set_instance_opaque_attrs(client, req, ['channel_name'])

            session.add(client)
            session.commit()

            self.response.payload.ws_client_id = client.id
# ################################################################################################################################
class DeleteByPubId(AdminService):
    """ Deletes information about a previously established WebSocket connection. Called when a client disconnects.
    """
    class SimpleIO(AdminSIO):
        input_required = (AsIs('pub_client_id'),)

    def handle(self):
        pub_client_id = self.request.input.pub_client_id

        with closing(self.odb.session()) as session:
            client = web_socket_client_by_pub_id(session, pub_client_id)

            # Nothing to do if the client row is already gone
            if not client:
                return

            delete_stmt = _wsx_client_table.delete().where(_wsx_client_table.c.id==client.id)
            session.execute(delete_stmt)
            session.commit()
# ################################################################################################################################
class UnregisterWSSubKey(AdminService):
    """ Notifies all workers about sub keys that will no longer be accessible because the current WSX client disconnects.
    """
    class SimpleIO(AdminSIO):
        input_required = List('sub_key_list')
        input_optional = 'needs_wsx_close'

    def handle(self):

        # If configured to, delete the WebSocket's persistent subscription
        for sub_key in self.request.input.sub_key_list:
            sub = self.pubsub.get_subscription_by_sub_key(sub_key)
            if sub:
                # Unsubscribe either when the caller explicitly requests it
                # or when the subscription itself is flagged unsub-on-close
                if self.request.input.needs_wsx_close or (sub and sub.unsub_on_wsx_close):
                    self.invoke('zato.pubsub.pubapi.unsubscribe', {
                        'sub_key': sub.sub_key,
                        'topic_name': sub.topic_name,
                    })

        # Update in-RAM state of workers
        self.broker_client.publish({
            'action': BROKER_MSG_PUBSUB.WSX_CLIENT_SUB_KEY_SERVER_REMOVE.value,
            'sub_key_list': self.request.input.sub_key_list,
        })
# ################################################################################################################################
class DeleteByServer(AdminService):
    """ Deletes information about a previously established WebSocket connection. Called when a server shuts down.
    """
    class SimpleIO(AdminSIO):
        input_required = 'needs_pid',

    def handle(self):
        # When needs_pid is given, restrict the deletion to clients of this very process
        needs_pid = self.request.input.get('needs_pid')
        server_pid = self.server.pid if needs_pid else None

        with closing(self.odb.session()) as session:
            matching_clients = web_socket_clients_by_server_id(session, self.server.id, server_pid)
            matching_clients.delete()
            session.commit()
# ################################################################################################################################
class NotifyPubSubMessage(AdminService):
    """ Notifies a WebSocket client of new messages available.
    """
    class SimpleIO(AdminSIO):
        input_required = (AsIs('pub_client_id'), 'channel_name', AsIs('request'))
        output_optional = (AsIs('r'),)
        response_elem = 'r'

    def handle(self):
        input_data = self.request.input
        wsx_api = self.server.worker_store.web_socket_api

        try:
            # Deliver the notification through the in-process WSX API and
            # relay whatever it returns back to the caller.
            self.response.payload.r = wsx_api.notify_pubsub_message(
                input_data.channel_name, self.cid, input_data.pub_client_id, input_data.request)
        except Exception:
            self.logger.warning(format_exc())
            raise
# ################################################################################################################################
class SetLastSeen(AdminService):
    """ Sets last_seen for input WSX client.
    """
    class SimpleIO(AdminSIO):
        input_required = 'id', Opaque('last_seen')

    def handle(self):
        input_data = self.request.input

        # A direct core-level UPDATE - no need to load the ORM instance first
        update_stmt = _wsx_client_table.update().\
            values(last_seen=input_data.last_seen).\
            where(_wsx_client_table.c.id==input_data.id)

        with closing(self.odb.session()) as session:
            session.execute(update_stmt)
            session.commit()
# ################################################################################################################################
| 7,145
|
Python
|
.py
| 137
| 43.532847
| 130
| 0.554934
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,339
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/web_socket/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from json import dumps
from logging import getLogger
from traceback import format_exc
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.api import DATA_FORMAT
from zato.common.broker_message import CHANNEL
from zato.common.odb.model import ChannelWebSocket, PubSubSubscription, PubSubTopic, SecurityBase, Service as ServiceModel, \
WebSocketClient
from zato.common.odb.query import channel_web_socket_list, channel_web_socket, sec_base, service, web_socket_client, \
web_socket_client_by_pub_id, web_socket_client_list, web_socket_sub_key_data_list
from zato.common.util.api import is_port_taken
from zato.common.util.sql import elems_with_opaque
from zato.common.util.time_ import datetime_from_ms
from zato.server.service import AsIs, DateTime, Int, Service
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Type checking
# Imports below are used only for static analysis - the `if 0` guard means they
# never execute at runtime, which avoids a circular import.
if 0:

    # Zato
    from zato.server.connection.web_socket import ChannelWebSocket as ChannelWebSocketImpl

    # For pyflakes
    ChannelWebSocketImpl = ChannelWebSocketImpl
# ################################################################################################################################
# Generic SimpleIO attributes shared by the WSX channel create/edit and get-list
# services below - audit-log limits plus ping-related settings.
generic_attrs = ['is_audit_log_sent_active', 'is_audit_log_received_active', 'max_len_messages_sent', \
    'max_len_messages_received', 'max_bytes_per_message_sent', 'max_bytes_per_message_received',
    Int('pings_missed_threshold'), Int('ping_interval')]
# Module-level configuration consumed by the meta-classes
# (GetListMeta, CreateEditMeta, DeleteMeta) applied to the services below.
elem = 'channel_web_socket'
model = ChannelWebSocket
label = 'a WebSocket channel'
get_list_docs = 'WebSocket channels'
broker_message = CHANNEL
broker_message_prefix = 'WEB_SOCKET_'
list_func = channel_web_socket_list
skip_input_params = ['cluster_id', 'service_id', 'is_out']
create_edit_input_required_extra = ['service_name']
create_edit_input_optional_extra = generic_attrs + ['extra_properties']
input_optional_extra = ['security']
output_optional_extra = ['sec_type', 'service_name', 'address', 'security_name'] + generic_attrs
create_edit_force_rewrite = {'service_name', 'address'}
# ################################################################################################################################
# SQLAlchemy Core table objects and statement constructors used directly by the services below
SubscriptionTable = PubSubSubscription.__table__
SubscriptionDelete = SubscriptionTable.delete

WSXChannelTable = ChannelWebSocket.__table__

WSXClientTable = WebSocketClient.__table__
WSXClientDelete = WSXClientTable.delete

# A dedicated logger for pub/sub-related messages
logger_pubsub = getLogger('zato_pubsub.srv')
# ################################################################################################################################
def _get_hook_service(self):
return self.server.fs_server_config.get('wsx', {}).get('hook_service', '')
# ################################################################################################################################
def broker_message_hook(self, input, instance, attrs, service_type):
    """ Meta-class hook - enriches the broker message published after create/edit/delete
    with the originating server's details and, for create/edit, with full channel
    data read back from ODB.
    """
    input.source_server = self.server.get_full_name()
    input.config_cid = 'channel.web_socket.{}.{}.{}'.format(service_type, input.source_server, self.cid)

    if service_type == 'create_edit':
        with closing(self.odb.session()) as session:
            full_data = channel_web_socket(session, input.cluster_id, instance.id)
            input.sec_type = full_data.sec_type
            input.sec_name = full_data.sec_name
            input.vault_conn_default_auth_method = full_data.vault_conn_default_auth_method

        # The hook service comes from server.conf rather than from user input
        input.hook_service = _get_hook_service(self)
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Meta-class hook - on create/edit, resolves the channel's service by name,
    marks the channel as inbound and optionally attaches a security definition
    looked up by its name.
    """
    if attrs.is_create_edit:

        # The hook service comes from server.conf, not from user input
        instance.hook_service = _get_hook_service(self)

        # WSX channels are always inbound connections
        instance.is_out = False

        service = attrs._meta_session.query(ServiceModel).\
            filter(ServiceModel.name==input.service_name).\
            filter(ServiceModel.cluster_id==input.cluster_id).\
            first()

        if not service:
            raise ValueError('Service not found `{}`'.format(input.service_name))
        else:
            instance.service = service

        # Optionally, assign to the channel a security ID based on its name
        if input.get('security'):
            sec = attrs._meta_session.query(SecurityBase).\
                filter(SecurityBase.name==input.security).\
                filter(SecurityBase.cluster_id==input.cluster_id).\
                first()
            if sec:
                instance.security_id = sec.id
# ################################################################################################################################
def response_hook(self, input, _ignored_instance, attrs, service_type):
    """ Meta-class hook - enriches each get-list item with its service's name and,
    if a security definition is attached, with that definition's name.
    """
    if service_type == 'get_list' and self.name == 'zato.channel.web-socket.get-list':
        with closing(self.odb.session()) as session:
            for item in self.response.payload:
                _service = service(session, self.server.cluster_id, item.service_id)
                item.service_name = _service.name
                if item.security_id:
                    # use_one=False - the definition may no longer exist, in which case None is returned
                    _security = sec_base(session, self.server.cluster_id, item.security_id, use_one=False)
                    if _security:
                        item.security_name = _security.name
                    else:
                        item.security_name = None
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of WebSocket channels - implementation generated by GetListMeta
    from the module-level configuration above.
    """
    _filter_by = ChannelWebSocket.name,
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a new WebSocket channel - implementation generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an existing WebSocket channel - implementation generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes a WebSocket channel - implementation generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
class Start(Service):
    """ Starts a WebSocket channel.
    """
    class SimpleIO:
        input_required = 'id', 'config_cid'
        input_optional = Int('bind_port'), 'name', 'service_name', 'sec_name', 'sec_type', 'vault_conn_default_auth_method', \
            'is_active', 'address', 'hook_service', 'data_format', Int('new_token_wait_time'), Int('token_ttl'), \
            'extra_properties'
        request_elem = 'zato_channel_web_socket_start_request'
        response_elem = 'zato_channel_web_socket_start_response'

    def handle(self):
        input = self.request.input

        # Refuse to start if something already listens on the requested TCP port
        if input.bind_port and is_port_taken(input.bind_port):
            self.logger.warning('Cannot bind WebSocket channel `%s` to TCP port %s (already taken)', input.name, input.bind_port)
        else:
            self.server.worker_store.web_socket_channel_create(self.request.input)
# ################################################################################################################################
class GetConnectionList(AdminService):
    """ Returns a list of WSX connections for a particular channel.
    """
    _filter_by = WebSocketClient.ext_client_id,

    class SimpleIO(GetListAdminSIO):
        input_required = 'id', 'cluster_id'
        output_required = ('local_address', 'peer_address', 'peer_fqdn', AsIs('pub_client_id'), AsIs('ext_client_id'),
            DateTime('connection_time'), 'server_name', 'server_proc_pid')
        output_optional = 'ext_client_name', 'sub_count', 'peer_forwarded_for', 'peer_forwarded_for_fqdn'
        output_repeated = True

    def get_data(self, session):
        """ Returns a (paginated) list of client connections for the input channel ID.
        """
        result = self._search(web_socket_client_list, session, self.request.input.cluster_id, self.request.input.id, False)

        # Merge opaque (JSON-serialized) attributes into each result row
        result = elems_with_opaque(result)
        return result

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            self.response.payload[:] = data
# ################################################################################################################################
class _BaseCommand(AdminService):
    """ Base class for services that run a command against an individual WSX connection.
    """
    # Name of the server-side service that subclasses delegate to
    server_service = None

    class SimpleIO(AdminSIO):
        input_required = 'cluster_id', 'id', AsIs('pub_client_id')
        input_optional = 'request_data', Int('timeout')
        output_optional = 'response_data'
        response_elem = None

    def _get_wsx_client(self, session):
        """ Returns a WebSocketClient row matching the input data or raises an exception if it cannot be found.
        """
        # type: (object) -> WebSocketClient
        client = web_socket_client(session, self.request.input.cluster_id, self.request.input.id,
            self.request.input.pub_client_id)
        if not client:
            raise Exception('No such WebSocket connection `{}`'.format(self.request.input.toDict()))
        else:
            return client
# ################################################################################################################################
class _BaseAPICommand(_BaseCommand):
    """ Base class for the public API of WSX commands - looks up the server and PID
    hosting the connection and forwards the request to the low-level service there.
    """
    def handle(self):
        with closing(self.odb.session()) as session:
            client = self._get_wsx_client(session)
            server_name = client.server_name
            server_proc_pid = client.server_proc_pid

            self.logger.info(
                'WSX API request: `%s` `%s` `%s` `%s` (%s %s:%s)', self.server_service, self.request.input,
                client.pub_client_id, client.ext_client_id, self.cid, server_name, server_proc_pid)

        # Invoke the low-level service on the exact server and PID that hold the connection
        invoker = self.server.rpc.get_invoker_by_server_name(server_name)
        server_response = invoker.invoke(
            self.server_service,
            self.request.input,
            pid=server_proc_pid,
            data_format=DATA_FORMAT.JSON
        )

        self.logger.info('WSX API response: `%s` (%s)', server_response, self.cid)

        if server_response:

            # It can be a string object that we will then use as-is ..
            if isinstance(server_response, str):
                response_data = server_response

            # .. otherwise ..
            else:

                # It may be a dict on a successful invocation ..
                if isinstance(server_response, dict):
                    data = server_response

                # .. otherwise, it may be a ServiceInvocationResult object.
                else:
                    data = server_response.data

                # The actual response may be a dict by default (on success),
                # or a string representation of a traceback object caught
                # while invoking another PID.
                response_data = data.get('response_data') or {}

            # No matter what it is, we can return it now.
            self.response.payload.response_data = response_data
        else:
            self.logger.warning('No server response from %s:%s received to command `%s` (sr:%s)',
                server_name, server_proc_pid, self.request.input, server_response)
# ################################################################################################################################
class _BaseServerCommand(_BaseCommand):
    """ Base class for the low-level, per-server implementations of WSX commands -
    these must run on the server process that hosts the given connection.
    """
    # Name of the connector method to call - set by subclasses
    func_name = '<overridden-by-subclasses>'

    # Whether the connector's response should be JSON-serialized before being returned
    serialize_payload_to_json = False

    def _get_server_response(self, func, pub_client_id):
        raise NotImplementedError('Must be implemented in subclasses')

    def handle(self):
        pub_client_id = self.request.input.pub_client_id

        try:
            with closing(self.odb.session()) as session:
                client = web_socket_client_by_pub_id(session, pub_client_id)
                wsx_channel_name = client.channel_name

                # Find the in-process connector for that channel and call the configured method on it
                connector = self.server.worker_store.web_socket_api.connectors[wsx_channel_name]
                func = getattr(connector, self.func_name)
                response_data = self._get_server_response(func, pub_client_id)
        except Exception:
            # Return the traceback to the caller instead of raising it here
            self.response.payload.response_data = format_exc()
        else:
            response_data = dumps(response_data) if self.serialize_payload_to_json else response_data
            self.response.payload.response_data = response_data
# ################################################################################################################################
class DisconnectConnection(_BaseAPICommand):
    """ Deletes an existing WSX connection.
    """
    # Delegates to the low-level service on the server hosting the connection
    server_service = 'zato.channel.web-socket.disconnect-connection-server'
# ################################################################################################################################
class DisconnectConnectionServer(_BaseServerCommand):
    """ Low-level implementation of WSX connection deletion - must be invoked on the server where the connection exists.
    """
    func_name = 'disconnect_client'

    def _get_server_response(self, func, pub_client_id):
        # Disconnecting produces no response data - this returns None
        func(self.cid, pub_client_id)
# ################################################################################################################################
class SubscribeWSX(_BaseAPICommand):
    """ Subscribes a WebSocket, identified by pub_client_id, to a topic by its name.
    """
    server_service = 'zato.channel.web-socket.server-subscribe-wsx'
# ################################################################################################################################
class ServerSubscribeWSX(_BaseServerCommand):
    """ Low-level implementation of SubscribeWSX that is invoked on the same server a WSX is on.
    """
    func_name = 'subscribe_to_topic'

    def _get_server_response(self, func, pub_client_id):
        # request_data carries the name of the topic to subscribe to
        return func(self.cid, pub_client_id, self.request.input.request_data)
# ################################################################################################################################
class InvokeWSX(_BaseAPICommand):
    """ Invokes an existing WSX connection.
    """
    server_service = 'zato.channel.web-socket.server-invoke-wsx'
# ################################################################################################################################
class ServerInvokeWSX(_BaseServerCommand):
    """ Low-level implementation of WSX connection invocations - must be invoked on the server where the connection exists.
    """
    func_name = 'invoke'

    # The invocation result is serialized to JSON before being returned to the caller
    serialize_payload_to_json = True

    def _get_server_response(self, func, pub_client_id):
        return func(self.cid, pub_client_id, self.request.input.request_data, self.request.input.timeout)
# ################################################################################################################################
class GetSubKeyDataList(AdminService):
    """ Returns a list of pub/sub sub_key data for a particular WSX connection.
    """
    _filter_by = PubSubTopic.name,

    class SimpleIO(GetListAdminSIO):
        input_required = 'cluster_id', AsIs('pub_client_id')
        output_required = ('sub_id', 'sub_key', DateTime('creation_time'), 'topic_id', 'topic_name', 'sub_pattern_matched',
            AsIs('ext_client_id'), 'endpoint_id', 'endpoint_name')
        output_repeated = True

    def get_data(self, session):
        """ Returns a (paginated) list of sub_key rows for the input pub_client_id.
        """
        return self._search(web_socket_sub_key_data_list,
            session, self.request.input.cluster_id, self.request.input.pub_client_id, False)

    def handle(self):
        with closing(self.odb.session()) as session:
            data = self.get_data(session)
            for item in data:
                # creation_time is stored in seconds - convert to milliseconds before building a datetime object
                item.creation_time = datetime_from_ms(item.creation_time * 1000)
            self.response.payload[:] = data
# ################################################################################################################################
class Broadcast(AdminService):
    """ Broadcasts the input message to all WebSocket connections attached to a channel, looked up by its name.
    """
    def handle(self):
        raw_request = self.request.raw_request

        # Find the in-process connector for the requested channel ..
        connectors = self.server.worker_store.web_socket_api.connectors
        connector = connectors[raw_request['channel_name']] # type: ChannelWebSocketImpl

        # .. and send the message to all of its connections.
        connector.broadcast(self.cid, raw_request['data'])
# ################################################################################################################################
| 17,187
|
Python
|
.py
| 299
| 50.143813
| 130
| 0.533874
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,340
|
subscription.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/web_socket/subscription.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
# Zato
from zato.common.api import WEB_SOCKET
from zato.common.odb.model import WebSocketSubscription
from zato.server.service import AsIs, Bool
from zato.server.service.internal import AdminService, AdminSIO
# Shortcut to the WSX subscription pattern templates
_pattern = WEB_SOCKET.PATTERN

# SimpleIO definitions shared by Create and CreateDefault below
_create_input_required = (AsIs('ext_client_id'), AsIs('client_id'), AsIs('channel_id'), 'channel_name')
_create_input_optional = ('is_internal', 'is_durable', Bool('has_gd'))
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new message subscription for a given WebSocket client.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_web_socket_subscription_create_request'
        response_elem = 'zato_channel_web_socket_subscription_create_response'
        input_required = _create_input_required + ('pattern', 'is_by_ext_id', 'is_by_channel')
        input_optional = _create_input_optional

    def handle(self):
        req = self.request.input

        with closing(self.odb.session()) as session:

            # Build a new subscription row out of input data ..
            sub = WebSocketSubscription()
            sub.is_internal = req.is_internal
            sub.pattern = req.pattern
            sub.is_by_ext_id = req.is_by_ext_id
            sub.is_by_channel = req.is_by_channel
            sub.is_durable = req.is_durable
            sub.has_gd = req.has_gd
            sub.client_id = req.client_id
            sub.channel_id = req.channel_id
            sub.server_id = self.server.id

            # .. and persist it.
            session.add(sub)
            session.commit()
# ################################################################################################################################
class CreateDefault(AdminService):
    """ Creates default subscriptions for a client - one matched by its external
    client ID and one matched by the channel it is connected through.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_web_socket_subscription_create_default_request'
        response_elem = 'zato_channel_web_socket_subscription_create_default_response'
        input_required = _create_input_required
        input_optional = _create_input_optional

    def handle(self):
        req = self.request.input

        # pattern, is_by_ext_id, is_by_channel
        patterns = [
            (_pattern.BY_EXT_ID.format(req.ext_client_id), True, False),
            (_pattern.BY_CHANNEL.format(req.channel_name), False, True),
        ]

        for pattern, is_by_ext_id, is_by_channel in patterns:
            self.invoke(Create.get_name(), {
                'ext_client_id': req.ext_client_id,

                # Fixed: this key used to be 'client_id.' (with a trailing dot), so client_id
                # was never actually passed to Create even though Create requires it.
                'client_id': req.client_id,

                'channel_id': req.channel_id,
                'channel_name': req.channel_name,
                'pattern': pattern,
                'is_by_ext_id': is_by_ext_id,
                'is_by_channel': is_by_channel,
                'is_internal': req.get('is_internal', False),
                'is_durable': req.get('is_durable', False),
                'has_gd': req.get('has_gd', False),
            })
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes a pub/sub subscription previously created or resumed by current WebSocket.
    """
    # NOTE(review): no handle method is defined here - this service currently accepts
    # requests but performs no work; confirm this is intentional.
    class SimpleIO(AdminSIO):
        request_elem = 'zato_channel_web_socket_subscription_delete_request'
        response_elem = 'zato_channel_web_socket_subscription_delete_response'
| 3,874
|
Python
|
.py
| 77
| 42.142857
| 130
| 0.553879
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,341
|
cleanup.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/channel/web_socket/cleanup.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from datetime import datetime, timedelta
from logging import getLogger
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems
# Zato
from zato.common.api import WEB_SOCKET
from zato.common.broker_message import PUBSUB
from zato.common.odb.model import ChannelWebSocket, PubSubSubscription, WebSocketClient, WebSocketClientPubSubKeys
from zato.common.util.pubsub import get_topic_sub_keys_from_sub_keys
from zato.common.util.api import parse_extra_into_dict
from zato.common.util.time_ import datetime_from_ms, utcnow_as_ms
from zato.server.service.internal import AdminService
# ################################################################################################################################
# ################################################################################################################################
# A dedicated logger for pub/sub-related messages
logger_pubsub = getLogger('zato_pubsub.srv')

# SQLAlchemy Core table objects and statement constructors used by the cleanup services below
SubscriptionTable = PubSubSubscription.__table__
SubscriptionDelete = SubscriptionTable.delete
SubscriptionSelect = SubscriptionTable.select

WSXChannelTable = ChannelWebSocket.__table__

WSXClientTable = WebSocketClient.__table__
WSXClientDelete = WSXClientTable.delete
WSXClientSelect = WSXClientTable.select
# ################################################################################################################################
class _msg:
    """ Log message templates used by the WSX cleanup services below.
    """
    initial = 'Cleaning up old WSX connections; now:`%s`, md:`%s`, ma:`%s`'
    found = 'Found %d WSX connection%s to clean up'
    cleaning = 'Cleaning up WSX connection %d/%d; %s'
    cleaned_up = 'Cleaned up WSX connection %d/%d; %s'
    unsubscribing = 'Unsubscribing `%s` (%s) from `%s`'
    deleting = 'Deleting `%s` from ODB'
# ################################################################################################################################
class _CleanupWSX:
""" A container for WSX connections that are about to be cleaned up, along with their subscriptions.
"""
__slots__ = 'pub_client_id', 'sk_dict'
def __init__(self):
self.pub_client_id = None
self.sk_dict = None
def __repr__(self):
return '<{} at {}, pci:{}, sk_dict:{}>'.format(self.__class__.__name__, hex(id(self)), self.pub_client_id, self.sk_dict)
def to_dict(self):
return {
'pub_client_id': self.pub_client_id,
'sk_dict': self.sk_dict,
}
# ################################################################################################################################
# ################################################################################################################################
class CleanupWSXPubSub(AdminService):
    """ Deletes all old WSX clients and their subscriptions.
    """
    name = 'pub.zato.channel.web-socket.cleanup-wsx-pub-sub'

    def _run_max_allowed_query(self, session, query, channel_name, max_allowed):
        """ Executes the input select or delete statement against subscriptions of the given
        channel whose last interaction time is older than max_allowed.
        """
        return session.execute(
            query.\
            where(SubscriptionTable.c.ws_channel_id==WSXChannelTable.c.id).\
            where(SubscriptionTable.c.cluster_id==self.server.cluster_id).\
            where(WSXChannelTable.c.name==channel_name).\
            where(SubscriptionTable.c.last_interaction_time < max_allowed)
        )

    def handle(self, _msg='Cleaning up WSX pub/sub, channel:`%s`, now:`%s (%s)`, md:`%s`, ma:`%s` (%s)'):
        # NOTE: the _msg parameter shadows the module-level _msg class inside this method

        # We receive a multi-line list of WSX channel name -> max timeout accepted on input
        config = parse_extra_into_dict(self.request.raw_request)

        self.logger.info('Running %s with config %r', self.get_name(), config)

        with closing(self.odb.session()) as session:

            # Delete stale connections for each subscriber
            for channel_name, max_delta in config.items():

                # Input timeout is in minutes but timestamps in ODB are in seconds
                # so we convert the minutes to seconds, as expected by the database.
                max_delta = max_delta * 60

                # We compare everything using seconds
                now = utcnow_as_ms()

                # Last interaction time for each connection must not be older than that many seconds ago
                max_allowed = now - max_delta

                now_as_iso = datetime_from_ms(now * 1000)
                max_allowed_as_iso = datetime_from_ms(max_allowed * 1000)

                # Get all sub_keys that are about to be deleted - retrieving them from the DELETE
                # statement below is not portable so we do it manually first.
                items = self._run_max_allowed_query(session, SubscriptionSelect(), channel_name, max_allowed)
                sub_key_list = [item.sub_key for item in items]

                if sub_key_list:
                    self.logger.debug(_msg, channel_name, now_as_iso, now, max_delta, max_allowed_as_iso, max_allowed)
                    logger_pubsub.info(_msg, channel_name, now_as_iso, now, max_delta, max_allowed_as_iso, max_allowed)

                    # First we need a list of topics to which sub_keys were related - required by broker messages.
                    topic_sub_keys = get_topic_sub_keys_from_sub_keys(session, self.server.cluster_id, sub_key_list)

                    # Now, delete old connections for that channel from SQL
                    self._run_max_allowed_query(session, SubscriptionDelete(), channel_name, max_allowed)

                    # Next, notify processes about deleted subscriptions to allow to update in-RAM structures
                    if topic_sub_keys:
                        self.broker_client.publish({
                            'topic_sub_keys': topic_sub_keys,
                            'action': PUBSUB.SUBSCRIPTION_DELETE.value,
                        })
                        logger_pubsub.info('Published a request to delete sub_keys: %s', sorted(topic_sub_keys))
                    else:
                        logger_pubsub.info('Found no sub_keys required to be deleted (%r)', config)

            # Commit all deletions
            session.commit()
# ################################################################################################################################
# ################################################################################################################################
class CleanupWSX(AdminService):
    """ Deletes WSX clients that exceeded their ping timeouts. Executed when a server starts. Also invoked through the scheduler.
    """
    name = 'pub.zato.channel.web-socket.cleanup-wsx'

    def _issue_log_msg(self, msg, *args):
        """ Logs the same message to both the service's own logger (at debug level) and to the pub/sub logger.
        """
        self.logger.debug(msg, *args)
        logger_pubsub.info(msg, *args)

    def _get_max_allowed(self):
        """ Returns the cutoff datetime - connections last seen before it are considered stale.
        """
        # Stale connections are ones that are older than 2 * interval in which each WebSocket's last_seen time is updated.
        # This is generous enough, because WSX send background pings once in 30 seconds. After 5 pings missed their
        # connections are closed. Then, the default interval is 60 minutes, so 2 * 60 = 2 hours. This means
        # that when a connection is broken but we somehow do not delete its relevant entry in SQL (e.g. because our
        # process was abruptly shut down), after these 2 hours the row will be considered ready to be deleted from
        # the database. Note that this service is invoked from the scheduler, by default, once in 30 minutes.

        # This is in minutes ..
        max_delta = WEB_SOCKET.DEFAULT.INTERACT_UPDATE_INTERVAL * 2

        # .. but timedelta expects seconds.
        max_delta = max_delta * 60 # = * 1 hour

        now = datetime.utcnow()
        max_allowed = now - timedelta(seconds=max_delta)
        now_as_iso = now.isoformat()

        self._issue_log_msg(_msg.initial, now_as_iso, max_delta, max_allowed)

        return max_allowed

    def _find_old_wsx_connections(self, session, max_allowed):
        """ Returns rows describing WSX connections, with their sub_keys, whose last_seen is older than max_allowed.
        """
        # Note that we always pull all the data possible to sort it out in Python code
        return session.query(
            WebSocketClient.id,
            WebSocketClient.pub_client_id,
            WebSocketClient.last_seen,
            WebSocketClientPubSubKeys.sub_key
            ).\
            filter(WebSocketClient.last_seen < max_allowed).\
            filter(WebSocketClient.id == WebSocketClientPubSubKeys.client_id).\
            all()

    def handle(self):

        # How far back are we to reach out to find old connections
        max_allowed = self._get_max_allowed()

        with closing(self.odb.session()) as session:

            # Find the old connections now
            result = self._find_old_wsx_connections(session, max_allowed)

        # Nothing to do, we can return
        if not result:
            return

        # At least one old connection was found
        wsx_clients = {}  # Maps pub_client_id -> _CleanupWSX object
        wsx_sub_key = {}  # Maps pub_client_id -> a list of its sub_keys

        for item in result:
            wsx = wsx_clients.setdefault(item.pub_client_id, _CleanupWSX())
            wsx.pub_client_id = item.pub_client_id

            sk_list = wsx_sub_key.setdefault(item.pub_client_id, [])
            sk_list.append(item.sub_key)

        len_found = len(wsx_clients)
        suffix = '' if len_found == 1 else 's'

        self._issue_log_msg(_msg.found, len_found, suffix)

        for idx, (pub_client_id, wsx) in enumerate(iteritems(wsx_clients), 1):

            # All subscription keys for that WSX, we are adding it here
            # so that below, for logging purposes, we are able to say
            # what subscriptions are being actually deleted.
            wsx.sk_dict = {}.fromkeys(wsx_sub_key[pub_client_id])

            # For each subscription of that WSX, add its details to the sk_dict
            for sub_key in wsx.sk_dict:
                sub = self.pubsub.get_subscription_by_sub_key(sub_key)
                if sub:
                    wsx.sk_dict[sub_key] = {
                        'creation_time': datetime_from_ms(sub.creation_time),
                        'topic_id': sub.topic_id,
                        'topic_name': sub.topic_name,
                        'ext_client_id': sub.ext_client_id,
                        'endpoint_type': sub.config['endpoint_type'],
                        'sub_pattern_matched': sub.sub_pattern_matched,
                    }

            # Log what we are about to do
            self._issue_log_msg(_msg.cleaning, idx, len_found, wsx.to_dict())

            # Unsubscribe the WebSocket first
            for sub_key, info in wsx.sk_dict.items():

                # Object 'info' may be None if we are called while the WSX connection
                # is still alive but did not respond to pings, in which case it cannot be cleaned up.
                if info:
                    self._issue_log_msg(_msg.unsubscribing, sub_key, info['ext_client_id'], info['topic_name'])
                    self.invoke('zato.pubsub.pubapi.unsubscribe',{
                        'sub_key': sub_key,
                        'topic_name': info['topic_name'],
                    })

            # Delete the WebSocket's state in SQL now
            self._issue_log_msg(_msg.deleting, wsx.pub_client_id)

            with closing(self.odb.session()) as session:
                session.execute(
                    WSXClientDelete().\
                    where(WSXClientTable.c.pub_client_id==wsx.pub_client_id)
                )
                session.commit()

            # Log information that this particular connection is done with
            # (note that for clarity, this part does not reiterate the subscription's details)
            self._issue_log_msg(_msg.cleaned_up, idx, len_found, wsx.pub_client_id)
# ################################################################################################################################
# ################################################################################################################################
| 12,876
|
Python
|
.py
| 208
| 51.375
| 130
| 0.531602
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,342
|
imap.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/email/imap.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from time import time
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.api import EMAIL as EMail_Common, Zato_None
from zato.common.broker_message import EMAIL
from zato.common.odb.model import IMAP
from zato.common.odb.query import email_imap_list
from zato.server.service import AsIs
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Imports below are used only for static analysis - the `if 0` guard means they
# never execute at runtime.
if 0:
    from bunch import Bunch
    from zato.common.typing_ import any_
    from zato.server.service import Service
# ################################################################################################################################
# Module-level configuration consumed by the meta-classes
# (GetListMeta, CreateEditMeta, DeleteMeta) applied to the services below.
elem = 'email_imap'
model = IMAP
label = 'an IMAP connection'
get_list_docs = 'IMAP connections'
broker_message = EMAIL
broker_message_prefix = 'IMAP_'
list_func = email_imap_list
create_edit_input_optional_extra = ['server_type', AsIs('tenant_id'), AsIs('client_id'), 'filter_criteria']
output_optional_extra = ['server_type', 'server_type_human', AsIs('tenant_id'), AsIs('client_id'), 'filter_criteria']
# ################################################################################################################################
def instance_hook(service:'Service', input:'Bunch', instance:'any_', attrs:'any_'):
    """ Meta-class hook - normalizes username and host before the instance is saved.
    """
    if not attrs.is_create_edit:
        return

    # An empty string rather than None so it is not stored as NULL in the database
    instance.username = input.username or ''

    # The sentinel marks the host as not having been provided by the user
    instance.host = input.host or Zato_None
# ################################################################################################################################
def response_hook(service:'Service', input:'Bunch', instance:'any_', attrs:'any_', hook_type:'str'):
    """ Meta-class hook - post-processes get-list items, filling in server-type details
    and clearing out the sentinel host value.
    """
    if hook_type != 'get_list':
        return

    for item in service.response.payload:

        # server_type may be None, in which case the generic default is assumed ..
        server_type = item.get('server_type')
        if not server_type:
            item.server_type = EMail_Common.IMAP.ServerType.Generic

        # .. which lets us always obtain a human-friendly name.
        item.server_type_human = EMail_Common.IMAP.ServerTypeHuman[item.server_type]

        # The sentinel means no user-set host value exists - clear it out
        if item.host == Zato_None:
            item.host = ''
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of IMAP connections - implementation generated by GetListMeta
    from the module-level configuration above.
    """
    _filter_by = IMAP.name,
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a new IMAP connection - implementation generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an existing IMAP connection - implementation generated by CreateEditMeta. """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes an IMAP connection - implementation generated by DeleteMeta. """
    pass
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an IMAP connection.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_email_imap_change_password_request'
        response_elem = 'zato_email_imap_change_password_response'

    def handle(self):

        # Stores the new password on the ODB instance
        def _set_password(instance, password):
            instance.password = password

        # Delegate to the base class which also publishes the broker notification
        return self._handle(IMAP, _set_password, EMAIL.IMAP_CHANGE_PASSWORD.value)
# ################################################################################################################################
class Ping(AdminService):
    """ Pings an IMAP connection to check its configuration.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_email_imap_ping_request'
        response_elem = 'zato_email_imap_ping_response'
        input_required = 'id'
        output_optional = 'info'

    def handle(self):
        with closing(self.odb.session()) as session:

            # Look up the connection's definition in the ODB
            item = session.query(IMAP).filter_by(id=self.request.input.id).one()

            start_time = time()

            # The e-mail component may be disabled in server.conf
            if not self.email:
                self.response.payload.info = 'Could not ping connection; is component_enabled.email set to True in server.conf?'
            else:
                # NOTE(review): judging by the response message, ping() presumably issues an IMAP NOOP - confirm in the wrapper
                self.email.imap.get(item.name, True).conn.ping()
                response_time = time() - start_time
                self.response.payload.info = 'Ping NOOP submitted, took:`{0:03.4f} s`, check server logs for details.'.format(
                    response_time)
# ################################################################################################################################
| 5,331
|
Python
|
.py
| 101
| 47.346535
| 130
| 0.50289
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,343
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/email/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 157
|
Python
|
.py
| 5
| 30
| 64
| 0.673333
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,344
|
smtp.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/email/smtp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from time import time
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.api import SMTPMessage
from zato.common.broker_message import EMAIL
from zato.common.odb.model import SMTP
from zato.common.version import get_version
from zato.common.odb.query import email_smtp_list
from zato.server.service.internal import AdminService, AdminSIO, ChangePasswordBase
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Version string embedded in the ping message body below
version = get_version()

# ################################################################################################################################

# Configuration consumed by the service meta-classes (CreateEditMeta, DeleteMeta, GetListMeta)
elem = 'email_smtp'                 # SimpleIO element prefix
model = SMTP                        # ODB model this module manages
label = 'an SMTP connection'        # Human-friendly label used in generated messages
get_list_docs = 'SMTP connections'  # Used in generated GetList documentation
broker_message = EMAIL              # Broker message class for notifications
broker_message_prefix = 'SMTP_'     # Prefix of broker actions published on changes
list_func = email_smtp_list         # Query function backing GetList
# ################################################################################################################################
def instance_hook(service, input, instance, attrs):
    """ Invoked by the Create/Edit meta-classes before the instance is saved -
    ensures the username is never stored as None/NULL.
    """
    if not attrs.is_create_edit:
        return
    instance.username = input.username if input.username else ''
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of SMTP connections - implementation generated by GetListMeta
    from the module-level configuration above.
    """
    # Column(s) that list filtering applies to
    _filter_by = SMTP.name,
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a new SMTP connection - implementation generated by CreateEditMeta. """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an existing SMTP connection - implementation generated by CreateEditMeta. """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes an SMTP connection - implementation generated by DeleteMeta. """
    pass
# ################################################################################################################################
class ChangePassword(ChangePasswordBase):
    """ Changes the password of an SMTP connection.
    """
    password_required = False

    class SimpleIO(ChangePasswordBase.SimpleIO):
        request_elem = 'zato_email_smtp_change_password_request'
        response_elem = 'zato_email_smtp_change_password_response'

    def handle(self):

        # Stores the new password on the ODB instance
        def _set_password(instance, password):
            instance.password = password

        # Delegate to the base class which also publishes the broker notification
        return self._handle(SMTP, _set_password, EMAIL.SMTP_CHANGE_PASSWORD.value)
# ################################################################################################################################
class Ping(AdminService):
    """ Pings an SMTP connection to check its configuration.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_email_smtp_ping_request'
        response_elem = 'zato_email_smtp_ping_response'
        input_required = ('id',)
        output_required = ('info',)

    def handle(self):
        with closing(self.odb.session()) as session:

            # Look up the connection's definition in the ODB
            item = session.query(SMTP).filter_by(id=self.request.input.id).one()

            # Send the ping message to the connection's own ping address
            msg = SMTPMessage()
            msg.from_ = item.ping_address
            msg.to = item.ping_address
            msg.cc = item.ping_address
            msg.bcc = item.ping_address

            # Non-ASCII characters exercise the UTF-8 handling end to end
            msg.subject = 'Zato SMTP ping (Α Β Γ Δ Ε Ζ Η)'
            msg.headers['Charset'] = 'utf-8'

            msg.body = 'Hello from {}\nUTF-8 test: Α Β Γ Δ Ε Ζ Η'.format(version).encode('utf-8')

            # One UTF-8 and one pure-ASCII attachment
            msg.attach('utf-8.txt', 'Α Β Γ Δ Ε Ζ Η Θ Ι Κ Λ Μ Ν Ξ Ο Π Ρ Σ Τ Υ Φ Χ Ψ Ω'.encode('utf-8'))
            msg.attach('ascii.txt', 'A B C D E F G H I J K L M N O P Q R S T U V W X Y Z')

            start_time = time()
            self.email.smtp.get(item.name, True).conn.send(msg)
            response_time = time() - start_time

            self.response.payload.info = 'Ping submitted, took:`{0:03.4f} s`, check server logs for details.'.format(response_time)
# ################################################################################################################################
| 4,535
|
Python
|
.py
| 88
| 46.613636
| 130
| 0.492455
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,345
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/apispec/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from io import StringIO
from itertools import chain
from json import dumps
import os
# Bunch
from bunch import bunchify
# Zato
from zato.server.apispec.spec.openapi import OpenAPIGenerator
from zato.server.service import List, Opaque, Service
# Zato
from zato.common.ext.dataclasses import asdict
from zato.common.util.eval_ import as_list
from zato.common.util.file_system import fs_safe_name
from zato.common.util.open_ import open_r
from zato.server.apispec.model import FieldInfo
from zato.server.apispec.spec.core import Generator
from zato.server.service import AsIs, Bool
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
# ################################################################################################################################
# Placeholder shown in generated tables when a value is missing
no_value = '---'

col_sep = ' ' # Column separator
len_col_sep = len(col_sep)
# ################################################################################################################################
def _json_default(value):
    # type: (object) -> object
    """ json.dumps hook - serializes FieldInfo dataclasses as dicts
    and passes every other value through unchanged.
    """
    return asdict(value) if isinstance(value, FieldInfo) else value
# ################################################################################################################################
class GetAPISpec(Service):
    """ Returns API specifications for all services.
    """
    class SimpleIO:
        input_optional = ('cluster_id', 'query', Bool('return_internal'), 'include', 'exclude', 'needs_sphinx',
            'needs_api_invoke', 'needs_rest_channels', 'api_invoke_path', AsIs('tags'))

    def handle(self):
        cluster_id = self.request.input.get('cluster_id')

        # Normalize the comma-separated inputs, dropping empty elements
        include = [elem for elem in as_list(self.request.input.include, ',') if elem]
        exclude = [elem for elem in as_list(self.request.input.exclude, ',') if elem]
        api_invoke_path = [elem for elem in as_list(self.request.input.api_invoke_path, ',') if elem]

        # Internal services are excluded unless explicitly requested
        if not self.request.input.get('return_internal'):
            if 'zato.*' not in exclude:
                exclude.append('zato.*')

        if cluster_id and cluster_id != self.server.cluster_id:
            # Fixed: interpolate the message explicitly - the previous code passed
            # logging-style `%s` arguments to ValueError, which never formats them.
            raise ValueError('Input cluster ID `{}` different than ours `{}`'.format(
                cluster_id, self.server.cluster_id))

        # Default to Sphinx output unless explicitly overridden
        if isinstance(self.request.input.needs_sphinx, bool):
            needs_sphinx = self.request.input.needs_sphinx
        else:
            needs_sphinx = True

        # Collect the raw specification data for all matching services
        data = Generator(self.server.service_store.services, self.server.sio_config,
            include, exclude, self.request.input.query, self.request.input.tags).get_info()

        if needs_sphinx:
            out = self.invoke(GetSphinx.get_name(), {
                'data': data,
                'needs_api_invoke': self.request.input.needs_api_invoke,
                'needs_rest_channels': self.request.input.needs_rest_channels,
                'api_invoke_path':api_invoke_path
            })
        else:
            out = data

        self.response.payload = dumps(out, default=_json_default)
# ################################################################################################################################
class GetSphinx(Service):
    """ Generates API docs in Sphinx (reStructuredText) format - a services index,
    one page per service and a bundled OpenAPI document.
    """
    class SimpleIO:
        input_required = Opaque('data')
        input_optional = 'needs_api_invoke', 'needs_rest_channels', List('api_invoke_path')
        output_required = Opaque('data')

# ################################################################################################################################

    def add_default_files(self, files):
        """ Returns default static files that always exist.
        """
        apispec_dir = os.path.join(self.server.static_dir, 'sphinxdoc', 'apispec')
        for dir_path, _ignored_dir_names, file_names in os.walk(apispec_dir):
            # Files directly in the top-level directory keep a '.' base
            if dir_path == apispec_dir:
                base_dir = '.'
            else:
                base_dir = os.path.basename(dir_path)
            for file_name in file_names:
                relative_path = os.path.join(base_dir, file_name)
                f = open_r(os.path.join(dir_path, file_name))
                contents = f.read()
                f.close()
                files[relative_path] = contents

# ################################################################################################################################

    def get_openapi_spec(self, data, needs_api_invoke, needs_rest_channels, api_invoke_path):
        """ Builds the OpenAPI document from the collected specification data.
        """
        data = bunchify(data)
        channel_data = self.server.worker_store.request_dispatcher.url_data.channel_data
        generator = OpenAPIGenerator(data, channel_data, needs_api_invoke, needs_rest_channels, api_invoke_path)
        return generator.generate()

# ################################################################################################################################

    def _make_sphinx_safe(self, data):
        # type: (unicode) -> unicode
        # This is a no-op currently
        return data

# ################################################################################################################################

    def get_service_table_line(self, idx, name, docs, sio):
        """ Returns one row of the services index table, with names and links
        converted to filesystem- and Sphinx-safe forms.
        """
        name_fs_safe = 'service_{}'.format(fs_safe_name(name))
        file_name = '{}.rst'.format(name_fs_safe)

        summary = docs.summary
        if summary:
            summary = self._make_sphinx_safe(summary)

        return bunchify({
            'ns': unicode(idx),
            'orig_name': name,
            'sphinx_name': name.replace('_', '\_'), # Needed for Sphinx to ignore undescores # noqa: W605
            'name': name_fs_safe,
            'name_link': """:doc:`{} <./{}>`""".format(name, name_fs_safe),
            'file_name': file_name,
            'description': summary or no_value,
            'docs': docs,
            'sio': sio
        })

# ################################################################################################################################

    def write_separators(self, buff, *borders):
        """ Writes a row of '=' border segments - the horizontal rule of an RST simple table.
        """
        for border in borders:
            buff.write(border)
            buff.write(col_sep)
        buff.write('\n')

# ################################################################################################################################

    def write_sio(self, buff, input, output):
        """ Writes out an RST table describing the given SimpleIO input/output elements.
        """
        # Reusable
        list_suffix = ' (list)'
        len_list_suffix = len(list_suffix)

        sio_lines = []

        # Minimum column widths equal the header labels' lengths
        longest_name = 4 # len('Name')
        longest_datatype = 8 # len('Datatype')
        longest_required = 8 # len('Required')
        longest_description = 11 # len('Description')

        # The table is within a 'table' block which is why it needs to be indented
        len_table_indent = 3

        # Find the longest elements for each column
        for elem in chain(input, output):
            elem.name_sphinx = elem.name.replace('_', '\_') # Sphinx treats _ as hyperlinks # noqa: W605
            len_elem_name_sphinx = len(elem.name_sphinx)
            len_elem_subtype = len(elem.subtype)
            len_elem_description = len(elem.description)
            longest_name = max(longest_name, len_elem_name_sphinx)
            longest_datatype = max(longest_datatype, len_elem_subtype + len_list_suffix)
            longest_description = max(longest_description, len_elem_description)

        # We need to know how much to indent multi-line descriptions,
        # this includes all the preceding headers and 1 for each single space.
        description_indent = ' ' * (
            len_table_indent + \
            longest_name + \
            len_col_sep + \
            1 + \
            longest_datatype + \
            len_col_sep + \
            1 + \
            longest_required + \
            1
        )
        new_line_with_indent = '\n\n' + description_indent

        for elem in chain(input, output):
            elem.description = elem.description.replace('\n', new_line_with_indent)
            sio_lines.append(bunchify({
                'name': elem.name_sphinx,
                'datatype': elem.subtype + (list_suffix if elem.is_list else ''),
                'is_required': elem.is_required,
                'is_required_str': 'Yes' if elem.is_required else no_value,
                'description': elem.description,
            }))

        # Account for the column separator in the final widths
        longest_name += len_col_sep
        longest_datatype += len_col_sep

        name_border = '=' * longest_name
        datatype_border = '=' * longest_datatype
        required_border = '=' * longest_required
        description_border = '=' * longest_description

        table_indent = ' ' * len_table_indent

        # Left-align the table
        buff.write('.. table::\n')
        buff.write(table_indent) # Note no \n here
        buff.write(':align: left\n\n')

        # Top border of the table
        buff.write(table_indent)
        self.write_separators(buff, name_border, datatype_border, required_border, description_border)

        # Header row
        buff.write(table_indent)
        buff.write('Name'.ljust(longest_name))
        buff.write(col_sep)
        buff.write('Datatype'.ljust(longest_datatype))
        buff.write(col_sep)
        buff.write('Required'.ljust(longest_required))
        buff.write(col_sep)
        buff.write('Description'.ljust(longest_description))
        buff.write(col_sep)
        buff.write('\n')

        # Border under the header
        buff.write(table_indent)
        self.write_separators(buff, name_border, datatype_border, required_border, description_border)

        # One row per SIO element
        for item in sio_lines:
            buff.write(table_indent)
            buff.write(item.name.ljust(longest_name))
            buff.write(col_sep)
            buff.write(item.datatype.ljust(longest_datatype))
            buff.write(col_sep)
            buff.write(item.is_required_str.ljust(longest_required))
            buff.write(col_sep)
            buff.write((item.description or '---').ljust(longest_description))
            buff.write(col_sep)
            buff.write('\n')

        # Bottom border of the table
        buff.write(table_indent)
        self.write_separators(buff, name_border, datatype_border, required_border, description_border)
        buff.write('\n')

# ################################################################################################################################

    def get_service_page(self, item):
        """ Renders one service's RST page - its title, docstring and input/output tables.
        """
        buff = StringIO()
        input_title = 'Input'
        len_input_title = len(input_title)
        output_title = 'Output'
        len_output_title = len(output_title)

        # Page title - the service's name, underlined
        _ = buff.write(item.sphinx_name)
        _ = buff.write('\n')
        _ = buff.write('=' * len(item.sphinx_name))
        _ = buff.write('\n')
        _ = buff.write('\n')

        docs_full = self._make_sphinx_safe(item.docs.full)

        _ = buff.write(docs_full)
        _ = buff.write('\n')
        _ = buff.write('\n')

        # No SimpleIO for this service - stop at the docstring
        if 'zato' not in item.sio or (not item.sio.zato):
            return buff

        input_required = item.sio.zato.input_required
        input_optional = item.sio.zato.input_optional
        output_required = item.sio.zato.output_required
        output_optional = item.sio.zato.output_optional

        # Input
        _ = buff.write(input_title)
        _ = buff.write('\n')
        _ = buff.write('-' * len_input_title)
        _ = buff.write('\n' * 2)

        if input_required or input_optional:
            self.write_sio(buff, input_required, input_optional)
        else:
            _ = buff.write('(None)')
            _ = buff.write('\n')
            _ = buff.write('\n')

        # Output
        _ = buff.write(output_title)
        _ = buff.write('\n')
        _ = buff.write('-' * len_output_title)
        _ = buff.write('\n' * 2)

        if output_required or output_optional:
            self.write_sio(buff, output_required, output_optional)
        else:
            _ = buff.write('(None)')
            _ = buff.write('\n')

        return buff

# ################################################################################################################################

    def add_services(self, data, files):
        """ Builds the services index table and, per service, its own RST page.
        """
        buff = StringIO()
        _ = buff.write('Services\n')
        _ = buff.write('--------\n\n')

        lines = []

        # Minimum column widths equal the header labels' lengths
        longest_ns = 2 # len('NS')
        longest_name = 4 # len('Name')
        longest_desc = 11 # len('Description')

        for idx, elem in enumerate(data, 1):
            name = elem.name
            docs = elem.docs
            sio = elem.simple_io
            service_line = self.get_service_table_line(idx, name, docs, sio)
            lines.append(service_line)
            longest_ns = max(longest_ns, len(service_line.ns))
            longest_name = max(longest_name, len(service_line.name_link))
            longest_desc = max(longest_desc, len(service_line.description))

        # Account for the column separator in the final widths
        longest_ns += len_col_sep
        longest_name += len_col_sep

        ns_border = '=' * longest_ns
        name_border = '=' * longest_name
        desc_border = '=' * longest_desc

        # Top border and header row
        self.write_separators(buff, ns_border, name_border, desc_border)
        _ = buff.write('---'.ljust(longest_ns))
        _ = buff.write(col_sep)
        _ = buff.write('Name'.ljust(longest_name))
        _ = buff.write(col_sep)
        _ = buff.write('Description'.ljust(longest_desc))
        _ = buff.write(col_sep)
        _ = buff.write('\n')
        self.write_separators(buff, ns_border, name_border, desc_border)

        for item in lines:

            # First, add the services to the main table
            _ = buff.write(item.ns.ljust(longest_ns))
            _ = buff.write(col_sep)
            _ = buff.write(item.name_link.ljust(longest_name))
            _ = buff.write(col_sep)
            _ = buff.write((item.description or '---').ljust(longest_desc))
            _ = buff.write(col_sep)
            _ = buff.write('\n')

            # Now, create a description file for each service
            files[item.file_name] = self.get_service_page(item).getvalue()

        # Finish the table
        self.write_separators(buff, ns_border, name_border, desc_border)

        # Save the table's contents
        files['services.rst'] = buff.getvalue()

# ################################################################################################################################

    def handle(self):
        req = self.request.input
        data = bunchify(req.data)
        files = {}

        # Static scaffolding first, then the generated per-service pages
        self.add_default_files(files)
        self.add_services(data, files)

        # Bundle the OpenAPI document alongside the Sphinx sources
        files['download/openapi.yaml'] = self.get_openapi_spec(
            data, req.needs_api_invoke, req.needs_rest_channels, req.api_invoke_path)

        self.response.payload.data = files
# ################################################################################################################################
| 15,350
|
Python
|
.py
| 319
| 38.661442
| 130
| 0.520558
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,346
|
cassandra.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/query/cassandra.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import QUERY
from zato.common.odb.model import CassandraConn, CassandraQuery
from zato.common.odb.query import cassandra_query_list
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Configuration consumed by the service meta-classes (CreateEditMeta, DeleteMeta, GetListMeta)
elem = 'query_cassandra'              # SimpleIO element prefix
model = CassandraQuery                # ODB model this module manages
output_optional_extra = ['def_name']  # Extra SIO output beyond the auto-generated elements
label = 'a Cassandra query'           # Human-friendly label used in generated messages
get_list_docs = 'Cassandra queries'   # Used in generated GetList documentation
broker_message = QUERY                # Broker message class for notifications
broker_message_prefix = 'CASSANDRA_'  # Prefix of broker actions published on changes
list_func = cassandra_query_list      # Query function backing GetList
def_needed = CassandraConn            # Each query requires an underlying connection definition
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of Cassandra queries - implementation generated by GetListMeta. """
    # Column(s) that list filtering applies to
    _filter_by = CassandraQuery.name,
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    """ Creates a new Cassandra query - implementation generated by CreateEditMeta. """
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an existing Cassandra query - implementation generated by CreateEditMeta. """
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes a Cassandra query - implementation generated by DeleteMeta. """
    pass
# ################################################################################################################################
| 1,958
|
Python
|
.py
| 41
| 45.97561
| 130
| 0.472909
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,347
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/query/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,348
|
ide.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/service/ide.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from dataclasses import dataclass
from datetime import datetime, timedelta
from operator import itemgetter
from pathlib import Path
from time import sleep
# Zato
from zato.common.api import Default_Service_File_Data
from zato.common.exception import BadRequest
from zato.common.typing_ import anylist, intnone, list_field, strnone
from zato.common.util.api import get_demo_py_fs_locations, wait_for_file
from zato.common.util.open_ import open_r, open_w
from zato.server.service import Model, Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, dictlist, strlistdict
# ################################################################################################################################
# ################################################################################################################################
def make_fs_location_url_safe(data:'str') -> 'str':
    """ Encodes a filesystem path for embedding in a URL by turning slashes into tildes. """
    return '~'.join(data.split('/'))
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class IDERequest(Model):
    """ Input model shared by the IDE services. """

    # Name of the service to look up, if any
    service_name: 'strnone' = None

    # Filesystem location of the file to operate on, if any
    fs_location: 'strnone' = None

    # Whether to block until services from the file are deployed
    should_wait_for_services: 'bool' = False

    # Whether to convert pickup-dir locations to their work-dir equivalents
    should_convert_pickup_to_work_dir: 'bool' = False
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class IDEResponse(Model):
    """ Output model shared by the IDE services. """

    # Counts of deployed services and files, plus human-friendly renderings
    service_count: 'intnone' = None
    service_count_human: 'strnone' = None
    file_count: 'intnone' = None
    file_count_human: 'strnone' = None

    # Source code of the currently opened file
    current_file_source_code: 'strnone' = ''

    # All services known to the server
    service_list: 'anylist' = list_field()

    # Details of the currently opened file
    current_file_name: 'strnone' = None
    current_fs_location: 'strnone' = None
    current_fs_location_url_safe: 'strnone' = None

    # Which deployment root directory the current file belongs to, and how many roots exist
    current_root_directory: 'strnone' = None
    root_directory_count: 'intnone' = None

    # A list of services that are contained in a particular file.
    current_file_service_list: 'anylist' = list_field()

    # A list of files that may potentially have a service of the given name.
    current_service_file_list: 'anylist' = list_field()
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class RootDirInfo(Model):
    """ Describes which deployment root directory a file belongs to. """

    # The matching root directory, or None if the file is outside all roots
    current_root_directory: 'strnone' = None

    # Total number of known root directories
    root_directory_count: 'intnone' = None
# ################################################################################################################################
# ################################################################################################################################
class _IDEBase(Service):
input = IDERequest
output = IDEResponse
# ################################################################################################################################
def _normalize_fs_location(self, fs_location:'str') -> 'str':
# .. resolve the basic variables ..
fs_location = os.path.expanduser(fs_location)
# .. make sure it's an actual path ..
fs_location = fs_location.replace('~', '/')
# .. this is always required ..
fs_location = os.path.abspath(fs_location)
# .. now, we can return it to our caller.
return fs_location
# ################################################################################################################################
def _validate_path(self, orig_path:'str') -> 'None':
# If we have any path on input ..
if orig_path:
# .. collect all the root, top-level directories we can deploy services to ..
all_root_dirs = self._get_all_root_directories()
# .. get its canonical version ..
path = self._normalize_fs_location(orig_path)
# .. go through all the deployment roots ..
for item in all_root_dirs:
# .. check if the input path is one that belongs to that root ..
if path.startswith(item):
# .. we have our match, we can stop searching ..
break
# .. if we are here, it means we didn't find a matching root directory ..
# .. so we need to raise an exception to indicate that ..
else:
raise ValueError(f'Invalid path `{orig_path}`')
# ################################################################################################################################
def before_handle(self):
# In the Create service, we're looking up the 'root_directory' key,
# in other services, it's called 'fs_location'.
orig_path = self.request.input.get('root_directory') or self.request.input.get('fs_location')
# If we have any path on input ..
if orig_path:
# .. validate if it's a correct one.
self._validate_path(orig_path)
# ################################################################################################################################
def _get_service_list_by_fs_location(self, deployment_info_list:'any_', fs_location:'str') -> 'dictlist':
# Local variables
all_root_dirs = self._get_all_root_directories()
# Response to produce
out = []
for item in deployment_info_list:
if fs_location == item['fs_location']:
# This is reusable
_target_fs_location = self._convert_work_to_pickup_dir(fs_location)
root_dir_info = self._get_current_root_dir_info(_target_fs_location, all_root_dirs)
out.append({
'name': item['service_name'],
'fs_location': fs_location,
'fs_location_url_safe': make_fs_location_url_safe(fs_location),
'line_number': item['line_number'],
'current_root_directory': root_dir_info.current_root_directory,
'root_directory_count': root_dir_info.root_directory_count,
# We subtract a little bit to make sure the class name is not in the first line
'line_number_human': item['line_number'] - 3
})
return sorted(out, key=itemgetter('name'))
# ################################################################################################################################
def _get_all_root_directories(self) -> 'strlistdict':
# Our response to produce
out = {}
# .. this the default directory that will always exist
out[self.server.hot_deploy_config.pickup_dir] = []
# .. now, we can append all the user-defined directories ..
for key, value in sorted(self.server.pickup_config.items()):
if not value:
continue
if key.startswith(('hot-deploy.user', 'user_conf')):
if not 'patterns' in value:
pickup_from = value['pickup_from']
if pickup_from.endswith('/'):
pickup_from = pickup_from[:-1]
out[pickup_from] = []
return out
# ################################################################################################################################
def _get_default_root_directory(self, all_root_dirs:'strlistdict | None'=None) -> 'str':
all_root_dirs = all_root_dirs or self._get_all_root_directories()
for item in all_root_dirs:
windows_matches = r'incoming\services' in item
non_windows_matches = 'incoming/services' in item
if windows_matches or non_windows_matches:
return item
else:
raise ValueError(f'Default root directory not found among {sorted(all_root_dirs)}')
# ################################################################################################################################
def _get_current_root_dir_info(self, fs_location:'str', all_root_dirs:'strlistdict | None'=None) -> 'RootDirInfo':
# Our response to produce
out = RootDirInfo()
# Collect all the root, top-level directories we can deploy services to ..
all_root_dirs = all_root_dirs or self._get_all_root_directories()
# .. check which one the current file belongs to ..
for item in all_root_dirs:
if fs_location.startswith(item):
current_root_directory = item
break
else:
current_root_directory = None
# .. populate the response accordingly ..
out.current_root_directory = current_root_directory
out.root_directory_count = len(all_root_dirs)
# .. and return it to our caller.
return out
# ################################################################################################################################
def get_deployment_info_list(self):
service_list_response = self.invoke('zato.service.get-deployment-info-list', **{
'needs_details': True,
'include_internal': False,
'skip_response_elem': True,
})
for item in service_list_response:
yield item
# ################################################################################################################################
def _convert_work_to_pickup_dir(self, fs_location:'str') -> 'str':
# Windows ..
if 'hot-deploy\\current' in fs_location:
needs_replace = True
# .. non-Windows ..
elif 'hot-deploy/current' in fs_location:
needs_replace = True
# .. we don't need to fix it up ..
else:
needs_replace = False
# .. we enter here if we need to fix up the name ..
if needs_replace:
file_name = os.path.basename(fs_location)
default_root_dir = self._get_default_root_directory()
out = os.path.join(default_root_dir, file_name)
# .. otherwise, we use it as-is ..
else:
out = fs_location
# .. now, we can return it to our caller.
return out
# ################################################################################################################################
def _convert_pickup_to_work_dir(self, fs_location:'str') -> 'str':
# Windows ..
if 'pickup\\incoming\\services' in fs_location:
needs_replace = True
# .. non-Windows ..
elif 'pickup/incoming/services' in fs_location:
needs_replace = True
# .. we don't need to fix it up ..
else:
needs_replace = False
# .. we enter here if we need to fix up the name ..
if needs_replace:
file_name = os.path.basename(fs_location)
base_work_dir = self.server.work_dir
hot_deploy_dir = self.server.fs_server_config.hot_deploy.current_work_dir
out = os.path.join(base_work_dir, hot_deploy_dir, file_name)
out = os.path.abspath(out)
# .. otherwise, we use it as-is ..
else:
out = fs_location
# .. now, we can return it to our caller.
return out
# ################################################################################################################################
def _wait_for_services(self, fs_location:'str', max_wait_time:'int'=3) -> 'None':
    """ Blocks until a deployed service from the given file appears, or until max_wait_time seconds elapse.
    """
    current = datetime.utcnow()
    deadline = current + timedelta(seconds=max_wait_time)

    while current < deadline:
        self.logger.info('Waiting for %s', fs_location)

        # Stop as soon as any deployed service points to our file
        if any(elem['fs_location'] == fs_location for elem in self.get_deployment_info_list()):
            return

        current = datetime.utcnow()
        sleep(0.2)
# ################################################################################################################################
# ################################################################################################################################
class ServiceIDE(_IDEBase):
    """ Returns the full IDE view - all deployed files and services - plus details about
    the currently selected service or file, including its source code.
    """
    def handle(self):

        # Add type hints
        input:'IDERequest' = self.request.input

        # Local variables
        all_root_dirs = self._get_all_root_directories()

        # Default data structures to fill out with details
        file_item_dict = {}
        service_list = []

        # The service that we want to look up ..
        input_service_name = input.service_name

        # .. or a file that we need.
        input_fs_location = input.fs_location or ''
        if input_fs_location:
            input_fs_location = self._normalize_fs_location(input_fs_location)

        # Full path to the file with the current service's source code
        current_fs_location = input_fs_location

        # It's possible that we've been called right after a file was deployed,
        # in which case we need to wait for a moment until any services
        # from that file are deployed.
        if self.request.input.should_wait_for_services:
            if input_fs_location:
                if os.path.exists(input_fs_location):
                    self._wait_for_services(input_fs_location)

        # Current's service source code
        current_file_source_code = ''

        # All services stored in the current file
        current_file_service_list = []

        # This will point to files that contain the currently selected service.
        # It is possible that more than one file will have the same service
        # and we need to recognize such a case.
        current_service_file_list = []

        service_list_response = self.get_deployment_info_list()

        # The file_item_dict dictionary maps file system locations to file names which means that keys
        # are always unique (because FS locations are always unique).
        for item in service_list_response:

            file_name = item['file_name']
            service_name = item['service_name']
            line_number = item['line_number']

            # The location we received may point to a hot-deployment directory
            # but not the original one that the file was saved in. Rather, it may be
            # the work directory that the file was moved to. This is why we need
            # to potentially fix up the location and make it point to the original one.
            fs_location = item['fs_location']
            fs_location = self._convert_work_to_pickup_dir(fs_location)

            # This is reusable
            root_dir_info = self._get_current_root_dir_info(fs_location, all_root_dirs)

            # We subtract a little bit to make sure the class name is not in the first line
            line_number_human = item['line_number'] - 3

            # This maps a full file path to its extract file name.
            file_item_dict[fs_location] = file_name

            # Appending to our list of services is something that we can always do
            service_list.append({
                'name': service_name,
                'fs_location': fs_location,
                'fs_location_url_safe': make_fs_location_url_safe(fs_location),
                'line_number': line_number,
                'line_number_human': line_number_human,
                'current_root_directory': root_dir_info.current_root_directory,
                'root_directory_count': root_dir_info.root_directory_count,
            })

            # If the current service is among what this file contains or if the current file is what we have on input,
            # append the latter's name for later use.
            input_service_name_matches = input_service_name and input_service_name == service_name
            input_fs_location_matches = input_fs_location and input_fs_location == fs_location

            if input_service_name_matches or input_fs_location_matches:

                # This is the file that contains the service that we have on input
                # or if input location is the same as what we are processing right now in this loop's iteration.
                current_fs_location = fs_location

                # This is reusable
                root_dir_info = self._get_current_root_dir_info(current_fs_location, all_root_dirs)

                # Append this location to the list of locations that the service is available under ..
                current_service_file_list.append(fs_location)

                # .. also, append the service name to the list of services this file contains ..
                current_file_service_list.append({
                    'name': service_name,
                    'fs_location': fs_location,
                    'fs_location_url_safe': make_fs_location_url_safe(fs_location),
                    'line_number': line_number,
                    'line_number_human': line_number_human,
                    'current_root_directory': root_dir_info.current_root_directory,
                    'root_directory_count': root_dir_info.root_directory_count,
                })

                # .. and read the service's source code for our caller's benefit.
                if os.path.exists(fs_location):
                    with open_r(fs_location) as f:
                        current_file_source_code = f.read()

        # This list may have file names that are not unique
        # but their FS locations will be always unique.
        file_list = []

        for fs_location, file_name in file_item_dict.items():

            # This is reusable
            root_dir_info = self._get_current_root_dir_info(fs_location, all_root_dirs)

            file_list.append({
                'name': file_name,
                'fs_location': fs_location,
                'fs_location_url_safe': make_fs_location_url_safe(fs_location),
                'current_root_directory': root_dir_info.current_root_directory,
                'root_directory_count': root_dir_info.root_directory_count,
            })

        file_count = len(file_list)
        service_count = len(service_list)

        # NOTE(review): the commented-out needs_suffix calls suggest singular forms
        # were once produced; confirm that the plural-only labels are intentional.
        file_list_suffix = 's'# if needs_suffix(file_count) else ''
        service_list_suffix = 's'# if needs_suffix(service_count) else ''

        file_count_human = f'{file_count} file{file_list_suffix}'
        service_count_human = f'{service_count} service{service_list_suffix}'

        # Let's try to find the root directory based on the current file ..
        root_dir_info = self._get_current_root_dir_info(current_fs_location)

        # .. we go here if we found one ..
        if root_dir_info.current_root_directory:
            current_root_directory = root_dir_info.current_root_directory

        # .. we go here if we didn't find one, which may happen if the current file has no services inside ..
        else:
            current_root_directory = self._get_default_root_directory(all_root_dirs)

        response = {
            'service_list': sorted(service_list, key=itemgetter('name')),
            'file_list': sorted(file_list, key=itemgetter('name')),
            'file_count': file_count,
            'service_count': service_count,
            'file_count_human': file_count_human,
            'service_count_human': service_count_human,
            'current_file_service_list': current_file_service_list,
            'current_service_file_list': current_service_file_list,
            'current_fs_location': current_fs_location,
            'current_file_source_code': current_file_source_code,
            'current_root_directory': current_root_directory,
            'root_directory_count': root_dir_info.root_directory_count,
        }

        self.response.payload = response
# ################################################################################################################################
# ################################################################################################################################
class _GetBase(_IDEBase):
    """ Base class for services returning IDE details about a single file or service.
    """
    def _build_get_response(self, deployment_info_list:'any_', fs_location:'str') -> 'IDEResponse':
        """ Builds an IDEResponse describing the given file, including its source code
        and the list of services it contains.
        """
        response = IDEResponse()
        response.service_list = []
        response.current_file_service_list = self._get_service_list_by_fs_location(deployment_info_list, fs_location)
        response.current_service_file_list = []

        if fs_location:

            # The file may have been deployed only a moment ago - give it a chance to appear
            wait_for_file(fs_location, 3, interval=0.2)

            response.current_fs_location = fs_location
            response.current_fs_location_url_safe = make_fs_location_url_safe(fs_location)
            response.current_file_name = os.path.basename(fs_location)

            # We waited above but it still may not exist
            if os.path.exists(fs_location):

                # Use a context manager so the file handle is always closed
                # (previously the handle was left open until garbage collection).
                with open_r(fs_location) as f:
                    response.current_file_source_code = f.read()

        # .. get information about the current root directory ..
        info = self._get_current_root_dir_info(fs_location)

        # .. populate the response accordingly ..
        response.current_root_directory = info.current_root_directory
        response.root_directory_count = info.root_directory_count

        # .. and return it to our caller.
        return response
# ################################################################################################################################
# ################################################################################################################################
class GetService(_GetBase):
    """ Returns IDE details about the file that implements a given service.
    """
    def handle(self):

        deployment_info_list = list(self.get_deployment_info_list())
        wanted_name = self.request.input.service_name

        # Find the first deployment entry that matches the requested service name, if any
        match = next((elem for elem in deployment_info_list if elem['service_name'] == wanted_name), None)

        if match:

            # Build a response based on the file that implements the service ..
            response = self._build_get_response(deployment_info_list, match['fs_location'])

            # .. and return it to our caller.
            self.response.payload = response
# ################################################################################################################################
# ################################################################################################################################
class GetFile(_GetBase):
    """ Returns IDE details about a single file, given its file-system location.
    """
    def handle(self):

        # Map the input pickup location to its work-directory counterpart first
        work_location = self._convert_pickup_to_work_dir(self.request.input.fs_location)

        # Collect deployment details for all the services on this server ..
        deployment_info_list = self.get_deployment_info_list()

        # .. build a response out of them ..
        response = self._build_get_response(deployment_info_list, work_location)

        # .. and return it to our caller.
        self.response.payload = response
# ################################################################################################################################
# ################################################################################################################################
class GetFileList(_GetBase):
    """ Returns all the Python files found under each top-level deployment directory.
    """
    def handle(self):

        # Maps each root directory to a list of file descriptions - this is our response
        out = self._get_all_root_directories()

        # Go through all the top-level roots ..
        for dir_name, files in out.items():

            # .. source archives keep services and models under impl/src, so descend into it ..
            if dir_name.endswith('src-zato'):
                search_dir = os.path.join(dir_name, 'impl', 'src')
            else:
                search_dir = dir_name

            # .. collect all the Python files recursively, in sorted order ..
            for py_path in sorted(Path(search_dir).glob('**/*.py')):

                py_path_str = str(py_path)
                dir_info = self._get_current_root_dir_info(py_path_str)

                files.append({
                    'file_name': py_path_str,
                    'file_name_url_safe': make_fs_location_url_safe(py_path_str),
                    'current_root_directory': dir_info.current_root_directory,
                    'root_directory_count': dir_info.root_directory_count,
                })

        # .. finally, we can return the response to our caller.
        self.response.payload = out
# ################################################################################################################################
# ################################################################################################################################
class CreateFile(_GetBase):
    """ Creates a new Python source file under one of the allowed root directories,
    or returns the contents of an already existing one.
    """

    # Our I/O
    input = 'root_directory', 'file_name', '-data'
    output = 'data', 'full_path', 'full_path_url_safe'

    def handle(self):

        # Local variables
        input_data = self.request.input.data or ''
        file_name = self.request.input.file_name
        root_directory = self.request.input.root_directory

        # We will expect for the full path to begin with one of these
        all_root_dirs = self._get_all_root_directories()

        # Combine the two to get a full path ..
        full_path = os.path.join(root_directory, file_name)

        # .. normalize it ..
        full_path = self._normalize_fs_location(full_path)

        # .. make sure this is a Python file ..
        if not full_path.endswith('.py'):
            full_path += '.py'

        # .. make sure it's an allowed one ..
        self._validate_path(full_path)

        # .. make sure all the directories leading to the file exist ..
        Path(full_path).parent.mkdir(parents=True, exist_ok=True)

        # .. prepare the data to write to and return ..
        # Explicit input data always results in a (re)write; otherwise an existing
        # file is returned as-is and a missing one gets the default service template.
        if input_data:
            needs_new_file = True
            data_for_new_file = input_data
        else:
            if os.path.exists(full_path):
                needs_new_file = False
                with open_r(full_path) as f:
                    data_for_new_file = f.read()
            else:
                needs_new_file = True
                data_for_new_file = Default_Service_File_Data.format(**{
                    'full_path': full_path,
                })

        # .. ensure it has a prefix that we recognize ..
        for item in all_root_dirs:

            # .. we have a match ..
            if full_path.startswith(item):

                # .. otherwise, simply create it unless it already existed ..
                if needs_new_file:
                    with open_w(full_path) as f:
                        _ = f.write(data_for_new_file)
                    self.logger.info('Created path %s', full_path)
                else:
                    self.logger.info('Path already exists %s', full_path)

                # .. no need to continue further ..
                break

        # .. if it has no such prefix, we need to report an error ..
        else:
            msg = f'Invalid path `{full_path}`, must start with one of: `{sorted(all_root_dirs)}`'
            raise ValueError(msg)

        self.response.payload.data = data_for_new_file
        self.response.payload.full_path = full_path
        self.response.payload.full_path_url_safe = make_fs_location_url_safe(full_path)
# ################################################################################################################################
# ################################################################################################################################
class DeleteFile(_GetBase):
    """ Deletes a source file from both its pickup location and its hot-deployment
    work-directory counterpart, and removes its services from the service store.
    """

    # Our I/O
    input = 'fs_location'
    output = 'full_path', 'full_path_url_safe'

    def _validate_fs_location(self, fs_location:'str') -> 'None':
        """ Confirms that the input path is non-empty, exists and points to a regular file,
        raising BadRequest otherwise.
        """
        # Make sure we have a path on input ..
        if not fs_location:
            raise BadRequest(self.cid, f'No path given on input `{fs_location}`')

        # .. and that it exists ..
        if not os.path.exists(fs_location):
            raise BadRequest(self.cid, f'Path does not exist `{fs_location}`')

        # .. and that it's actually a file.
        if not os.path.isfile(fs_location):
            raise BadRequest(self.cid, f'Path is not a file `{fs_location}`')

# ################################################################################################################################

    def handle(self):

        # Local variables
        fs_location = self.request.input.fs_location

        # .. turn the pickup location into a work directory one ..
        work_dir_fs_location = self._convert_pickup_to_work_dir(fs_location)

        # .. validate both locations ..
        self._validate_fs_location(fs_location)
        self._validate_fs_location(work_dir_fs_location)

        # .. if we're here, it means that we can actually delete both locations ..
        os.remove(fs_location)
        self.logger.info('Deleted path %s', fs_location)

        os.remove(work_dir_fs_location)
        self.logger.info('Deleted path %s', work_dir_fs_location)

        # .. now, delete it from our in-RAM service store ..
        self.server.service_store.delete_objects_by_file_path(work_dir_fs_location, delete_from_odb=True)

        # .. find the location with the demo service ..
        demo_py_fs = get_demo_py_fs_locations(self.server.base_dir)

        # .. finally, tell the caller what our default file with services is ..
        self.response.payload.full_path = demo_py_fs.pickup_incoming_full_path
        self.response.payload.full_path_url_safe = make_fs_location_url_safe(demo_py_fs.pickup_incoming_full_path)
# ################################################################################################################################
# ################################################################################################################################
class RenameFile(_GetBase):
    """ Renames a file in a given root directory by copying its contents to the new
    name and deleting the old one.
    """

    # Our I/O
    input = 'root_directory', 'current_file_name', 'new_file_name'
    output = 'full_path', 'full_path_url_safe'

    def handle(self) -> 'None':

        # Local variables
        input = self.request.input

        # We always work with combined and absolute paths
        current_file_path = os.path.join(input.root_directory, input.current_file_name)
        current_file_path = self._normalize_fs_location(current_file_path)

        new_file_path = os.path.join(input.root_directory, input.new_file_name)

        # Note that the new path itself is normalized here - previously the current
        # path was normalized twice, leaving the new path unnormalized.
        new_file_path = self._normalize_fs_location(new_file_path)

        # Make sure that both paths are allowed
        self._validate_path(current_file_path)
        self._validate_path(new_file_path)

        # First, get the contents of the old file ..
        with open_r(current_file_path) as f:
            data = f.read()

        # .. now, we can delete it ..
        _ = self.invoke(DeleteFile, fs_location=current_file_path)

        # .. and create a new one ..
        response = self.invoke(CreateFile, **{
            'root_directory': input.root_directory,
            'file_name': input.new_file_name,
            'data': data,
        })

        # .. finally, build a response for our caller.
        self.response.payload.full_path = response['full_path']
        self.response.payload.full_path_url_safe = response['full_path_url_safe']
# ################################################################################################################################
# ################################################################################################################################
| 32,239
|
Python
|
.py
| 567
| 46.4903
| 130
| 0.501748
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,349
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/service/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from base64 import b64decode, b64encode
from contextlib import closing
from operator import attrgetter
from traceback import format_exc
from uuid import uuid4
# Python 2/3 compatibility
from builtins import bytes
from zato.common.ext.future.utils import iterkeys
# Zato
from zato.common.api import BROKER, SCHEDULER, StatsKey
from zato.common.broker_message import SERVICE
from zato.common.const import ServiceConst
from zato.common.exception import BadRequest, ZatoException
from zato.common.ext.validate_ import is_boolean
from zato.common.json_ import dumps as json_dumps
from zato.common.json_internal import dumps, loads
from zato.common.json_schema import get_service_config
from zato.common.marshal_.api import Model
from zato.common.odb.model import Cluster, ChannelAMQP, ChannelWMQ, ChannelZMQ, DeployedService, HTTPSOAP, Server, \
Service as ODBService
from zato.common.odb.query import service_deployment_list, service_list
from zato.common.rate_limiting import DefinitionParser
from zato.common.scheduler import get_startup_job_services
from zato.common.util.api import hot_deploy, parse_extra_into_dict, payload_from_request
from zato.common.util.file_system import get_tmp_path
from zato.common.util.stats import combine_table_data, collect_current_usage
from zato.common.util.sql import elems_with_opaque, set_instance_opaque_attrs
from zato.server.service import Boolean, Float, Integer, Service
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from datetime import datetime
from zato.common.typing_ import any_, anydict, strnone
# ################################################################################################################################
# ################################################################################################################################
# For pyflakes
Service = Service
# ################################################################################################################################
# ################################################################################################################################
class GetList(AdminService):
    """ Returns a list of services.
    """
    _filter_by = ODBService.name,

    class SimpleIO(GetListAdminSIO):
        request_elem = 'zato_service_get_list_request'
        response_elem = 'zato_service_get_list_response'
        input_required = 'cluster_id'
        input_optional = ('should_include_scheduler',) + GetListAdminSIO.input_optional
        output_required = 'id', 'name', 'is_active', 'impl_name', 'is_internal', Boolean('may_be_deleted')
        # NOTE(review): Integer('usage') appears twice below - likely a duplicate; confirm.
        output_optional = 'is_json_schema_enabled', 'needs_json_schema_err_details', 'is_rate_limit_active', \
            'rate_limit_type', 'rate_limit_def', Boolean('rate_limit_check_parent_def'), Integer('usage'), \
            Integer('usage'), Integer('slow_threshold')
        output_repeated = True
        default_value = ''

# ################################################################################################################################

    def _get_data(self, session, return_internal, include_list, internal_del): # type: ignore
        """ Runs one database query and returns only those matching services
        that are actually deployed on this server.
        """
        out = []
        search_result = self._search(service_list, session, self.request.input.cluster_id, return_internal, include_list, False)
        search_result = elems_with_opaque(search_result)

        for item in search_result:

            # First, filter out services that aren't deployed, e.g. they may have existed at one point
            # but right now the server doesn't have them deployed.

            # Extract the name of the module that the service is implemented in ..
            impl_name = item.impl_name

            # .. check if we have any deployment info about this module ..
            deployment_info = self.server.service_store.get_deployment_info(impl_name)

            # .. if there's no file-system path for the module's file, it means it's not deployed ..
            if not deployment_info.get('fs_location'):
                continue

            item.may_be_deleted = internal_del if item.is_internal else True

            # Attach JSON Schema validation configuration
            json_schema_config = get_service_config(item, self.server)

            item.is_json_schema_enabled = json_schema_config['is_json_schema_enabled']
            item.needs_json_schema_err_details = json_schema_config['needs_json_schema_err_details']

            out.append(item)

        return out

# ################################################################################################################################

    def get_data(self, session): # type: ignore
        """ Returns all matching services, optionally including the services
        that the scheduler's startup jobs use.
        """
        # Reusable
        return_internal = is_boolean(self.server.fs_server_config.misc.return_internal_objects)
        internal_del = is_boolean(self.server.fs_server_config.misc.internal_services_may_be_deleted)

        # We issue one or two queries to populate this list - the latter case only if we are to return scheduler's jobs.
        out = []

        # Confirm if we are to return services for the scheduler
        if self.request.input.should_include_scheduler:
            scheduler_service_list = get_startup_job_services()
        else:
            scheduler_service_list = []

        # This query runs only if there are scheduler services to return ..
        if scheduler_service_list:
            result = self._get_data(session, return_internal, scheduler_service_list, internal_del)
            out.extend(result)

        # .. while this query runs always (note the empty include_list).
        result = self._get_data(session, return_internal, [], internal_del)
        out.extend(result)

        # .. sort the result before returning ..
        out.sort(key=attrgetter('name'))

        # .. finally, return all that we found.
        return out

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
# ################################################################################################################################
# ################################################################################################################################
class _GetStatsTable(AdminService):
    """ Returns this PID's statistics table - currently disabled.
    """
    def handle(self):
        # NOTE(review): the stats client call is commented out, so `table` is always
        # None and this service returns an empty response - confirm this is intentional.
        table = None # self.server.stats_client.get_table()
        if table:
            self.response.payload = table
# ################################################################################################################################
# ################################################################################################################################
class GetStatsTable(AdminService):
    """ Returns a statistics table combined from all servers and all of their PIDs.
    """
    class SimpleIO(AdminSIO):
        output_optional = ('name', Float('item_max'), Float('item_min'), Float('item_mean'), Float('item_total_time'), \
            Float('item_usage_share'), Float('item_time_share'), Integer('item_total_usage'),
            'item_total_time_human', 'item_total_usage_human')
        response_elem = None

    def handle(self):

        # Ask every server and every PID for its local statistics ..
        all_pids_response = self.server.rpc.invoke_all(_GetStatsTable.get_name())

        # .. merge the per-PID tables into a single one ..
        combined = combine_table_data(all_pids_response.data)

        # .. which becomes our response.
        self.response.payload[:] = combined
# ################################################################################################################################
# ################################################################################################################################
class GetServiceStats(AdminService):
    """ Returns current usage statistics of a single service on this PID.
    """
    def handle(self):
        service_name = self.request.raw_request['name']
        usage = self.server.current_usage.get(service_name)
        if usage:
            self.response.payload = usage
# ################################################################################################################################
# ################################################################################################################################
class IsDeployed(Service):
    """ Returns a flag indicating whether a service of the given name is deployed.
    """
    input = 'name'
    output = 'is_deployed'

    def handle(self):
        self.response.payload.is_deployed = self.server.service_store.is_deployed(self.request.input.name)
# ################################################################################################################################
# ################################################################################################################################
class _Get(AdminService):
    """ Base class for services that return details about a single service,
    looked up via a subclass-provided filter (by name or by ID).
    """
    class SimpleIO(AdminSIO):
        input_required = 'cluster_id',
        output_required = 'id', 'name', 'is_active', 'impl_name', 'is_internal', Boolean('may_be_deleted')
        output_optional = Integer('usage'), Integer('slow_threshold'), 'last_duration', \
            Integer('time_min_all_time'), Integer('time_max_all_time'), 'time_mean_all_time', \
            'is_json_schema_enabled', 'needs_json_schema_err_details', 'is_rate_limit_active', \
            'rate_limit_type', 'rate_limit_def', Boolean('rate_limit_check_parent_def'), 'last_timestamp', \
            'usage_min', 'usage_max', 'usage_mean'

    def get_data(self, session): # type: ignore
        """ Returns exactly one service row matching the subclass's filter within the input cluster.
        Raises if zero or multiple rows match (query.one()).
        """
        query = session.query(ODBService.id, ODBService.name, ODBService.is_active,
            ODBService.impl_name, ODBService.is_internal, ODBService.slow_threshold).\
            filter(Cluster.id==ODBService.cluster_id).\
            filter(Cluster.id==self.request.input.cluster_id)

        # Subclasses narrow the query down, e.g. by name or by ID
        query = self.add_filter(query) # type: ignore

        return query.one()

    def handle(self):
        with closing(self.odb.session()) as session:
            service = self.get_data(session)
            internal_del = is_boolean(self.server.fs_server_config.misc.internal_services_may_be_deleted)

            self.response.payload.id = service.id
            self.response.payload.name = service.name
            self.response.payload.is_active = service.is_active
            self.response.payload.impl_name = service.impl_name
            self.response.payload.is_internal = service.is_internal
            self.response.payload.slow_threshold = service.slow_threshold
            self.response.payload.may_be_deleted = internal_del if service.is_internal else True

            # Collect current usage statistics from all servers and all of their PIDs.
            # NOTE(review): self.request.input.name is only an input element for GetByName - confirm for GetByID.
            usage_response = self.server.rpc.invoke_all(GetServiceStats.get_name(), {'name': self.request.input.name}) # type: ignore
            usage_response = collect_current_usage(usage_response.data) # type: any_

            if usage_response:
                self.response.payload.usage = usage_response[StatsKey.PerKeyValue]
                self.response.payload.last_duration = usage_response[StatsKey.PerKeyLastDuration]
                self.response.payload.last_timestamp = usage_response[StatsKey.PerKeyLastTimestamp]

                self.response.payload.usage_min = usage_response[StatsKey.PerKeyMin]
                self.response.payload.usage_max = usage_response[StatsKey.PerKeyMax]
                self.response.payload.usage_mean = usage_response[StatsKey.PerKeyMean]
# ################################################################################################################################
# ################################################################################################################################
class GetByName(_Get):
    """ Returns a particular service by its name.
    """
    class SimpleIO(_Get.SimpleIO):
        request_elem = 'zato_service_get_by_name_request'
        response_elem = 'zato_service_get_by_name_response'
        input_required = _Get.SimpleIO.input_required + ('name',)

    def add_filter(self, query): # type: ignore
        # Narrow the base query down to the one service matching the input name
        return query.filter(ODBService.name==self.request.input.name)
# ################################################################################################################################
# ################################################################################################################################
class GetByID(_Get):
    """ Returns a particular service by its ID.
    """
    class SimpleIO(_Get.SimpleIO):
        # NOTE(review): these element names reuse the get_by_name ones - possibly a
        # copy-paste leftover, but changing them would alter the wire format; confirm.
        request_elem = 'zato_service_get_by_name_request'
        response_elem = 'zato_service_get_by_name_response'
        input_required = _Get.SimpleIO.input_required + ('id',)

    def add_filter(self, query): # type: ignore
        # Narrow the base query down to the one service matching the input ID
        return query.\
            filter(ODBService.id==self.request.input.id)
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
    """ Updates a service.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_service_edit_request'
        response_elem = 'zato_service_edit_response'
        input_required = 'id', 'is_active', Integer('slow_threshold')
        input_optional = 'is_json_schema_enabled', 'needs_json_schema_err_details', 'is_rate_limit_active', \
            'rate_limit_type', 'rate_limit_def', 'rate_limit_check_parent_def'
        output_optional = 'id', 'name', 'impl_name', 'is_internal', Boolean('may_be_deleted')

    def handle(self):
        input = self.request.input

        # If we have a rate limiting definition, let's check it upfront
        DefinitionParser.check_definition_from_input(input)

        with closing(self.odb.session()) as session:
            try:
                service = session.query(ODBService).filter_by(id=input.id).one() # type: ODBService
                service.is_active = input.is_active
                service.slow_threshold = input.slow_threshold

                # Store non-column attributes in the service's opaque data blob
                set_instance_opaque_attrs(service, input)

                # Configure JSON Schema validation if service has a schema assigned by user.
                class_info = self.server.service_store.get_service_info_by_id(input.id) # type: anydict
                class_ = class_info['service_class'] # type: ODBService
                if class_.schema:
                    self.server.service_store.set_up_class_json_schema(class_, input) # type: ignore

                # Set up rate-limiting each time an object was edited
                self.server.service_store.set_up_rate_limiting(service.name)

                session.add(service)
                session.commit()

                # Notify all the servers about the change over the broker
                input.action = SERVICE.EDIT.value
                input.impl_name = service.impl_name
                input.name = service.name
                self.broker_client.publish(input)

                self.response.payload = service
                internal_del = is_boolean(self.server.fs_server_config.misc.internal_services_may_be_deleted)
                self.response.payload.may_be_deleted = internal_del if service.is_internal else True

            except Exception:
                self.logger.error('ODBService could not be updated, e:`%s`', format_exc())
                session.rollback()

                raise
# ################################################################################################################################
# ################################################################################################################################
class Delete(AdminService):
    """ Deletes a service.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_service_delete_request'
        response_elem = 'zato_service_delete_response'
        input_required = ('id',)

    def handle(self):
        with closing(self.odb.session()) as session:
            try:
                service = session.query(ODBService).\
                    filter(ODBService.id==self.request.input.id).\
                    one() # type: ODBService

                # Internal services may be deleted only if the server is configured to allow it
                internal_del = is_boolean(self.server.fs_server_config.misc.internal_services_may_be_deleted)

                if service.is_internal and not internal_del:
                    msg = "Can't delete service:[{}], it's an internal one and internal_services_may_be_deleted is not True".format(
                        service.name)
                    raise ZatoException(self.cid, msg)

                # This will also cascade to delete the related DeployedService objects
                session.delete(service)
                session.commit()

                # Notify all the servers about the deletion over the broker
                msg = {
                    'action': SERVICE.DELETE.value,
                    'id': self.request.input.id,
                    'name':service.name,
                    'impl_name':service.impl_name,
                    'is_internal':service.is_internal,
                }
                self.broker_client.publish(msg)

            except Exception:
                session.rollback()
                msg = 'ODBService could not be deleted, e:`{}`'.format(format_exc())
                self.logger.error(msg)

                raise
# ################################################################################################################################
# ################################################################################################################################
class GetChannelList(AdminService):
    """ Returns a list of channels of a given type through which the service is exposed.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_service_get_channel_list_request'
        response_elem = 'zato_service_get_channel_list_response'
        input_required = ('id', 'channel_type')
        output_required = ('id', 'name')

    def handle(self):

        # Map each supported channel type to its ODB model class.
        model_by_type = {
            'plain_http': HTTPSOAP,
            'amqp': ChannelAMQP,
            'jms-wmq': ChannelWMQ,
            'zmq': ChannelZMQ,
        }

        channel_type = self.request.input.channel_type
        model = model_by_type[channel_type]

        with closing(self.odb.session()) as session:

            # Select only the two columns we return, limited to channels pointing to our service.
            query = session.query(model.id, model.name).\
                filter(model.service_id == self.request.input.id)

            # SOAP and plain HTTP channels share one table - they are told apart by the SOAP version.
            if channel_type == 'soap':
                query = query.filter(model.soap_version != None) # noqa
            elif channel_type == 'plain_http':
                query = query.filter(model.soap_version == None) # noqa

            self.response.payload[:] = query.all()
# ################################################################################################################################
# ################################################################################################################################
class Invoke(AdminService):
    """ Invokes the service directly, as though it was exposed through a channel defined in web-admin.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_service_invoke_request'
        response_elem = 'zato_service_invoke_response'
        input_optional = 'id', 'name', 'payload', 'channel', 'data_format', 'transport', Boolean('is_async'), \
            Integer('expiration'), Integer('pid'), Boolean('all_pids'), Integer('timeout'), Boolean('skip_response_elem'), \
            'needs_response_time'
        output_optional = ('response',)

# ################################################################################################################################

    def _get_payload_from_extra(self, payload:'any_') -> 'strnone':
        """ Tries to interpret the payload as a quoted set of key=value lines; returns None if it cannot.
        """
        # NOTE(review): on success this returns the dict produced by parse_extra_into_dict,
        # despite the strnone annotation - confirm whether the annotation should be widened.

        # No payload at all, nothing to parse.
        if not payload:
            return

        # Various spellings of an empty or null document also mean there is no payload.
        if payload in (b'""', b"''", b'null', 'null'):
            return

        if isinstance(payload, bytes):
            payload = payload.decode('utf8')

        # .. ignore payload that seems to be JSON ..
        if '{' in payload or '}' in payload:
            return

        # .. strip the outer quotes and restore newlines escaped in transit ..
        payload = payload[1:-1]
        payload = payload.replace('\\n', '\n')

        # .. and parse the remaining lines into a dict.
        payload = parse_extra_into_dict(payload)

        return payload

# ################################################################################################################################

    def _invoke_other_server_pid(
        self,
        name:'str',
        payload:'any_',
        pid:'int',
        data_format:'str',
        skip_response_elem:'bool',
    ) -> 'any_':
        """ Invokes the service in another PID of this server and wraps the result
        in the same per-PID dictionary format that invoke_all_pids produces.
        """
        response = self.server.invoke(
            name,
            payload, # type: ignore
            pid=pid,
            data_format=data_format,
            skip_response_elem=skip_response_elem)

        response = {
            pid: {
                'is_ok': True,
                'pid': pid,
                'pid_data': response or None,
                'error_info': '',
            }}

        return response

# ################################################################################################################################

    def _invoke_current_server_pid(
        self,
        id:'any_',
        name:'str',
        all_pids:'bool',
        payload:'any_',
        channel:'str',
        data_format:'str',
        transport:'str',
        zato_response_headers_container:'anydict',
        skip_response_elem:'bool',
    ) -> 'any_':
        """ Invokes the service in the current process, by name if one was given, otherwise by ID.
        """
        func, id_ = (self.invoke, name) if name else (self.invoke_by_id, id)
        response = func(
            id_,
            payload, # type: ignore
            channel,
            data_format,
            transport,
            zato_response_headers_container=zato_response_headers_container,
            skip_response_elem=skip_response_elem,
            serialize=True)

        # If the caller asked for all PIDs, mimic the multi-PID response format
        # even though only the current process was actually invoked.
        if all_pids:
            response = {
                self.server.pid: {
                    'is_ok': True,
                    'pid': self.server.pid,
                    'pid_data': response,
                    'error_info': '',
                }}

        return response

# ################################################################################################################################

    def _build_response(self, response:'any_') -> 'any_':
        """ Serializes the response to JSON if needed and returns it BASE64-encoded.
        """
        # Anything that is not already a string or bytes is serialized to JSON first.
        if not isinstance(response, str):
            if not isinstance(response, bytes):
                if hasattr(response, 'to_dict'):
                    response = response.to_dict() # type: ignore
                response = json_dumps(response)

        # Make sure what we return is a string ..
        response = response if isinstance(response, bytes) else response.encode('utf8')

        # .. which we base64-encode ..
        response = b64encode(response).decode('utf8') if response else ''

        # .. and return to our caller.
        return response

# ################################################################################################################################

    def _run_async_invoke(
        self,
        pid:'int',
        id:'any_',
        name:'str',
        payload:'any_',
        channel:'str',
        data_format:'str',
        transport:'str',
        expiration:'int',
    ) -> 'any_':
        """ Invokes the service asynchronously, either in another PID or in the current one.
        """
        # Map the service's ID to its name first if only the ID was given on input.
        if id:
            impl_name = self.server.service_store.id_to_impl_name[id]
            name = self.server.service_store.service_data(impl_name)['name']

        # If PID is given on input it means we must invoke this particular server process by it ID
        if pid and pid != self.server.pid:
            response = self.server.invoke_by_pid(name, payload, pid)
        else:
            response = self.invoke_async(name, payload, channel, data_format, transport, expiration)

        return response

# ################################################################################################################################

    def _run_sync_invoke(
        self,
        pid:'int',
        timeout:'int',
        id:'any_',
        name:'str',
        all_pids:'bool',
        payload:'any_',
        channel:'str',
        data_format:'str',
        transport:'str',
        zato_response_headers_container:'anydict',
        skip_response_elem:'bool',
    ) -> 'any_':
        """ Invokes the service synchronously - in all PIDs, in another PID, or in the current one.
        """
        # This method is the same as the one for async, except that in async there was no all_pids

        # It is possible that we were given the all_pids flag on input but we know
        # ourselves that there is only one process, the current one, so we can just
        # invoke it directly instead of going through IPC.
        if all_pids and self.server.fs_server_config.main.gunicorn_workers > 1:
            use_all_pids = True
        else:
            use_all_pids = False

        if use_all_pids:
            # The timeout is optional so the argument tuple is built accordingly.
            args = (name, payload, timeout) if timeout else (name, payload)
            response = dumps(self.server.invoke_all_pids(*args, skip_response_elem=skip_response_elem))
        else:

            # We are invoking another server by its PID ..
            if pid and pid != self.server.pid:
                response = self._invoke_other_server_pid(name, payload, pid, data_format, skip_response_elem)

            # .. we are invoking our own process ..
            else:
                response = self._invoke_current_server_pid(
                    id, name, all_pids, payload, channel, data_format, transport,
                    zato_response_headers_container, skip_response_elem)

        return response

# ################################################################################################################################

    def _build_response_time(self, start_time:'datetime') -> 'any_':
        """ Returns elapsed time since start_time in milliseconds plus a human-friendly rendering of it.
        """
        response_time = self.time.utcnow(needs_format=False) - start_time # type: ignore
        response_time = response_time.total_seconds()
        response_time = response_time * 1000 # Turn seconds into milliseconds

        # If we have less than a millisecond, don't show exactly how much it was ..
        if response_time < 1:
            response_time_human = 'Below 1 ms'
        else:
            # .. if it's below 10 seconds, keep using milliseconds ..
            if response_time < 10_000:
                response_time = int(response_time) # Round it up
                response_time_human = f'{response_time} ms'

            # .. otherwise, turn it into seconds ..
            else:
                _response_time = float(response_time)
                _response_time = _response_time / 1000.0 # Convert it back to seconds
                _response_time = round(_response_time, 2) # Keep it limited to two digits
                response_time_human = f'{_response_time} sec.'

        return response_time, response_time_human

# ################################################################################################################################

    def handle(self):

        # Local aliases
        payload:'any_' = None
        needs_response_time = self.request.input.get('needs_response_time', True)

        # A dictionary of headers that the target service may want to produce
        zato_response_headers_container = {}

        # Optionally, we return the total execution time of this service
        if needs_response_time:
            start_time = self.time.utcnow(needs_format=False)

        # This is our input ..
        orig_payload:'any_' = self.request.input.get('payload')

        # .. which is optional ..
        if orig_payload:

            # .. if it exists, it will be BASE64-encoded ..
            orig_payload = b64decode(orig_payload) # type: ignore

            # .. try and see if it is a dict of extra keys and values ..
            payload = self._get_payload_from_extra(orig_payload)

            # .. if it is not, run the regular parser ..
            if not payload:
                payload = payload_from_request(self.server.json_parser, self.cid, orig_payload,
                    self.request.input.data_format, self.request.input.transport)

            # Messages enqueued by the scheduler embed the real payload in a wrapper - unpack it here.
            if payload:
                if isinstance(payload, str):
                    scheduler_indicator = SCHEDULER.EmbeddedIndicator
                else:
                    scheduler_indicator = SCHEDULER.EmbeddedIndicatorBytes
                if scheduler_indicator in payload: # type: ignore
                    payload = loads(payload) # type: ignore
                    payload = payload['data'] # type: ignore

        id = self.request.input.get('id')
        name = self.request.input.get('name')
        pid = self.request.input.get('pid') or 0
        all_pids = self.request.input.get('all_pids')
        timeout = self.request.input.get('timeout') or None
        skip_response_elem = self.request.input.get('skip_response_elem') or False

        channel = self.request.input.get('channel')
        data_format = self.request.input.get('data_format')
        transport = self.request.input.get('transport')
        expiration = self.request.input.get('expiration') or BROKER.DEFAULT_EXPIRATION

        # The service may be pointed to either by ID or by name but not by both at a time.
        if name and id:
            raise ZatoException('Cannot accept both id:`{}` and name:`{}`'.format(id, name))

        try:
            # Is this an async invocation ..
            if self.request.input.get('is_async'):
                response = self._run_async_invoke(
                    pid, id, name, payload, channel, data_format, transport, expiration)

            # .. or a sync one ..
            else:
                response = self._run_sync_invoke(
                    pid, timeout, id, name, all_pids, payload, channel,
                    data_format, transport, zato_response_headers_container, skip_response_elem
                )

            # .. we still may not have any response here ..
            if response is not None:
                response = self._build_response(response)
                self.response.payload.response = response

        finally:

            # If we are here, it means that we can optionally compute the total execution time ..
            if needs_response_time:

                # .. build the values we are to return ..
                response_time, response_time_human = self._build_response_time(start_time) # type: ignore

                # .. which we attach to our response.
                self.response.headers['X-Zato-Response-Time'] = response_time
                self.response.headers['X-Zato-Response-Time-Human'] = response_time_human
# ################################################################################################################################
# ################################################################################################################################
class GetDeploymentInfoList(AdminService):
    """ Returns detailed information regarding the service's deployment status on each of the servers it's been deployed to.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_service_get_deployment_info_list_request'
        response_elem = 'zato_service_get_deployment_info_list_response'
        input = '-id', '-needs_details', Boolean('-include_internal')
        output = 'server_id', 'server_name', 'service_id', 'service_name', 'fs_location', 'file_name', \
            Integer('line_number'), '-details'

    def get_data(self, session): # type: ignore
        """ Builds a list of deployment details, one dict per server the service is deployed to.
        """
        # Response to produce
        out = []

        # This is optional and if it does not exist we assume that it is True
        # (idiomatic 'not in' instead of 'not x in y').
        if 'include_internal' not in self.request.input:
            include_internal = True
        else:
            include_internal = self.request.input.get('include_internal')

        needs_details = self.request.input.needs_details

        items = service_deployment_list(session, self.request.input.id, include_internal)

        for item in items:

            # Convert the item from SQLAlchemy to a dict because we are going to append the file_name to it ..
            _item = item._asdict()

            # .. load it but do not assign it yet because it is optional ..
            details = loads(item.details)

            # .. extract the file name out of the full path to the service ..
            fs_location = details['fs_location']
            _item['file_name'] = os.path.basename(fs_location)

            # .. but append the full path as well ..
            _item['fs_location'] = fs_location

            # .. this is also required ..
            _item['line_number'] = details['line_number']

            # .. this is optional ..
            if needs_details:
                _item['details'] = details
            else:
                del _item['details']

            # .. we can append this item now ..
            out.append(_item)

        # .. and return the whole output once we are done.
        return out

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
# ################################################################################################################################
# ################################################################################################################################
class GetSourceInfo(AdminService):
    """ Returns information on the service's source code.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_service_get_source_info_request'
        response_elem = 'zato_service_get_source_info_response'
        input_required = ('cluster_id', 'name')
        output_optional = ('service_id', 'server_name', 'source', 'source_path', 'source_hash', 'source_hash_method')

    def get_data(self, session): # type: ignore

        # Columns to return, labelled to match our SimpleIO output elements.
        columns = (
            ODBService.id.label('service_id'),
            Server.name.label('server_name'),
            DeployedService.source,
            DeployedService.source_path,
            DeployedService.source_hash,
            DeployedService.source_hash_method,
        )

        # Join service, deployment and server information for this very server only.
        query = session.query(*columns).\
            filter(Cluster.id==ODBService.cluster_id).\
            filter(Cluster.id==self.request.input.cluster_id).\
            filter(ODBService.name==self.request.input.name).\
            filter(ODBService.id==DeployedService.service_id).\
            filter(Server.id==DeployedService.server_id).\
            filter(Server.id==self.server.id)

        return query.one()

    def handle(self):
        with closing(self.odb.session()) as session:
            info = self.get_data(session)

            payload = self.response.payload
            payload.service_id = info.service_id
            payload.server_name = info.server_name

            # Source code may be arbitrary bytes so it is BASE64-encoded before being returned.
            payload.source = b64encode(info.source) if info.source else None
            payload.source_path = info.source_path
            payload.source_hash = info.source_hash
            payload.source_hash_method = info.source_hash_method
# ################################################################################################################################
# ################################################################################################################################
class UploadPackage(AdminService):
    """ Uploads a package with service(s) to be hot-deployed.
    """
    class SimpleIO(AdminSIO):
        request_elem = 'zato_service_upload_package_request'
        response_elem = 'zato_service_upload_package_response'
        input_required = ('cluster_id', 'payload', 'payload_name')

    def handle(self):

        # Local variables
        payload = self.request.input.payload
        payload_name = self.request.input.payload_name

        # The package's contents arrive BASE64-encoded.
        input_payload = b64decode(payload)

        suffix = payload_name

        service_deploy_location = os.environ.get('Zato_Service_Deploy_Location')
        has_abs_path = os.path.isabs(payload_name)

        # Decide where to save the package - an explicitly configured directory,
        # the absolute path given as the payload's name, or a temporary path
        # feeding the default hot-deployment mechanism.
        # (Previously, prefix/body were also computed up front but were only ever
        # used - and recomputed - in the temporary-path branch, so the dead
        # assignments are gone now.)
        if service_deploy_location:
            file_name_full = os.path.join(service_deploy_location, suffix)
            needs_default_hot_deploy = False

        elif has_abs_path:
            file_name_full = payload_name
            needs_default_hot_deploy = False

        else:
            prefix = 'zato-hd-'
            body = uuid4().hex
            file_name_full = get_tmp_path(prefix, suffix, body)
            needs_default_hot_deploy = True

        with open(file_name_full, 'wb') as tf:
            _ = tf.write(input_payload)
            tf.flush()

            # Only the temporary-path case goes through the default hot-deployment
            # machinery and returns a package ID.
            if needs_default_hot_deploy:
                package_id = hot_deploy(self.server, payload_name, tf.name, False)
                self.response.payload = {
                    'package_id': package_id
                }
# ################################################################################################################################
# ################################################################################################################################
class ServiceInvoker(AdminService):
    """ A proxy service to invoke other services through via REST.
    """
    name = ServiceConst.ServiceInvokerName

# ################################################################################################################################

    def _extract_payload_from_request(self):
        # Parses the raw request as JSON, if there is any request at all.
        payload = self.request.raw_request
        payload = loads(payload) if payload else None
        return payload

# ################################################################################################################################

    def handle(self, _internal=('zato', 'pub.zato')): # type: ignore

        # ODBService name is given in URL path
        service_name = self.request.http.params.service_name

        # Are we invoking a Zato built-in service or a user-defined one?
        is_internal = service_name.startswith(_internal) # type: bool

        # Before invoking a service that is potentially internal we need to confirm
        # that our channel can be used for such invocations.
        if is_internal:
            if self.channel.name not in self.server.fs_server_config.misc.service_invoker_allow_internal:
                msg = 'ODBService `%s` could not be invoked; channel `%s` not among `%s` (service_invoker_allow_internal)'
                self.logger.warning(
                    msg, service_name, self.channel.name, self.server.fs_server_config.misc.service_invoker_allow_internal)
                self.response.data_format = 'text/plain'
                # Reported as 'no such service' rather than 'forbidden' - do not reveal that the service exists.
                raise BadRequest(self.cid, 'No such service `{}`'.format(service_name))

        # Make sure the service exists
        if self.server.service_store.has_service(service_name):

            # Depending on HTTP verb used, we may need to look up input in different places
            if self.request.http.method == 'GET':
                payload = self.request.http.GET or self._extract_payload_from_request()
            else:
                payload = self._extract_payload_from_request()

            # A dictionary of headers that the target service may want to produce
            zato_response_headers_container = {}

            # Invoke the service now
            response = self.invoke(
                service_name,
                payload,
                wsgi_environ={'HTTP_METHOD':self.request.http.method},
                zato_response_headers_container=zato_response_headers_container
            )

            # All internal services wrap their responses in top-level elements that we need to shed here ..
            if is_internal and response:
                top_level = list(iterkeys(response))[0]
                response = response[top_level]

            # Take dataclass-based models into account
            response = response.to_dict() if isinstance(response, Model) else response

            # Assign response to outgoing payload
            self.response.payload = dumps(response)
            self.response.data_format = 'application/json'

            # Pass on any headers the target service produced.
            self.response.headers.update(zato_response_headers_container)

        # No such service as given on input
        else:
            self.response.data_format = 'text/plain'
            raise BadRequest(self.cid, 'No such service `{}`'.format(service_name))
# ################################################################################################################################
# ################################################################################################################################
class RPCServiceInvoker(AdminService):
    """ An invoker making use of the API that Redis-based communication used to use.
    """
    def handle(self):
        # Hand the raw broker message over to the server for dispatching.
        msg = self.request.raw_request
        self.server.on_broker_msg(msg)
# ################################################################################################################################
# ################################################################################################################################
| 41,093
|
Python
|
.py
| 729
| 46.224966
| 132
| 0.516915
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,350
|
memcached.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cache/memcached.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import CACHE
from zato.common.exception import BadRequest
from zato.common.odb.model import CacheMemcached
from zato.common.odb.query import cache_memcached_list
from zato.common.util.api import parse_extra_into_dict
from zato.server.service.internal import AdminService
from zato.server.service.internal.cache import common_instance_hook
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# Metadata consumed by the meta-programming layer (zato.server.service.meta)
# to generate the Create/Edit/Delete/GetList services below.
elem = 'cache_builtin' # NOTE(review): looks copied from the builtin cache module - confirm this should not be 'cache_memcached'
model = CacheMemcached
label = 'a Memcached cache definition'
get_list_docs = 'Memcached cache definitions'
broker_message = CACHE
broker_message_prefix = 'MEMCACHED_'
list_func = cache_memcached_list
skip_input_params = ['cache_id'] # Input elements that the generated services must not require
# ################################################################################################################################
def response_hook(self, input, _ignored, attrs, service_type):
    """ Adjusts responses so that cache_id and id are interchangeable for callers.
    """
    if service_type == 'create_edit':
        # Mirror the newly assigned ID under the cache_id alias.
        self.response.payload.cache_id = self.response.payload.id

    elif service_type == 'get_list':
        # Each list element exposes its cache_id under the generic id attribute.
        for item in self.response.payload:
            item.id = item.cache_id
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Runs the shared cache hook first, then per-operation handling for Memcached definitions.
    """
    # Shared logic, e.g. making sure only one default cache exists at a time.
    common_instance_hook(self, input, instance, attrs)

    if attrs.is_create_edit:
        # Parse extra arguments to confirm their syntax is correct,
        # output is ignored on purpose, we just want to validate it.
        parse_extra_into_dict(input.extra)
        return

    if attrs.is_delete:
        # The default cache must never be removed.
        if instance.is_default:
            raise BadRequest(self.cid, 'Cannot delete the default cache')
    else:
        # Any other operation - propagate the cache type back to the input.
        input.cache_type = instance.cache_type
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    # Implementation is generated entirely by GetListMeta from the module-level metadata.
    _filter_by = CacheMemcached.name, # Columns the generic list query may filter results by
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    # Implementation is generated entirely by CreateEditMeta.
    pass
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    # Implementation is generated entirely by CreateEditMeta.
    pass
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    # Implementation is generated entirely by DeleteMeta.
    pass
# ################################################################################################################################
| 3,163
|
Python
|
.py
| 62
| 47.548387
| 130
| 0.500975
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,351
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cache/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.odb.model import Cache
# ################################################################################################################################
def common_instance_hook(self, input, instance, attrs):
    """ A common instance hook that checks if the cache instance currently saved is the default one,
    and if so, finds all other definitions and make sure they are not default anymore.
    """
    # Only relevant when saving a definition flagged as the default one.
    if not (attrs.is_create_edit and instance.is_default):
        return

    session = attrs._meta_session

    # Disable autoflush so the in-progress instance is not flushed mid-query.
    with session.no_autoflush:
        other_defaults = session.query(Cache).\
            filter(Cache.is_default.is_(True)).\
            filter(Cache.id != instance.id)
        other_defaults.update({'is_default':False})
# ################################################################################################################################
| 1,015
|
Python
|
.py
| 19
| 47.631579
| 130
| 0.493428
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,352
|
entry.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cache/builtin/entry.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Arrow
from arrow import get as arrow_get
# Bunch
from bunch import bunchify
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems
from zato.common.py23_.past.builtins import basestring, long
# Zato
from zato.common.api import CACHE
from zato.common.exception import BadRequest
from zato.common.util.search import SearchResults
from zato.server.service import AsIs, Bool, Float, Int
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# ################################################################################################################################
# Attribute names that the cache keeps internally as float timestamps
# and which are converted to datetime / ISO-8601 before being returned.
time_keys = ('expires_at', 'last_read', 'prev_read', 'last_write', 'prev_write')
# ################################################################################################################################
class _Base(AdminService):
    """ Base class for services that access the contents of a given cache.
    """
    def _get_cache_by_input(self, needs_odb=False):

        # The ODB definition maps the cache ID given on input to its name.
        odb_cache = self.server.odb.get_cache_builtin(self.server.cluster_id, self.request.input.cache_id)

        # Return either the ODB definition itself or the live cache object it points to.
        if needs_odb:
            return odb_cache

        return self.cache.get_cache(CACHE.TYPE.BUILTIN, odb_cache.name)
# ################################################################################################################################
class GetList(_Base):
    """ Returns a list of entries from the cache given on input.
    """
    _filter_by = ('name',)

    class SimpleIO(GetListAdminSIO):
        input_required = (AsIs('cache_id'),)
        input_optional = GetListAdminSIO.input_optional + (Int('max_chars'),)
        output_required = (AsIs('cache_id'), 'key', 'position', 'hits', 'expiry_op', 'expiry_left', 'expires_at',
            'last_read', 'prev_read', 'last_write', 'prev_write', 'server')
        output_optional = ('value', 'chars_omitted')
        output_repeated = True

# ################################################################################################################################

    def _get_data_from_sliceable(self, sliceable, query_ctx, _time_keys=time_keys):
        """ Builds one page of results out of any sliceable sequence of cache entries.
        """
        max_chars = self.request.input.get('max_chars') or 30
        out = []

        now = self.time.utcnow(needs_format=False)

        # Compute the boundaries of the requested page.
        start = query_ctx.cur_page * query_ctx.page_size
        stop = start + query_ctx.page_size

        # Note: the previous implementation iterated via enumerate with an
        # unused index - a plain iteration is all that is needed.
        for item in sliceable[start:stop]:

            # Internally, time is kept as doubles so we need to convert it to a datetime object or null it out.
            for name in _time_keys:
                _value = item[name]
                if _value:
                    item[name] = arrow_get(_value)
                else:
                    item[name] = None
                del _value

            # Compute expiry since the last operation + the time left to expiry
            expiry = item.pop('expiry')
            if expiry:
                item['expiry_op'] = int(expiry)
                item['expiry_left'] = int((item['expires_at'] - now).total_seconds())
            else:
                item['expiry_op'] = None
                item['expiry_left'] = None

            # Now that we have worked with all the time keys needed, we can serialize them to the ISO-8601 format.
            for name in _time_keys:
                if item[name]:
                    item[name] = item[name].isoformat()

            # Shorten the value if it's possible, if it's not something else than a string/unicode
            value = item['value']
            if isinstance(value, basestring):
                len_value = len(value)
                chars_omitted = len_value - max_chars
                chars_omitted = chars_omitted if chars_omitted > 0 else 0

                if chars_omitted:
                    value = value[:max_chars]

                item['value'] = value
                item['chars_omitted'] = chars_omitted

            item['cache_id'] = self.request.input.cache_id
            item['server'] = '{} ({})'.format(self.server.name, self.server.pid)
            out.append(item)

        return SearchResults(None, out, None, len(sliceable))

# ################################################################################################################################

    def _filter_cache(self, query, cache):
        """ Returns entries matching the query's key (k:) and value (v:) substring criteria.
        """
        out = []
        key_criteria = []
        value_criteria = []

        # Split the query terms into key criteria, value criteria or, by default, key criteria.
        for item in query:
            if item.startswith('k:'):
                criterion = item.split('k:', 1)[1]
                key_criteria.append(criterion)
            elif item.startswith('v:'):
                criterion = item.split('v:', 1)[1]
                value_criteria.append(criterion)
            else:
                key_criteria.append(item)

        has_empty_key_criteria = len(key_criteria) == 0
        has_empty_value_criteria = len(value_criteria) == 0

        for key, entry in iteritems(cache):

            include_by_key = False
            include_by_value = False

            # All key criteria must match as substrings of the key ..
            if key_criteria:
                if all(item in key for item in key_criteria):
                    include_by_key = True

            # .. and all value criteria as substrings of the value.
            if value_criteria:
                if all(item in entry.value for item in value_criteria):
                    include_by_value = True

            # An empty criteria list on either side counts as a match on that side.
            if (include_by_key or has_empty_key_criteria) and (include_by_value or has_empty_value_criteria):
                out.append(entry.to_dict())

        return out

# ################################################################################################################################

    def _get_data(self, _ignored_session, _ignored_cluster_id, *args, **kwargs):

        # Get the cache object first
        cache = self._get_cache_by_input()
        query_ctx = bunchify(kwargs)
        query = query_ctx.get('query', None)

        # Without any query, simply return a slice of the underlying list from the cache object
        if not query:
            sliceable = cache
        else:
            sliceable = self._filter_cache(query, cache)

        return self._get_data_from_sliceable(sliceable, query_ctx)

# ################################################################################################################################

    def handle(self):
        self.response.payload[:] = self._search(self._get_data)
# ################################################################################################################################
class _CreateEdit(_Base):
    """ Base class for services that create new cache entries or update existing ones.
    """
    old_key_elem = '<invalid>' # Subclasses must override this with the input element holding the previous key
    new_key_elem = 'key'

    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'cache_id', 'key', 'value', Bool('replace_existing'))
        input_optional = ('key_data_type', 'value_data_type', Float('expiry'))

    def handle(self):

        key = self.request.input[self.new_key_elem]
        value = self.request.input.value or ''

        # Convert the key and value to integers if their declared data type requires it.
        key = int(key) if self.request.input.get('key_data_type') == CACHE.BUILTIN_KV_DATA_TYPE.INT.id else key
        value = int(value) if self.request.input.get('value_data_type') == CACHE.BUILTIN_KV_DATA_TYPE.INT.id else value

        expiry = self.request.input.get('expiry', None) or 0

        # Double check expiry is actually an integer
        try:
            int(expiry)
        except ValueError:
            raise BadRequest(self.cid, 'Expiry {} must be an integer instead of {}'.format(repr(expiry), type(expiry)))

        cache = self._get_cache_by_input()

        # Note that the try/except/else/set operation below is not atomic
        existing_value = cache.get(key)
        if existing_value:
            if not self.request.input.get('replace_existing'):
                raise BadRequest(self.cid, 'Key `{}` already exists with a value of `{}`'.format(key, existing_value))

        # If we get here it means the key doesn't exist or it's fine to overwrite it.
        cache.set(key, value, expiry)
# ################################################################################################################################
class Create(_CreateEdit):
    """ Creates a new entry in the cache given on input.
    """
    old_key_elem = 'key' # No rename on create - the old and new key elements are one and the same
# ################################################################################################################################
class Update(_CreateEdit):
    """ Updates an existing entry in the cache given on input.
    """
    old_key_elem = 'old_key'

    class SimpleIO(_CreateEdit.SimpleIO):
        input_optional = _CreateEdit.SimpleIO.input_optional + ('old_key',)

    def handle(self):
        # Store the new key/value first, reusing the shared create/edit logic ..
        super(Update, self).handle()

        # .. then, if the key changed, this was a rename, so the entry
        # stored under the previous key needs to be removed.
        input = self.request.input
        if input.old_key != input.key:
            self._get_cache_by_input().delete(input.old_key)
# ################################################################################################################################
class Get(_Base):
    """ Returns an individual entry from the cache given on input.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'cache_id', 'key')
        output_required = (Bool('key_found'),)
        output_optional = ('key', 'value', 'is_key_integer', 'is_value_integer', Float('expiry'))

    def handle(self):
        key = self.request.input.key
        entry = self._get_cache_by_input().get(key, details=True)

        payload = self.response.payload

        # No such key - report that and return early.
        if not entry:
            payload.key_found = False
            return

        payload.key_found = True
        payload.key = key
        payload.is_key_integer = isinstance(key, (int, long))
        payload.value = entry.value
        payload.is_value_integer = isinstance(entry.value, (int, long))
        payload.expiry = entry.expiry
# ################################################################################################################################
class Delete(_Base):
    """ Deletes an entry from the cache given on input.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'cache_id', 'key')
        output_required = (Bool('key_found'),)

    def handle(self):
        # Assume success - a KeyError from the cache means the key was absent.
        key_found = True
        try:
            self._get_cache_by_input().delete(self.request.input.key)
        except KeyError:
            key_found = False
        self.response.payload.key_found = key_found
# ################################################################################################################################
| 10,951
|
Python
|
.py
| 209
| 43.086124
| 130
| 0.514685
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,353
|
pubapi.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cache/builtin/pubapi.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from datetime import datetime
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems
# Zato
from zato.common.api import ZATO_NOT_GIVEN
from zato.common.exception import BadRequest, InternalServerError, NotFound
from zato.server.service import AsIs, Bool, Float, Service
# ################################################################################################################################
# All the metadata elements a cache-entry response may optionally carry
optional_keys = AsIs('key'), AsIs('value'), 'last_read', 'prev_read', 'last_write', 'prev_write', 'expiry', 'expires_at', \
    'hits', 'position'

# The subset of the above that holds UNIX timestamps needing conversion to ISO-8601
datetime_keys = 'last_read', 'prev_read', 'last_write', 'prev_write', 'expires_at'
# ################################################################################################################################
# Base class for all public-API cache services - provides cache lookup
# and conversion of entry metadata to JSON-friendly values.
class _BaseService(Service):
    class SimpleIO:
        response_elem = None
        input_required = ('key',)
        input_optional = (Bool('return_prev'),)
        output_optional = optional_keys
        skip_empty_keys = True
        allow_empty_required = True

    def _get_cache(self, input):
        # Use the default cache unless a specific one was named on input.
        cache_name = input.get('cache')
        if not cache_name:
            return self.cache.default

        try:
            return self.cache.builtin[cache_name]
        except KeyError:
            raise NotFound(self.cid, 'No such cache `{}`'.format(cache_name))

    def _convert_item_dict(self, item_dict, _utcfromtimestamp=datetime.utcfromtimestamp, _dt_keys=datetime_keys):
        # Turn UNIX timestamps into ISO-8601 strings, keeping missing ones as None ..
        for key in _dt_keys:
            timestamp = item_dict[key]
            item_dict[key] = _utcfromtimestamp(timestamp).isoformat() if timestamp else None

        # .. and normalize a zero or empty expiry to None.
        item_dict['expiry'] = item_dict['expiry'] or None

        return item_dict
# ################################################################################################################################
class SingleKeyService(_BaseService):
    """ Base class for cache services accepting a single key on input, except for Expiry which uses its own service.
    """
    class SimpleIO(_BaseService.SimpleIO):
        input_optional = _BaseService.SimpleIO.input_optional + ('cache', AsIs('value'), 'details', Float('expiry'))
        output_optional = _BaseService.SimpleIO.output_optional + ('prev_value',)

    def handle_GET(self):
        """ Returns cache entries by their keys.
        """
        input = self.request.input
        key = input['key']

        # NOTE(review): a falsy but explicit default (0, '') is treated here
        # as if no default was given at all - confirm this is intended.
        default = input['default'] if input.get('default') else ZATO_NOT_GIVEN
        details = self.request.input.get('details')

        result = self._get_cache(input).get(key, default, details)
        if result is not None:
            # The caller's default coming back means the key was not found
            if result == default:
                self.response.payload.value = result
            else:
                # With details on, a full metadata dict is returned, otherwise just the value
                self.response.payload = self._convert_item_dict(result.to_dict()) if details else {'value': result}
        else:
            raise NotFound(self.cid, 'No such key `{}`'.format(key))

    def handle_POST(self):
        """ Stores new cache entries or updates existing ones, including setting their expiry time.
        """
        input = self.request.input
        key = input['key']
        cache = self._get_cache(input)

        # This is an update of value and, possibly, an entry's expiry.
        # NOTE(review): a falsy value ('' or 0) routes the request to the
        # expiry-only branch below - confirm this is intended API behavior.
        if input.get('value'):
            prev_value = cache.set(input['key'], input.value, input.get('expiry') or 0.0)
            if input.get('return_prev'):
                self.response.payload.prev_value = prev_value

        # We only update the expiry time
        else:
            if not input.get('expiry'):
                raise BadRequest(self.cid, 'At least one of `value` or `expiry` is needed on input')
            else:
                found_key = cache.expire(input['key'], input.expiry)
                if not found_key:
                    raise NotFound(self.cid, 'No such key `{}`'.format(key))

    def handle_DELETE(self):
        """ Deletes already existing cache entries
        """
        input = self.request.input
        key = input['key']
        try:
            # delete returns the previous value, raising KeyError for unknown keys
            prev_value = self._get_cache(input).delete(key)
        except KeyError:
            raise NotFound(self.cid, 'No such key `{}`'.format(key))
        else:
            if input.get('return_prev'):
                self.response.payload.prev_value = prev_value
# ################################################################################################################################
# Base class for services operating on multiple keys at a time, e.g. all
# keys matching a prefix, suffix or a regular expression.
class _Multi(_BaseService):
    action = None

    class SimpleIO(_BaseService.SimpleIO):
        input_optional = _BaseService.SimpleIO.input_optional + ('expiry', 'value')
        output_optional = ('key', 'prev_value',)
        output_repeated = True

    def handle(self):
        input = self.request.input
        return_prev = input.get('return_prev')
        cache = self._get_cache(input)

        # Input for all Get* commands.
        # NOTE(review): previously the 'get' action was missing here, which made
        # every Get* subclass fall through to the error branch below and raise
        # InternalServerError. The assumed signature is func(key) - confirm
        # against the cache API, including whether results should be returned
        # beyond the return_prev case handled at the end of this method.
        if self.action == 'get':
            args = (input['key'],)

        # Input for all Set* commands
        elif self.action == 'set':
            args = (input['key'], input.value, input.get('expiry') or 0.0, return_prev)

        # Input for all Delete* commands
        elif self.action == 'delete':
            args = (input['key'], return_prev)

        # Input for all Expire* commands
        elif self.action == 'expire':
            args = (input['key'], input.expiry)

        # Must be extended if more commands are added in the future
        else:
            # Do not return too much information to the caller - but store it in logs nevertheless.
            self.logger.warning('Invalid internal action found `%s` in `%s` (%s)', self.action, self.name, self.cid)
            raise InternalServerError(self.cid, 'Invalid internal action found')

        result = self._get_cache_func(cache)(*args)

        if return_prev:
            self.response.payload[:] = [{'key':key, 'prev_value':value} for key, value in iteritems(result)]

    def _get_cache_func(self, cache):
        raise NotImplementedError('Must be implemented in subclasses')
# ################################################################################################################################
# Intermediate classes binding each family of multi-key services
# to the action dispatched on in _Multi.handle.

class _GetMulti(_Multi):
    action = 'get'

class _SetMulti(_Multi):
    action = 'set'

class _DeleteMulti(_Multi):
    action = 'delete'

class _ExpireMulti(_Multi):
    action = 'expire'
# ################################################################################################################################
# Each class below merely selects which multi-key lookup method of the
# underlying cache object to invoke - the actual handling lives in _Multi.

class GetByPrefix(_GetMulti):
    def _get_cache_func(self, cache):
        return cache.get_by_prefix

class GetBySuffix(_GetMulti):
    def _get_cache_func(self, cache):
        return cache.get_by_suffix

class GetByRegex(_GetMulti):
    def _get_cache_func(self, cache):
        return cache.get_by_regex

class GetContains(_GetMulti):
    def _get_cache_func(self, cache):
        return cache.get_contains

class GetNotContains(_GetMulti):
    def _get_cache_func(self, cache):
        return cache.get_not_contains

class GetContainsAll(_GetMulti):
    def _get_cache_func(self, cache):
        return cache.get_contains_all

class GetContainsAny(_GetMulti):
    def _get_cache_func(self, cache):
        return cache.get_contains_any
# ################################################################################################################################
# Each class below merely selects which multi-key write method of the
# underlying cache object to invoke - the actual handling lives in _Multi.

class SetByPrefix(_SetMulti):
    def _get_cache_func(self, cache):
        return cache.set_by_prefix

class SetBySuffix(_SetMulti):
    def _get_cache_func(self, cache):
        return cache.set_by_suffix

class SetByRegex(_SetMulti):
    def _get_cache_func(self, cache):
        return cache.set_by_regex

class SetContains(_SetMulti):
    def _get_cache_func(self, cache):
        return cache.set_contains

class SetNotContains(_SetMulti):
    def _get_cache_func(self, cache):
        return cache.set_not_contains

class SetContainsAll(_SetMulti):
    def _get_cache_func(self, cache):
        return cache.set_contains_all

class SetContainsAny(_SetMulti):
    def _get_cache_func(self, cache):
        return cache.set_contains_any
# ################################################################################################################################
# Each class below merely selects which multi-key delete method of the
# underlying cache object to invoke - the actual handling lives in _Multi.

class DeleteByPrefix(_DeleteMulti):
    def _get_cache_func(self, cache):
        return cache.delete_by_prefix

class DeleteBySuffix(_DeleteMulti):
    def _get_cache_func(self, cache):
        return cache.delete_by_suffix

class DeleteByRegex(_DeleteMulti):
    def _get_cache_func(self, cache):
        return cache.delete_by_regex

class DeleteContains(_DeleteMulti):
    def _get_cache_func(self, cache):
        return cache.delete_contains

class DeleteNotContains(_DeleteMulti):
    def _get_cache_func(self, cache):
        return cache.delete_not_contains

class DeleteContainsAll(_DeleteMulti):
    def _get_cache_func(self, cache):
        return cache.delete_contains_all

class DeleteContainsAny(_DeleteMulti):
    def _get_cache_func(self, cache):
        return cache.delete_contains_any
# ################################################################################################################################
# Each class below merely selects which multi-key expiry method of the
# underlying cache object to invoke - the actual handling lives in _Multi.

class ExpireByPrefix(_ExpireMulti):
    def _get_cache_func(self, cache):
        return cache.expire_by_prefix

class ExpireBySuffix(_ExpireMulti):
    def _get_cache_func(self, cache):
        return cache.expire_by_suffix

class ExpireByRegex(_ExpireMulti):
    def _get_cache_func(self, cache):
        return cache.expire_by_regex

class ExpireContains(_ExpireMulti):
    def _get_cache_func(self, cache):
        return cache.expire_contains

class ExpireNotContains(_ExpireMulti):
    def _get_cache_func(self, cache):
        return cache.expire_not_contains

class ExpireContainsAll(_ExpireMulti):
    def _get_cache_func(self, cache):
        return cache.expire_contains_all

class ExpireContainsAny(_ExpireMulti):
    def _get_cache_func(self, cache):
        return cache.expire_contains_any
# ################################################################################################################################
| 10,755
|
Python
|
.py
| 228
| 40.074561
| 130
| 0.549952
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,354
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/cache/builtin/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# dictalchemy
from dictalchemy.utils import asdict
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.api import CACHE as _COMMON_CACHE
from zato.common.broker_message import CACHE
from zato.common.odb.model import CacheBuiltin
from zato.common.odb.query import cache_builtin_list
from zato.server.service import Bool, Int
from zato.server.service.internal import AdminService, AdminSIO
from zato.server.service.internal.cache import common_instance_hook
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# Configuration consumed by the service meta-classes below
# (GetListMeta, CreateEditMeta, DeleteMeta).
elem = 'cache_builtin'
model = CacheBuiltin
label = 'a built-in cache definition'
get_list_docs = 'built-in cache definitions'
broker_message = CACHE
broker_message_prefix = 'BUILTIN_'
list_func = cache_builtin_list
skip_create_integrity_error = True
skip_if_exists = True

# cache_id is not part of the model's own input - it is handled by the hooks below
skip_input_params = ['cache_id']
output_optional_extra = ['current_size', 'cache_id']
# ################################################################################################################################
def instance_hook(self, input, instance, attrs):
    """ Invoked by the meta-classes for each Create/Edit operation.
    """
    # Shared pre-processing first ..
    common_instance_hook(self, input, instance, attrs)

    # .. then make sure edits have access to cache_id. Web-admin provides it
    # on input but enmasse does not, in which case it is read from the database.
    if input.get('cache_id'):
        return

    if attrs.is_edit:
        session = attrs._meta_session
        with session.no_autoflush:
            result = session.query(CacheBuiltin.cache_id).\
                filter(CacheBuiltin.id==input.id).\
                filter(CacheBuiltin.cluster_id==self.server.cluster_id).\
                one()
            instance.cache_id = result.cache_id
# ################################################################################################################################
def response_hook(self, input, _ignored, attrs, service_type):
    """ Post-processes responses produced by the meta-class-generated services.
    """
    if service_type == 'create_edit':
        self.response.payload.cache_id = self.response.payload.id

    elif service_type == 'get_list':
        for item in self.response.payload:
            # Each item certainly exists in the database - otherwise we would
            # not be iterating over it - yet it may be missing from RAM, e.g.
            # when enmasse runs against a fresh cluster whose in-RAM storage
            # has not caught up with the database yet, hence the KeyError guard.
            try:
                item.current_size = self.cache.get_size(_COMMON_CACHE.TYPE.BUILTIN, item.name)
            except KeyError:
                item.current_size = 0
# ################################################################################################################################
def broker_message_hook(self, input, instance, attrs, service_type):
    """ Enriches broker messages before they are published to other servers.
    """
    if service_type == 'delete':
        # Receiving servers need the cache type to know which in-RAM cache to drop
        input.cache_type = _COMMON_CACHE.TYPE.BUILTIN
# ################################################################################################################################
class Get(AdminService):
    """ Returns configuration of a cache definition.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'cache_id')
        output_required = ('name', 'is_active', 'is_default', 'cache_type', Int('max_size'), Int('max_item_size'),
            Bool('extend_expiry_on_get'), Bool('extend_expiry_on_set'), 'sync_method', 'persistent_storage',
            Int('current_size'))

    def handle(self):
        # Read the definition from the database ..
        db_object = self.server.odb.get_cache_builtin(self.server.cluster_id, self.request.input.cache_id)
        out = asdict(db_object)

        # .. enrich it with the number of entries currently held in RAM ..
        out['current_size'] = self.cache.get_size(_COMMON_CACHE.TYPE.BUILTIN, out['name'])

        # .. and return everything to the caller.
        self.response.payload = out
# ################################################################################################################################
# The four services below have their implementations generated by their
# respective meta-classes, driven by the module-level configuration above.

@add_metaclass(GetListMeta)
class GetList(AdminService):
    _filter_by = CacheBuiltin.name,

@add_metaclass(CreateEditMeta)
class Create(AdminService):
    pass

@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    pass

@add_metaclass(DeleteMeta)
class Delete(AdminService):
    pass
# ################################################################################################################################
class Clear(AdminService):
    """ Clears out a cache by its ID - deletes all keys and values.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'cache_id')

    def handle(self):
        # The ID must first be mapped to a name - the in-RAM cache API is keyed by name
        cache = self.server.odb.get_cache_builtin(self.server.cluster_id, self.request.input.cache_id)
        self.cache.clear(_COMMON_CACHE.TYPE.BUILTIN, cache.name)
# ################################################################################################################################
| 5,613
|
Python
|
.py
| 104
| 48.461538
| 130
| 0.532895
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,355
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/hot_deploy/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
import shutil
from contextlib import closing
from dataclasses import dataclass
from datetime import datetime
from errno import ENOENT
from json import loads
from time import sleep
from traceback import format_exc
# Zato
from zato.common.api import DEPLOYMENT_STATUS, KVDB
from zato.common.json_internal import dumps
from zato.common.odb.model import DeploymentPackage, DeploymentStatus
from zato.common.typing_ import cast_
from zato.common.util.api import is_python_file, is_archive_file
from zato.common.util.file_system import fs_safe_now, touch_multiple
from zato.common.util.python_ import import_module_by_path
from zato.server.service import AsIs
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, anylist, anylistnone, commoniter, intlist, strbytes, strlist, strset
from zato.server.service.store import InRAMService
InRAMService = InRAMService
# ################################################################################################################################
# Hard upper limit on the number of backups kept in the linear log,
# regardless of the configured backup_history
MAX_BACKUPS = 1000

_first_prefix = '0' * (len(str(MAX_BACKUPS)) - 1) # So it runs from, e.g., 000 to 999
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class DeploymentCtx:
    """ Aggregates the outcome of deploying objects from a single file.
    """
    model_name_list: 'strlist'   # Unique names of models deployed
    service_id_list: 'intlist'   # IDs the deployed services were stored under
    service_name_list: 'strlist' # Names of the deployed services, parallel to the IDs
# ################################################################################################################################
# ################################################################################################################################
class Create(AdminService):
""" Creates all the filesystem directories and files out of a deployment package stored in the ODB.
"""
class SimpleIO(AdminSIO):
    request_elem = 'zato_hot_deploy_create_request'
    response_elem = 'zato_hot_deploy_create_response'
    input_required = ('package_id',)
    input_optional = ('is_startup',)
    output_optional = (AsIs('services_deployed'),)
# ################################################################################################################################
def _delete(self, items:'commoniter') -> 'None':
    """ Removes each path given on input, be it a file or a directory.
    """
    for path in items:
        if os.path.isfile(path):
            os.remove(path)
        elif os.path.isdir(path):
            shutil.rmtree(path)
        else:
            # Neither a file nor a directory - log details for later inspection
            self.logger.warning(
                'Could not delete `%s`, it is neither file nor a directory, stat:`%s`', path, os.stat(path))
# ################################################################################################################################
def _backup_last(self, fs_now:'str', current_work_dir:'str', backup_format:'str', last_backup_work_dir:'str') -> 'None':
    """ Archives the current work directory into the special 'last backup' directory,
    replacing whatever was stored there before.
    """
    # Snapshot the directory's current contents before the new archive lands in it
    previous_items = [os.path.join(last_backup_work_dir, name) for name in os.listdir(last_backup_work_dir)]

    # Create the new archive ..
    archive_path = os.path.join(last_backup_work_dir, fs_now)
    _ = shutil.make_archive(archive_path, backup_format, current_work_dir, verbose=True, logger=None)

    # .. and only then remove everything found there previously.
    self._delete(previous_items)
# ################################################################################################################################
def _backup_linear_log(
    self,
    fs_now, # type: str
    current_work_dir, # type: str
    backup_format, # type: str
    backup_work_dir, # type: str
    backup_history, # type: int
) -> 'None':
    """ Appends an archive of the current work directory to the linear log of backups,
    wrapping around and starting afresh once the configured limit is reached.
    """
    # Never exceed the hard upper limit, no matter the configuration
    max_backups = min(backup_history, MAX_BACKUPS)

    # Tally the existing backups - files only; each file in this directory
    # is assumed to be one of ours, so it can be safely dropped if need be.
    existing = []
    for name in os.listdir(backup_work_dir):
        full_path = os.path.join(backup_work_dir, name)
        if os.path.isfile(full_path):
            existing.append(full_path)

    len_existing = len(existing)

    # A fresh prefix is needed both for the very first backup and when the limit was reached
    if (not len_existing) or (len_existing >= max_backups):
        next_prefix = _first_prefix
    else:
        next_prefix = str(len_existing).zfill(len(_first_prefix))

    # Create the new archive under a monotonically-prefixed name
    archive_path = os.path.join(backup_work_dir, '{}-{}'.format(next_prefix, fs_now))
    _ = shutil.make_archive(archive_path, backup_format, current_work_dir, verbose=True, logger=None)

    # Start the log anew if the limit was hit - but only after the new archive is safely in place
    if len_existing >= max_backups:
        self._delete(existing)
# ################################################################################################################################
def backup_current_work_dir(self):
    """ Archives the current work directory - both as the always-overwritten
    'last backup' and as a new entry in the linear log of backups.
    """
    config = self.server.hot_deploy_config

    # Safe to use as a directory name
    fs_now = fs_safe_now()

    # The special 'last' backup first ..
    self._backup_last(fs_now, config.current_work_dir, config.backup_format, config.last_backup_work_dir)

    # .. followed by the same contents appended to the linear log.
    self._backup_linear_log(
        fs_now, config.current_work_dir, config.backup_format, config.backup_work_dir, config.backup_history)
# ################################################################################################################################
def _redeploy_module_dependencies(self, file_name:'str') -> 'None':
    """ Re-deploys every file that imports the module stored under file_name.
    """
    # Reload the module so its newest contents is in sys path ..
    mod_info = import_module_by_path(file_name)

    # .. and, provided the reload succeeded, touch each file that makes use
    # of the module - whether it holds services or models - which triggers
    # their re-deployment.
    if mod_info:
        dependants = self.server.service_store.get_module_importers(mod_info.name)
        touch_multiple(dependants)
# ################################################################################################################################
def _deploy_models(self, current_work_dir:'str', model_file_name:'str') -> 'strset':
    """ Imports all the model classes from a file, returning the set of their unique names.
    """
    # Details of every model class deployed from the file
    model_info_list = self.server.service_store.import_models_from_file(model_file_name, False, current_work_dir)

    # Unique names only
    model_name_list = {item.name for item in model_info_list}

    # If anything was deployed, modules depending on this file need re-deployment too
    if model_name_list:
        self._redeploy_module_dependencies(model_file_name)

    return model_name_list
# ################################################################################################################################
def _deploy_services(self, current_work_dir:'str', service_file_name:'str') -> 'intlist':
    """ Imports all the services from a file, returning the IDs they were deployed under.
    """
    out:'intlist' = []

    # Details of all the services found in the file
    service_info_list = self.server.service_store.import_services_from_anywhere(service_file_name, current_work_dir)

    for service in service_info_list.to_process: # type: ignore
        service = cast_('InRAMService', service)

        # Map the implementation name to the ID the service was deployed under ..
        out.append(self.server.service_store.impl_name_to_id[service.impl_name])

        # .. and re-deploy everything that depends on the service's module.
        self._redeploy_module_dependencies(service.source_code_info.path)

    return out
# ################################################################################################################################
def _deploy_file(
    self,
    current_work_dir, # type: str
    payload, # type: any_
    file_name, # type: str
    should_deploy_in_place # type: bool
) -> 'DeploymentCtx':
    """ Deploys models and services from a single file, returning a context
    object that describes everything deployed.
    """
    # Unless the file is already in place, the payload needs to be written out first
    if not should_deploy_in_place:
        with open(file_name, 'wb') as f:
            _ = f.write(payload)

    ctx = DeploymentCtx()
    ctx.model_name_list = self._deploy_models(current_work_dir, file_name) # type: ignore
    ctx.service_id_list = self._deploy_services(current_work_dir, file_name)
    ctx.service_name_list = [self.server.service_store.get_service_name_by_id(elem) for elem in ctx.service_id_list]

    return ctx
# ################################################################################################################################
def _deploy_package(
    self,
    session, # type: any_
    package_id, # type: int
    payload_name, # type: str
    payload, # type: strbytes
    should_deploy_in_place, # type: bool
    in_place_dir_name # type: str
) -> 'anylistnone':
    """ Deploy a package, either a plain Python file or an archive, and update
    the deployment status. Returns the IDs of the services deployed, if any.
    """
    # In-place deployment reuses the file's original directory, otherwise
    # the server's hot-deployment work directory is used.
    if should_deploy_in_place:
        work_dir = in_place_dir_name
    else:
        work_dir:'str' = self.server.hot_deploy_config.current_work_dir

    file_name = os.path.join(work_dir, payload_name)

    # Deploy some objects of interest from the file ..
    ctx = self._deploy_file(work_dir, payload, file_name, should_deploy_in_place)

    # We enter here if there were some models or services that we deployed ..
    if ctx.model_name_list or ctx.service_name_list:

        # .. no matter what kind of objects we found, the package has been deployed ..
        self._update_deployment_status(session, package_id, DEPLOYMENT_STATUS.DEPLOYED)

        # .. report any models found ..
        if ctx.model_name_list:
            self._report_deployment(file_name, ctx.model_name_list, 'model')

        # .. report any services found ..
        if ctx.service_name_list:
            self._report_deployment(file_name, ctx.service_name_list, 'service')

        # .. our callers only need services ..
        return ctx.service_id_list

    # .. we could not find anything to deploy in the file.
    else:
        # Log only if payload does not point to our own store.py module.
        if payload_name != 'store.py':
            msg = 'No services nor models were deployed from module `%s`'
            self.logger.info(msg, payload_name)
# ################################################################################################################################
def _report_deployment(self, file_name:'str', items:'anylist', noun:'str') -> 'None':
    """ Logs information about objects (models or services) deployed from a file.
    """
    count = len(items)

    # Pluralize the noun when more than one item was deployed
    plural_suffix = 's ' if count > 1 else ' '

    # The noun is embedded eagerly, the remaining placeholders are filled in lazily by the logger
    template = 'Deployed %s {}%sfrom `%s` -> %s'.format(noun)

    self.logger.info(template, count, plural_suffix, file_name, sorted(items))
# ################################################################################################################################
def _update_deployment_status(self, session:'SASession', package_id:'int', status:'str') -> 'None':
    """ Persists a new deployment status for the given package on this server.
    """
    # There is exactly one status row per (package, server) pair - .one() enforces that
    ds = session.query(DeploymentStatus).\
        filter(DeploymentStatus.package_id==package_id).\
        filter(DeploymentStatus.server_id==self.server.id).\
        one()
    ds.status = status
    ds.status_change_time = datetime.utcnow()

    session.add(ds)
    session.commit()
# ################################################################################################################################
def deploy_package(self, package_id:'int', session:'SASession') -> 'any_':
    """ Deploys a single package by its ID, provided it is a Python file or an archive.
    """
    dp = self.get_package(package_id, session)
    if not dp:
        return

    # The JSON details carry the flag telling us whether to hot-deploy in place or not
    details = loads(dp.details)
    should_deploy_in_place = details['should_deploy_in_place']
    in_place_dir_name = os.path.dirname(details['fs_location'])

    if is_archive_file(dp.payload_name) or is_python_file(dp.payload_name):
        return self._deploy_package(session, package_id, dp.payload_name, dp.payload,
            should_deploy_in_place, in_place_dir_name)

    # This shouldn't really happen at all because the pickup notifier is to
    # filter such things out but life is full of surprises
    self._update_deployment_status(session, package_id, DEPLOYMENT_STATUS.IGNORED)

    # Log a message but only on a debug level
    self.logger.debug(
        'Ignoring package id:`%s`, payload_name:`%s`, not a Python file nor an archive', dp.id, dp.payload_name)
# ################################################################################################################################
def get_package(self, package_id:'int', session:'SASession') -> 'DeploymentPackage | None':
    """ Returns the deployment package matching the input ID, or None if there is no such package.
    """
    query = session.query(DeploymentPackage)
    query = query.filter(DeploymentPackage.id==package_id)
    return query.first()
# ################################################################################################################################
def handle(self):
    """ Entry point - hot-deploys the input package on this server.

    A distributed lock plus an 'already deployed' flag ensure that exactly one
    worker process backs up the current work directory, while every worker
    deploys the package locally and runs post-deploy hooks where requested.
    """
    package_id = self.request.input.package_id
    server_token = self.server.fs_server_config.main.token
    lock_name = '{}{}:{}'.format(KVDB.LOCK_PACKAGE_UPLOADING, server_token, package_id)
    already_deployed_flag = '{}{}:{}'.format(KVDB.LOCK_PACKAGE_ALREADY_UPLOADED, server_token, package_id)

    # TODO: Stuff below - and the methods used - needs to be rectified.
    # As of now any worker process will always set deployment status
    # to DEPLOYMENT_STATUS.DEPLOYED but what we really want is per-worker
    # reporting of whether the deployment succeeded or not.

    ttl = self.server.deployment_lock_expires
    block = self.server.deployment_lock_timeout

    # Now, it's possible we don't have the broker_client yet - this will happen if we are deploying
    # missing services found on other servers during our own server's startup. In that case we just
    # need to wait a moment for the server we are on to fully initialize.
    while not self.server.broker_client:
        sleep(0.2)

    with self.lock(lock_name, ttl, block):
        with closing(self.odb.session()) as session:
            try:
                # Only one of workers will get here ..
                if not self.server.kv_data_api.get(already_deployed_flag):
                    self.backup_current_work_dir()

                    # The flag carries its creation time and expires together with the lock TTL
                    self.server.kv_data_api.set(
                        already_deployed_flag,
                        dumps({'create_time_utc':datetime.utcnow().isoformat()}),
                        self.server.deployment_lock_expires,
                    )

                # .. all workers get here.
                services_deployed = self.deploy_package(self.request.input.package_id, session) or []

                # Go through all services deployed, check if any needs post-processing
                # and if does, call the relevant function and clear the flag.
                service_store = self.server.service_store
                needs_post_deploy_attr = service_store.needs_post_deploy_attr

                for service_id in services_deployed:
                    service_info = service_store.get_service_info_by_id(service_id)
                    class_ = service_info['service_class']
                    if getattr(class_, needs_post_deploy_attr, None):
                        service_store.post_deploy(class_)
                        delattr(class_, needs_post_deploy_attr)

                self.response.payload.services_deployed = services_deployed

            except OSError as e:
                # ENOENT can happen if files disappear mid-deployment - log and carry on
                if e.errno == ENOENT:
                    self.logger.debug('Caught ENOENT e:`%s`', format_exc())
                else:
                    raise
# ################################################################################################################################
# ################################################################################################################################
| 18,361
|
Python
|
.py
| 310
| 49.148387
| 130
| 0.526416
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,356
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/ide_deploy/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from traceback import format_exc
# Zato
from zato.common.api import DATA_FORMAT
from zato.common.json_internal import dumps
from zato.server.service import Service
class Create(Service):
    """ Like zato.hot-deploy.create but returns an empty string if input payload is empty to let IDEs test server connections.
    """
    class SimpleIO:
        request_elem = 'zato_ide_deploy_create_request'
        response_elem = 'zato_ide_deploy_create_response'
        input_optional = ('payload_name', 'payload')
        output_required = ('success', 'msg')

    def handle(self):
        """ Validates the input and hands deployment over to zato.service.upload-package.
        """
        # An empty payload is a connection test from an IDE - confirm it immediately
        if not self.request.payload:
            self.response.payload.success = True
            self.response.payload.msg = 'Ping succeeded.'
            return

        payload_name = self.request.payload.get('payload_name')
        payload = self.request.payload.get('payload')

        # Both elements are needed for an actual deployment
        if not (payload and payload_name):
            self.response.payload.success = False
            self.response.payload.msg = 'Both "payload" and "payload_name" fields are required.'
            return

        # Augment the input with the cluster we are deploying to
        new_payload = dict(self.request.payload, cluster_id=self.server.cluster_id)

        try:
            self.invoke('zato.service.upload-package', dumps(new_payload), data_format=DATA_FORMAT.JSON)
        except Exception as e:
            # Fixed: format_exc takes no exception argument - the previous call of
            # format_exc(e) would itself raise a TypeError while handling the error.
            self.logger.warning('Could not invoke zato.service.upload-package, e:`%s`', format_exc())
            self.response.payload.success = False
            self.response.payload.msg = 'Deployment failed: {}'.format(e)
            return

        self.response.payload.success = True
        self.response.payload.msg = 'Deployment started: please check server log for status.'
| 1,938
|
Python
|
.py
| 41
| 39.878049
| 126
| 0.678855
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,357
|
publish.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/publish.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.service import AsIs, Int, List
from zato.server.service.internal import AdminService
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anytuple
# ################################################################################################################################
# ################################################################################################################################
class Publish(AdminService):
    """ Actual implementation of message publishing exposed through other services to the outside world.
    """
    call_hooks = False

    class SimpleIO:
        input_required = ('topic_name',)
        input_optional = (AsIs('data'), List('data_list'), AsIs('msg_id'), Int('priority'), Int('expiration'),
            'mime_type', AsIs('correl_id'), 'in_reply_to', AsIs('ext_client_id'), 'ext_pub_time', 'pub_pattern_matched',
            'endpoint_id', 'endpoint_name', 'security_id', 'security_name', 'ws_channel_id', 'data_parsed', AsIs('group_id'),
            Int('position_in_group'), List('reply_to_sk'), List('deliver_to_sk'), 'user_ctx', AsIs('zato_ctx'),
            AsIs('has_gd')) # type: anytuple
        output_optional = (AsIs('msg_id'), List('msg_id_list')) # type: anytuple

    def handle(self):
        """ Delegates publication to the pub/sub publisher and maps its result to our response.
        """
        result = self.pubsub.impl_publisher.run_from_dict(self.cid, self.request.input)

        # Nothing to assign if the publisher returned nothing
        if not result:
            return

        # A single string is one message ID, anything else is a list of IDs
        if isinstance(result, str):
            self.response.payload.msg_id = result
        else:
            self.response.payload.msg_id_list = result
# ################################################################################################################################
| 2,390
|
Python
|
.py
| 37
| 58.27027
| 130
| 0.431624
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,358
|
queue.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/queue.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.query.pubsub.queue import acknowledge_delivery, get_messages, get_queue_depth_by_sub_key
from zato.common.util.time_ import datetime_from_ms, utcnow_as_ms
from zato.server.service import AsIs, Dict, List
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anytuple
# ################################################################################################################################
# ################################################################################################################################
_batch_size=PUBSUB.DEFAULT.GET_BATCH_SIZE
# ################################################################################################################################
# ################################################################################################################################
class GetMessages(AdminService):
    """ Returns a list of messages available for a given sub_key.
    Returns up to batch_size messages, if batch_size is not given, it is equal to 100.
    """
    class SimpleIO(AdminSIO):
        input_required = 'sub_key'
        input_optional = 'batch_size'
        output_optional = AsIs('msg_id'), AsIs('correl_id'), 'in_reply_to', 'priority', 'size', \
            'data_format', 'mime_type', 'data', 'expiration', 'expiration_time', 'ext_client_id', 'topic_name', \
            'recv_time', 'delivery_count' # type: anytuple
        output_repeated = True

    def handle(self) -> 'None':
        request = self.request.input
        max_messages = request.batch_size or _batch_size

        with closing(self.odb.session()) as session:
            result = get_messages(session, self.server.cluster_id, request.sub_key, max_messages, utcnow_as_ms())

            # Each row becomes one dict in the repeated response
            for msg in result:
                self.response.payload.append({
                    'msg_id': msg.msg_id,
                    'correl_id': msg.correl_id,
                    'in_reply_to': msg.in_reply_to,
                    'priority': msg.priority,
                    'size': msg.size,
                    'data_format': msg.data_format,
                    'mime_type': msg.mime_type,
                    'data': msg.data,
                    'expiration': msg.expiration,
                    'expiration_time': datetime_from_ms(msg.expiration_time),
                    'ext_client_id': msg.ext_client_id,
                    'ext_pub_time': datetime_from_ms(msg.ext_pub_time) if msg.ext_pub_time else None,
                    'topic_name': msg.topic_name,
                    'recv_time': datetime_from_ms(msg.recv_time),
                    'delivery_count': msg.delivery_count,
                })

            # We need to commit the session because the underlying query issued SELECT FOR UPDATE
            session.commit()
# ################################################################################################################################
class AcknowledgeDelivery(AdminService):
    """ Invoked by API clients to confirm that delivery of all messages from input msg_id_list was successful.
    """
    class SimpleIO(AdminSIO):
        input_required = 'sub_key', List('msg_id_list') # type: anytuple

    def handle(self) -> 'None':
        request = self.request.input

        # No point in touching the database if there is nothing to acknowledge
        if not request.msg_id_list:
            return

        with closing(self.odb.session()) as session:

            # Call SQL UPDATE ..
            acknowledge_delivery(session, self.server.cluster_id, request.sub_key, request.msg_id_list, utcnow_as_ms())

            # .. and confirm the transaction
            session.commit()
# ################################################################################################################################
class GetQueueDepthBySubKey(AdminService):
    """ For each sub_key given on input, return depth of its associated message queue.
    """
    class SimpleIO(AdminSIO):
        input_optional = 'sub_key', List('sub_key_list') # type: anytuple
        output_optional = Dict('queue_depth'), # type: anytuple

    def handle(self) -> 'None':
        input = self.request.input
        input.require_any('sub_key', 'sub_key_list')

        # Support both on input but always pass on a list further on
        if input.sub_key:
            sub_key_list = [input.sub_key]
        else:
            sub_key_list = input.sub_key_list

        with closing(self.odb.session()) as session:
            depths = {
                sub_key: get_queue_depth_by_sub_key(session, self.server.cluster_id, sub_key, utcnow_as_ms())
                for sub_key in sub_key_list
            }

        self.response.payload.queue_depth = depths
# ################################################################################################################################
| 5,416
|
Python
|
.py
| 93
| 48.870968
| 130
| 0.481768
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,359
|
message.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/message.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from copy import deepcopy
# Bunch
from bunch import Bunch
# SQLAlchemy
from sqlalchemy import and_, exists
# Zato
from zato.common.exception import NotFound
from zato.common.odb.model import PubSubTopic, PubSubEndpoint, PubSubEndpointEnqueuedMessage, PubSubEndpointTopic, PubSubMessage
from zato.common.odb.query import pubsub_message, pubsub_queue_message
from zato.common.typing_ import cast_
from zato.common.util.pubsub import get_expiration, get_priority
from zato.common.util.time_ import datetime_from_ms, utcnow_as_ms
from zato.server.service import AsIs, Bool, Int
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, stranydict
# ################################################################################################################################
# ################################################################################################################################
# Shortcuts to frequently used SQLAlchemy constructs - the .insert callables
# build INSERT statements while the bare __table__ objects are used in queries.
MsgInsert = PubSubMessage.__table__.insert
EndpointTopicInsert = PubSubEndpointTopic.__table__.insert
EnqueuedMsgInsert = PubSubEndpointEnqueuedMessage.__table__.insert

Topic = PubSubTopic.__table__
Endpoint = PubSubEndpoint.__table__
EndpointTopic = PubSubEndpointTopic.__table__
# ################################################################################################################################
class _GetSIO(AdminSIO):
    """ Shared SimpleIO definition for services that return details of a single pub/sub message.
    """
    input_required = (AsIs('msg_id'),)
    output_optional = ('topic_id', 'topic_name', AsIs('msg_id'), AsIs('correl_id'), 'in_reply_to', 'pub_time', \
        'ext_pub_time', 'pub_pattern_matched', 'sub_pattern_matched', 'priority', 'data_format', 'mime_type', 'size', 'data',
        'expiration', 'expiration_time', 'endpoint_id', 'endpoint_name', 'recv_time', 'pub_hook_service_id',
        'pub_hook_service_name', AsIs('ext_client_id'), 'server_name', 'server_pid', 'published_by_id', 'published_by_name',
        'subscriber_id', 'subscriber_name')
# ################################################################################################################################
class _UpdateSIO(AdminSIO):
    """ Shared SimpleIO definition for services that update a single pub/sub message.
    """
    input_required = (AsIs('msg_id'), 'mime_type')
    input_optional = ('cluster_id', 'data', Int('expiration'), AsIs('correl_id'), AsIs('in_reply_to'), Int('priority'),
        Bool('exp_from_now'), 'server_name', 'server_pid', Int('size'), AsIs('pub_correl_id'), AsIs('expiration_time'))
    output_required = (Bool('found'), AsIs('msg_id'))
    output_optional = ('expiration_time', Int('size'))
# ################################################################################################################################
class GetFromTopicGD(AdminService):
    """ Returns a GD pub/sub topic message by its ID.
    """
    class SimpleIO(_GetSIO):
        input_required = _GetSIO.input_required + ('cluster_id',)
        input_optional = ('needs_sub_queue_check',)

    def handle(self, _not_given:'any_'=object()) -> 'None':
        request = self.request.input

        # Check subscription queues by default - the sentinel lets us tell apart
        # "not provided at all" from an explicitly given falsey value.
        needs_sub_queue_check = request.get('needs_sub_queue_check', _not_given)
        if needs_sub_queue_check is _not_given:
            needs_sub_queue_check = True

        with closing(self.odb.session()) as session:
            item = pubsub_message(session, request.cluster_id, request.msg_id, needs_sub_queue_check).\
                first()

            if not item:
                raise NotFound(self.cid, 'No such message `{}`'.format(request.msg_id))

            # Timestamps are stored as seconds - multiply to get the milliseconds datetime_from_ms expects
            item.pub_time = datetime_from_ms(item.pub_time * 1000)
            item.ext_pub_time = datetime_from_ms(item.ext_pub_time * 1000) if item.ext_pub_time else ''
            item.expiration_time = datetime_from_ms(item.expiration_time * 1000) if item.expiration_time else ''

            self.response.payload = item
# ################################################################################################################################
class GetFromServerTopicNonGD(AdminService):
    """ Returns a non-GD message from current server.
    """
    SimpleIO = _GetSIO # type: ignore

    def handle(self) -> 'None':
        found = self.pubsub.sync_backlog.get_message_by_id(self.request.input.msg_id)

        # We need to re-arrange attributes but we don't want to update the original message in place
        out = deepcopy(found)

        # Rename internal attributes to their public counterparts
        out['msg_id'] = out.pop('pub_msg_id')
        out['correl_id'] = out.pop('pub_correl_id', None)
        out['pub_time'] = datetime_from_ms(out['pub_time'] * 1000.0)

        expiration_time = out.pop('expiration_time', None)
        if expiration_time:
            out['expiration_time'] = datetime_from_ms(expiration_time * 1000.0)

        endpoint_id = out.pop('published_by_id')
        out['endpoint_id'] = endpoint_id
        out['endpoint_name'] = self.pubsub.get_endpoint_by_id(endpoint_id).name

        self.response.payload = out
# ################################################################################################################################
class GetFromTopicNonGD(AdminService):
    """ Returns a non-GD pub/sub topic message by its ID.
    """
    class SimpleIO(_GetSIO):
        input_required = _GetSIO.input_required + ('server_name', 'server_pid')

    def handle(self) -> 'None':
        request = self.request.input

        # The message lives in RAM on a particular server process - ask that one directly
        invoker = self.server.rpc.get_invoker_by_server_name(request.server_name)
        response = invoker.invoke(GetFromServerTopicNonGD.get_name(), {
            'msg_id': request.msg_id,
        }, pid=request.server_pid)

        if response:
            self.response.payload = response['response']
# ################################################################################################################################
class Has(AdminService):
    """ Returns a boolean flag to indicate whether a given message by ID exists in pub/sub.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', AsIs('msg_id'))
        output_required = (Bool('found'),)

    def handle(self) -> 'None':
        # NOTE(review): the query filters by self.server.cluster_id rather than
        # the cluster_id given on input - confirm this is intentional.
        criteria = and_(
            PubSubMessage.pub_msg_id==self.request.input.msg_id,
            PubSubMessage.cluster_id==self.server.cluster_id,
        )
        with closing(self.odb.session()) as session:
            self.response.payload.found = session.query(exists().where(criteria)).scalar()
# ################################################################################################################################
class TopicDeleteGD(AdminService):
    """ Deletes a GD topic message by its ID. Cascades to all related SQL objects, e.g. subscriber queues.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', AsIs('msg_id'))

    def handle(self) -> 'None':
        request = self.request.input

        with closing(self.odb.session()) as session:
            message = session.query(PubSubMessage).\
                filter(PubSubMessage.cluster_id==request.cluster_id).\
                filter(PubSubMessage.pub_msg_id==request.msg_id).\
                first()

            if not message:
                raise NotFound(self.cid, 'Message not found `{}`'.format(request.msg_id))

            session.delete(message)
            session.commit()

            self.logger.info('GD topic message deleted `%s` (%s)', request.msg_id, message.data_prefix_short)
# ################################################################################################################################
class DeleteTopicNonGDMessage(AdminService):
    """ Deletes a non-GD message by its ID from current server.
    """
    class SimpleIO(AdminSIO):
        input_required = (AsIs('msg_id'),)

    def handle(self) -> 'None':
        # Non-GD messages are kept in RAM only so it suffices to tell the backlog to drop it
        self.pubsub.sync_backlog.delete_msg_by_id(self.request.input.msg_id)
# ################################################################################################################################
class TopicDeleteNonGD(AdminService):
    """ Deletes a non-GD message by its ID from a named server.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'server_name', 'server_pid', AsIs('msg_id'))

    def handle(self) -> 'None':
        request = self.request.input

        # Forward the request to the server process that actually holds the message
        invoker = self.server.rpc.get_invoker_by_server_name(request.server_name)
        invoker.invoke(DeleteTopicNonGDMessage.get_name(), {
            'msg_id': request.msg_id,
        }, pid=request.server_pid)

        self.logger.info('Deleted non-GD message `%s` from `%s:%s`',
            request.msg_id, request.server_name, request.server_pid)
# ################################################################################################################################
class QueueDeleteServerNonGD(AdminService):
    """ Deletes a non-GD messages from a selected queue which must exist on current server.
    """
    class SimpleIO(AdminSIO):
        input_required = ('sub_key', AsIs('msg_id'))

    def handle(self) -> 'None':
        sub_key = self.request.input.sub_key

        # The pub/sub tool holds this sub_key's in-RAM messages on this server
        pubsub_tool = self.pubsub.get_pubsub_tool_by_sub_key(sub_key)
        if pubsub_tool:
            pubsub_tool.delete_messages(sub_key, [self.request.input.msg_id])
# ################################################################################################################################
class QueueDeleteNonGD(AdminService):
    """ Deletes a non-GD messages from a selected queue.
    """
    class SimpleIO(AdminSIO):
        input_required = ('sub_key', AsIs('msg_id'), 'server_name', 'server_pid')

    def handle(self) -> 'None':
        # Find the server holding the delivery task for this sub_key ..
        sk_server = self.pubsub.get_delivery_server_by_sub_key(self.request.input.sub_key)

        # .. without one there is nothing to delete from.
        if not sk_server:
            return

        invoker = self.server.rpc.get_invoker_by_server_name(sk_server.server_name)
        response = invoker.invoke(
            QueueDeleteServerNonGD.get_name(), {
                'sub_key': sk_server.sub_key,
                'msg_id': self.request.input.msg_id
            }, pid=sk_server.server_pid)

        if response:
            self.response.payload[:] = response['response']
# ################################################################################################################################
class QueueDeleteGD(AdminService):
    """ Deletes a GD message by its ID from the input subscription queue.

    Removes the enqueued SQL row first and then, if a delivery server exists
    for the sub_key, removes the message from its in-RAM delivery task too.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', AsIs('msg_id'), 'sub_key')

    def handle(self) -> 'None':
        with closing(self.odb.session()) as session:

            # Look up the row representing the message in this subscriber's queue
            ps_msg = session.query(PubSubEndpointEnqueuedMessage).\
                filter(PubSubEndpointEnqueuedMessage.cluster_id==self.request.input.cluster_id).\
                filter(PubSubEndpointEnqueuedMessage.pub_msg_id==self.request.input.msg_id).\
                filter(PubSubEndpointEnqueuedMessage.sub_key==self.request.input.sub_key).\
                first()

            if not ps_msg:
                raise NotFound(self.cid, 'Message not found `{}` for sub_key `{}`'.format(
                    self.request.input.msg_id, self.request.input.sub_key))

            session.delete(ps_msg)
            session.commit()

        # Find the server that has the delivery task for this sub_key
        sk_server = self.pubsub.get_delivery_server_by_sub_key(self.request.input.sub_key)

        # It's possible that there is no such server in case of WSX clients that connected,
        # had their subscription created but then they disconnected and there is no delivery server for them.
        if sk_server:
            invoker = self.server.rpc.get_invoker_by_server_name(sk_server.server_name)
            invoker.invoke(DeleteDeliveryTaskMessage.get_name(), {
                'msg_id': self.request.input.msg_id,
                'sub_key': self.request.input.sub_key,
            }, pid=sk_server.server_pid)

        self.logger.info('Deleting GD queue message `%s` (%s)', self.request.input.msg_id, self.request.input.sub_key)
# ################################################################################################################################
class DeleteDeliveryTaskMessage(AdminService):
    """ Deletes a message from a delivery task which must exist on current server
    """
    class SimpleIO(AdminSIO):
        input_required = (AsIs('msg_id'), 'sub_key')

    def handle(self) -> 'None':
        sub_key = self.request.input.sub_key

        # Only delete if this server actually runs the delivery task for this sub_key
        pubsub_tool = self.pubsub.get_pubsub_tool_by_sub_key(sub_key)
        if pubsub_tool:
            pubsub_tool.delete_messages(sub_key, [self.request.input.msg_id])
# ################################################################################################################################
class UpdateServerNonGD(AdminService):
    """ Updates a non-GD message on current server.
    """
    SimpleIO = _UpdateSIO # type: ignore

    def handle(self) -> 'None':
        request = self.request.input

        # Echo the ID back and report whether the in-RAM backlog knew the message
        self.response.payload.msg_id = request.msg_id
        self.response.payload.found = self.pubsub.sync_backlog.update_msg(request)
# ################################################################################################################################
class _Update(AdminService):
    """ Base class for services updating GD or non-GD messages.

    Subclasses set _message_update_has_gd and implement _get_item/_save_item
    against their respective storage - SQL for GD messages, RAM for non-GD ones.
    """
    # True when the subclass operates on guaranteed-delivery (SQL-backed) messages
    _message_update_has_gd:'bool'

    SimpleIO = _UpdateSIO # type: ignore

    def _get_item(self, *args:'any_', **kwargs:'any_') -> 'PubSubMessage':
        raise NotImplementedError('Must be overridden by subclasses')

    def _save_item(self, *args:'any_', **kwargs:'any_') -> 'None':
        raise NotImplementedError('Must be overridden by subclasses')

    def handle(self) -> 'None':
        input = self.request.input
        self.response.payload.msg_id = input.msg_id

        # Only GD messages require an SQL session
        session = self.odb.session() if self._message_update_has_gd else None

        try:
            # Get that from its storage, no matter what it is
            item = self._get_item(input, session)

            # A GD message that no longer exists in SQL cannot be updated
            if session and (not item):
                self.response.payload.found = False
                return

            # Store the new data along with its prefixes used in list views
            item.data = input.data.encode('utf8')
            item.data_prefix = input.data[:self.pubsub.data_prefix_len].encode('utf8')
            item.data_prefix_short = input.data[:self.pubsub.data_prefix_short_len].encode('utf8')
            item.size = len(input.data)
            item.expiration = get_expiration(self.cid, input.get('expiration'), item.expiration)
            item.priority = get_priority(self.cid, input.get('priority'))

            item.msg_id = input.msg_id
            item.pub_correl_id = input.correl_id
            item.in_reply_to = input.in_reply_to
            item.mime_type = input.mime_type

            # Recompute expiration_time, counting from either the current moment
            # or from the message's original publication time, as requested.
            if item.expiration:
                if self.request.input.exp_from_now:
                    from_ = utcnow_as_ms()
                else:
                    from_ = item.pub_time
                item.expiration_time = from_ + (item.expiration / 1000.0)
            else:
                item.expiration_time = None

            # Save data to its storage, SQL for GD and RAM for non-GD messages
            found = self._save_item(item, input, session)

            self.response.payload.found = found
            self.response.payload.size = item.size
            self.response.payload.expiration_time = datetime_from_ms(
                item.expiration_time * 1000.0) if item.expiration_time else None

        finally:
            if session:
                session.close() # type: ignore
# ################################################################################################################################
class UpdateGD(_Update):
    """ Updates details of an individual GD message.
    """
    _message_update_has_gd = True

    def _get_item(self, input:'stranydict', session:'SASession') -> 'PubSubMessage | None':
        # GD messages live in SQL - look the row up by cluster and public message ID
        query = session.query(PubSubMessage)
        query = query.filter(PubSubMessage.cluster_id==input['cluster_id'])
        query = query.filter(PubSubMessage.pub_msg_id==input['msg_id'])
        return query.first()

    def _save_item(self, item:'any_', _ignored:'any_', session:'SASession') -> 'bool':
        # Persist the updated row - at this point the item is known to exist
        session.add(item)
        session.commit()
        return True
# ################################################################################################################################
class UpdateNonGD(_Update):
    """ Updates details of an individual non-GD message.
    """
    _message_update_has_gd = False

    def _get_item(self, input:'any_', _ignored:'any_') -> 'Bunch':
        # Non-GD messages have no SQL representation - start from an empty container
        return Bunch()

    def _save_item(self, item:'any_', input:'any_', _ignored:'any_') -> 'bool':
        # Delegate the actual update to the server process that keeps the message in RAM
        invoker = self.server.rpc.get_invoker_by_server_name(self.request.input.server_name)
        response = invoker.invoke(UpdateServerNonGD.get_name(), item, pid=self.request.input.server_pid)
        self.response.payload = response['response']
        return True
# ################################################################################################################################
class GetFromQueueGD(AdminService):
    """ Returns a GD message from a subscription queue by its ID.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', AsIs('msg_id'))
        output_optional = (AsIs('msg_id'), 'recv_time', 'data', Int('delivery_count'), 'last_delivery_time',
            'is_in_staging', 'queue_name', 'subscriber_id', 'subscriber_name', 'size', 'priority', 'mime_type',
            'sub_pattern_matched', AsIs('correl_id'), 'in_reply_to', 'expiration', 'expiration_time',
            AsIs('sub_hook_service_id'), 'sub_hook_service_name', AsIs('ext_client_id'), 'published_by_id',
            'published_by_name', 'pub_pattern_matched')

    def handle(self):
        request = self.request.input

        with closing(self.odb.session()) as session:
            item = pubsub_queue_message(session, request.cluster_id, request.msg_id).\
                first()

            if not item:
                raise NotFound(self.cid, 'No such message `{}`'.format(request.msg_id))

            item.expiration = item.expiration or None
            out = item._asdict()

            # Convert second-based timestamps to the milliseconds expected by datetime_from_ms
            for name in ('expiration_time', 'recv_time', 'ext_pub_time', 'last_delivery_time'):
                value = out.get(name)
                if value:
                    out[name] = datetime_from_ms(value * 1000.0)

            self.response.payload = out
            self.response.payload['published_by_name'] = self.pubsub.get_endpoint_by_id(out['published_by_id']).name
# ################################################################################################################################
class GetFromQueueServerNonGD(AdminService):
    """ Returns details of a selected non-GD message from its queue which must exist on current server.
    """
    class SimpleIO(_GetSIO):
        input_required = _GetSIO.input_required + ('sub_key',)

    def handle(self) -> 'None':
        sub_key = self.request.input.sub_key

        pubsub_tool = self.pubsub.get_pubsub_tool_by_sub_key(sub_key)
        if not pubsub_tool:
            return

        msg = pubsub_tool.get_message(sub_key, self.request.input.msg_id)
        if not msg:
            return

        out = msg.to_dict()

        # Rename internal attributes to their public counterparts
        out['msg_id'] = out.pop('pub_msg_id')
        out['correl_id'] = out.pop('pub_correl_id', None)

        # Convert second-based timestamps to datetime objects
        for name in ('pub_time', 'ext_pub_time', 'expiration_time', 'recv_time'):
            value = out.pop(name, None)
            if value:
                value = cast_('float', value)
                out[name] = datetime_from_ms(value * 1000.0)

        publisher_id = cast_('int', out['published_by_id'])
        out['published_by_name'] = self.pubsub.get_endpoint_by_id(publisher_id).name

        # Attach subscriber details if the subscription still exists
        sub = self.pubsub.get_subscription_by_sub_key(sub_key)
        if sub:
            out['subscriber_id'] = sub.endpoint_id
            out['subscriber_name'] = self.pubsub.get_endpoint_by_id(sub.endpoint_id).name

        self.response.payload = out
# ################################################################################################################################
class GetFromQueueNonGD(AdminService):
    """ Returns details of a selected non-GD message from its queue.
    """
    class SimpleIO(_GetSIO):
        input_required = _GetSIO.input_required + ('sub_key', 'server_name', 'server_pid')

    def handle(self) -> 'None':
        # Only the server that runs the delivery task for this sub_key holds the message
        sk_server = self.pubsub.get_delivery_server_by_sub_key(self.request.input.sub_key)

        if not sk_server:
            return

        invoker = self.server.rpc.get_invoker_by_server_name(sk_server.server_name)
        response = invoker.invoke(
            GetFromQueueServerNonGD.get_name(), {
                'sub_key': sk_server.sub_key,
                'msg_id': self.request.input.msg_id
            }, pid=sk_server.server_pid)

        if response:
            self.response.payload = response['response']
# ################################################################################################################################
| 21,946
|
Python
|
.py
| 370
| 49.927027
| 130
| 0.535104
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,360
|
hook.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/hook.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
# Zato
from zato.common.api import PUBSUB
from zato.common.util.api import is_class_pubsub_hook
from zato.common.odb.model import PubSubSubscription, PubSubTopic
from zato.common.odb.query import pubsub_hook_service
from zato.server.service import PubSubHook
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
# Maps each hook type to the ODB model that hook services of that type are looked up against
hook_type_model = {
    PUBSUB.HOOK_TYPE.BEFORE_PUBLISH: PubSubTopic,
    PUBSUB.HOOK_TYPE.BEFORE_DELIVERY: PubSubSubscription,
}
# ################################################################################################################################
class GetHookService(AdminService):
    """ Returns ID and name of a hook service assigned to endpoint, if any is assigned at all.
    """
    class SimpleIO(AdminSIO):
        input_required = 'cluster_id', 'endpoint_id', 'hook_type'
        output_optional = 'id', 'name'

    def handle(self) -> 'None':
        request = self.request.input

        # Which model to query depends on the type of the hook
        model = hook_type_model[request.hook_type]

        with closing(self.odb.session()) as session:
            data = pubsub_hook_service(session, request.cluster_id, request.endpoint_id, model)
            if data:
                self.response.payload = data
# ################################################################################################################################
class GetHookServiceList(AdminService):
    """ Returns a list of pub/sub hook services currently deployed on this server.
    """
    class SimpleIO(AdminSIO):
        input_required = 'cluster_id'
        output_optional = 'id', 'name'
        output_repeated = True
        request_elem = 'zato_pubsub_get_hook_service_list_request'
        response_elem = 'zato_pubsub_get_hook_service_list_response'

    def handle(self) -> 'None':

        # Local aliases
        service_store = self.server.service_store
        impl_name_to_id = service_store.impl_name_to_id

        # Collect every deployed service whose class is a pub/sub hook
        self.response.payload[:] = [
            {'id': impl_name_to_id[impl_name], 'name': details['name']}
            for impl_name, details in service_store.services.items()
            if is_class_pubsub_hook(details['service_class'])
        ]
# ################################################################################################################################
class PubSubHookDemo(PubSubHook):
    """ A demo pub/sub hook which logs incoming topic and queue messages.
    """
    def before_publish(self) -> 'None':
        """ Invoked for each pub/sub message before it is published to a topic.
        """
        # Only logs message metadata and data - does not modify the message in any way
        self.logger.info('Demo hook before_publish invoked for pub_msg_id:`%s`, data:`%s`',
            self.request.input.ctx.msg.pub_msg_id, self.request.input.ctx.msg.data)

    def before_delivery(self) -> 'None':
        """ Invoked for each pub/sub message before it is delivered to an endpoint.
        """
        # Only logs message metadata and data - does not modify the message in any way
        self.logger.info('Demo hook before_delivery invoked for pub_msg_id:`%s`, data:`%s`',
            self.request.input.ctx.msg.pub_msg_id, self.request.input.ctx.msg.data)
# ################################################################################################################################
| 3,505
|
Python
|
.py
| 67
| 45.447761
| 130
| 0.539655
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,361
|
pubapi.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/pubapi.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from json import dumps
from traceback import format_exc
# Zato
from zato.common.api import CHANNEL, ContentType, CONTENT_TYPE, PUBSUB, ZATO_NONE
from zato.common.exception import BadRequest, Forbidden, PubSubSubscriptionExists
from zato.common.typing_ import cast_
from zato.common.util.auth import parse_basic_auth
from zato.server.service import AsIs, Int, Service
from zato.server.service.internal.pubsub.subscription import CreateWSXSubscription
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anylist, anytuple, stranydict
from zato.server.connection.http_soap.url_data import URLData
from zato.server.connection.web_socket import WebSocket
URLData = URLData
# ################################################################################################################################
# ################################################################################################################################
delete_channels_allowed = {CHANNEL.WEB_SOCKET, CHANNEL.SERVICE, CHANNEL.INVOKE, CHANNEL.INVOKE_ASYNC}
_invoke_channels=(CHANNEL.INVOKE, CHANNEL.INVOKE_ASYNC)
# ################################################################################################################################
# ################################################################################################################################
class BaseSIO:
    """ Common SimpleIO settings shared by all pub/sub REST services in this module. """
    input_required = ('topic_name',)
    response_elem = None      # Responses are returned without a wrapping element
    skip_empty_keys = True    # Keys with empty values are omitted from responses
    default_value = None
# ################################################################################################################################
class TopicSIO(BaseSIO):
    """ SimpleIO definition for topic publish / message retrieval services. """
    input_optional = ('data', AsIs('msg_id'), 'has_gd', Int('priority'), # type: any_
        Int('expiration'), 'mime_type', AsIs('correl_id'), 'in_reply_to', AsIs('ext_client_id'), 'ext_pub_time',
        'sub_key', AsIs('wsx')) # type: any_
    output_optional = AsIs('msg_id'), # type: anytuple
# ################################################################################################################################
class SubSIO(BaseSIO):
    """ SimpleIO definition for subscribe / unsubscribe services. """
    input_optional = 'sub_key', 'delivery_method'
    output_optional = 'sub_key', Int('queue_depth') # type: anytuple
# ################################################################################################################################
class _PubSubService(Service):
    """ Base class for pub/sub REST services - resolves the calling endpoint from whatever
    credentials the channel provides (WebSocket, internal invocation, or HTTP Basic Auth).
    """
    def _pubsub_check_credentials(self) -> 'int':
        """ Authenticates the caller and returns the ID of the pub/sub endpoint associated
        with the credentials. Raises Forbidden on any authentication failure.
        """
        # If it is a WebSocket channel that invokes us, it means that it has already been authenticated
        # and we can get its underlying endpoint directly.
        wsx = self.wsgi_environ.get('zato.wsx') # type: WebSocket | None
        if wsx:
            channel_id = wsx.config.id
            endpoint_id = self.pubsub.get_endpoint_id_by_ws_channel_id(channel_id)
            return cast_('int', endpoint_id)

        # If we are being through a CHANNEL.INVOKE* channel, it means that our caller used self.invoke
        # or self.invoke_async, so there will never by any credentials in HTTP headers (there is no HTTP request after all),
        # and we can run as an internal endpoint in this situation.
        if self.channel.type in _invoke_channels:
            return self.server.get_default_internal_pubsub_endpoint_id()

        # From here on we require HTTP Basic Auth credentials
        auth = self.wsgi_environ.get('HTTP_AUTHORIZATION')
        if not auth:
            raise Forbidden(self.cid)

        try:
            username, password = parse_basic_auth(auth)
        except ValueError:
            # Malformed Authorization header
            raise Forbidden(self.cid)

        url_data = self.server.worker_store.request_dispatcher.url_data
        basic_auth = url_data.basic_auth_config.values() # type: any_

        # Assume we are not allowed by default
        auth_ok = False
        security_id = None

        # Compare the input credentials against each active Basic Auth definition
        for item in basic_auth:
            config = item['config']
            if config['is_active']:
                # NOTE(review): plain string comparison, not constant-time - confirm whether
                # a timing-safe comparison (e.g. hmac.compare_digest) is required here.
                if config['username'] == username and config['password'] == password:
                    auth_ok = True
                    security_id = config['id']
                    break
                else:
                    auth_ok = False

        if not auth_ok:
            raise Forbidden(self.cid)

        if not security_id:
            raise Forbidden(self.cid)

        # Valid credentials - now map the security definition to a pub/sub endpoint
        try:
            endpoint_id = self.pubsub.get_endpoint_id_by_sec_id(security_id)
        except KeyError:
            self.logger.warning('Client credentials are valid but there is no pub/sub endpoint using them, sec_id:`%s`, e:`%s`',
                security_id, format_exc())
            raise Forbidden(self.cid)
        else:
            return endpoint_id
# ################################################################################################################################
class TopicService(_PubSubService):
    """ Main service responsible for publications to and deliveries from a given topic. Handles security and distribution
    of messages to target queues or recipients.
    """
    SimpleIO = TopicSIO

# ################################################################################################################################

    def _publish(self, endpoint_id:'int') -> 'any_':
        """ POST /zato/pubsub/topic/{topic_name} {"data":"my data", ...}

        Publishes the input message to the topic on behalf of the given endpoint
        and returns whatever self.pubsub.publish returns (the message ID).
        """
        # We always require some data on input
        if not self.request.input.data:
            raise BadRequest(self.cid, 'No data sent on input')

        # Ignore the header set by curl and similar tools
        mime_type = self.wsgi_environ.get('CONTENT_TYPE')
        if (not mime_type) or (mime_type == ContentType.FormURLEncoded):
            mime_type = CONTENT_TYPE.JSON

        input = self.request.input

        # Publication context built out of input metadata
        ctx = {
            'mime_type': mime_type,
            'data': input.data,
            'priority': input.priority,
            'expiration': input.expiration,
            'correl_id': input.correl_id or self.cid,
            'in_reply_to': input.in_reply_to,
            'ext_client_id': input.ext_client_id,
            'has_gd': input.has_gd or ZATO_NONE,
            'endpoint_id': endpoint_id,
        } # type: stranydict

        return self.pubsub.publish(input.topic_name, service=self, **ctx)

# ################################################################################################################################

    def _get_messages(self, endpoint_id:'int') -> 'anylist':
        """ POST /zato/pubsub/topic/{topic_name}

        Returns outstanding messages for the endpoint's subscription to the input topic.
        """
        # Local aliases
        topic_name = self.request.input.topic_name

        # Not every channel may present a sub_key on input
        if self.chan.type in (CHANNEL.WEB_SOCKET, CHANNEL.SERVICE): # type: ignore
            sub_key = self.request.input.get('sub_key')
        else:
            # For other channels, the sub_key is derived from the endpoint's own subscription
            sub = self.pubsub.get_subscription_by_endpoint_id(endpoint_id, topic_name, needs_error=False)
            if sub:
                sub_key = sub.sub_key
            else:
                raise BadRequest(self.cid, 'You are not subscribed to topic `{}`'.format(topic_name), needs_msg=True)

        # Confirm the sub_key actually exists before reading messages
        try:
            _ = self.pubsub.get_subscription_by_sub_key(sub_key)
        except KeyError:
            self.logger.warning('Could not find sub_key:`%s`, e:`%s`', sub_key, format_exc())
            raise Forbidden(self.cid)
        else:
            return self.pubsub.get_messages(topic_name, sub_key, needs_msg_id=True)

# ################################################################################################################################

    def handle_POST(self):
        """ Publishes a message - the message ID is returned in the response. """

        # Checks credentials and returns endpoint_id if valid
        endpoint_id = self._pubsub_check_credentials()

        # Extracts payload and publishes the message
        self.response.payload.msg_id = self._publish(endpoint_id)

# ################################################################################################################################

    def handle_PATCH(self):
        """ Returns outstanding messages for the caller's subscription as a JSON list. """

        # Checks credentials and returns endpoint_id if valid
        endpoint_id = self._pubsub_check_credentials()

        # Find our messages ..
        messages = self._get_messages(endpoint_id)

        # .. and return them to the caller.
        self.response.payload = dumps(messages)
# ################################################################################################################################
class SubscribeService(_PubSubService):
    """ Service through which REST clients subscribe to or unsubscribe from topics.
    """
    SimpleIO = SubSIO

# ################################################################################################################################

    def _check_sub_access(self, endpoint_id:'int') -> 'None':
        """ Confirms that the endpoint may subscribe to the input topic - raises Forbidden otherwise,
        deliberately not revealing whether the topic exists at all.
        """
        # At this point we know that the credentials are valid and in principle, there is such an endpoint,
        # but we still don't know if it has permissions to subscribe to this topic and we don't want to reveal
        # information about what topics exist or not.
        try:
            topic = self.pubsub.get_topic_by_name(self.request.input.topic_name)
        except KeyError:
            self.logger.warning(format_exc())
            raise Forbidden(self.cid)

        # We know the topic exists but we also need to make sure the endpoint can subscribe to it
        if not self.pubsub.is_allowed_sub_topic_by_endpoint_id(topic.name, endpoint_id):
            endpoint = self.pubsub.get_endpoint_by_id(endpoint_id)
            self.logger.warning('Endpoint `%s` is not allowed to subscribe to `%s`', endpoint.name, self.request.input.topic_name)
            raise Forbidden(self.cid)

# ################################################################################################################################

    def handle_POST(self) -> 'None':
        """ POST /zato/pubsub/subscribe/topic/{topic_name}

        Creates a subscription for the caller and returns its sub_key and current queue depth.
        """
        # Checks credentials and returns endpoint_id if valid
        endpoint_id = self._pubsub_check_credentials()

        # Make sure this endpoint has correct subscribe permissions (patterns)
        self._check_sub_access(endpoint_id)

        try:
            response = self.invoke('zato.pubsub.subscription.subscribe-rest', {
                'topic_name': self.request.input.topic_name,
                'endpoint_id': endpoint_id,
                'delivery_batch_size': PUBSUB.DEFAULT.DELIVERY_BATCH_SIZE,
                'delivery_method': self.request.input.delivery_method or PUBSUB.DELIVERY_METHOD.PULL.id,
                'server_id': self.server.id,
            })['response']
        except PubSubSubscriptionExists:
            # A pre-existing subscription is not an error for the caller - log and return an empty response
            msg = 'Subscription for topic `%s` already exists for endpoint_id `%s`'
            self.logger.info(msg, self.request.input.topic_name, endpoint_id)
        else:
            self.response.payload.sub_key = response['sub_key']
            self.response.payload.queue_depth = response['queue_depth']

# ################################################################################################################################

    def _handle_DELETE(self) -> 'None':
        """ Low-level implementation of DELETE /zato/pubsub/subscribe/topic/{topic_name}
        """
        # Local aliases
        topic_name = self.request.input.topic_name

        # This is invalid by default and will be set accordingly
        endpoint_id = -1

        # This call may be made by a live WebSocket object
        wsx = self.request.input.get('wsx') # type: WebSocket

        # This may be provided by WebSockets
        sub_key = self.request.input.get('sub_key')

        # Not every channel may present a sub_key on input
        if sub_key and self.chan.type not in delete_channels_allowed: # type: ignore
            self.logger.warning('Channel type `%s` may not use sub_key on input (%s)', self.chan.type, sub_key)
            raise Forbidden(self.cid)

        # If this is a WebSocket object, we need to confirm that its underlying pubsub tool
        # actually has access to the input sub_key.
        if wsx:
            if not wsx.pubsub_tool.has_sub_key(sub_key):
                self.logger.warning('WSX `%s` does not have sub_key `%s`', wsx.get_peer_info_dict(), sub_key)
                raise Forbidden(self.cid)

        # Otherwise, we need to get an endpoint associated with the input data and check its permissions.
        else:
            # Checks credentials and returns endpoint_id if valid
            endpoint_id = self._pubsub_check_credentials()
            if not endpoint_id:
                self.logger.warning('Could not find endpoint for input credentials')
                return

            # To unsubscribe, we also need to have the right subscription permissions first (patterns) ..
            self._check_sub_access(endpoint_id)

        # .. also check that sub_key exists and that we are not using another endpoint's sub_key.
        try:
            if sub_key:
                sub = self.pubsub.get_subscription_by_sub_key(sub_key)
            else:
                sub = self.pubsub.get_subscription_by_endpoint_id(endpoint_id, topic_name, needs_error=False)
        except KeyError:
            self.logger.warning('Could not find subscription by endpoint_id:`%s`, endpoint:`%s`',
                endpoint_id, self.pubsub.get_endpoint_by_id(endpoint_id).name)
            raise Forbidden(self.cid)
        else:
            if not sub:
                self.logger.info('No subscription for sub_key: `%s` and endpoint_id: `%s` (%s) (delete)',
                    sub_key, endpoint_id, topic_name)
                return

            # If this is not a WebSocket, raise an exception if current endpoint is not the one that created
            # the subscription originally, but only if current endpoint is not the default internal one;
            # in such a case we want to let the call succeed - this lets other services use self.invoke in
            # order to unsubscribe.
            # NOTE(review): on the WSX path endpoint_id stays -1 so sub.endpoint_id != endpoint_id is
            # always true there - confirm the intended behavior for WSX unsubscribes.
            if sub.endpoint_id != endpoint_id:
                if endpoint_id != self.server.get_default_internal_pubsub_endpoint_id():
                    sub_endpoint = self.pubsub.get_endpoint_by_id(sub.endpoint_id)
                    self_endpoint = self.pubsub.get_endpoint_by_id(endpoint_id)
                    self.logger.warning('Endpoint `%s` cannot unsubscribe sk:`%s` (%s) created by `%s`',
                        self_endpoint.name,
                        sub.sub_key,
                        self.pubsub.get_topic_by_sub_key(sub.sub_key).name,
                        sub_endpoint.name)
                    raise Forbidden(self.cid)

            # We have all permissions checked now and can proceed to the actual calls
            response = self.invoke('zato.pubsub.endpoint.delete-endpoint-queue', {
                'cluster_id': self.server.cluster_id,
                'sub_key': sub.sub_key
            })

            # Make sure that we always return JSON payload
            response = response or {}

            # Assign the response ..
            self.response.payload = response

# ################################################################################################################################

    def handle_DELETE(self) -> 'None':
        """ DELETE /zato/pubsub/subscribe/topic/{topic_name}
        """
        # Call our implementation ..
        self._handle_DELETE()

        # .. and always return an empty response.
        self.response.payload = {}
# ################################################################################################################################
class PublishMessage(Service):
    """ Lets one publish messages to a topic.
    """
    SimpleIO = TopicSIO

    def handle(self) -> 'None':

        # Build the WSGI environ for the underlying topic service - POST means publication
        environ = {
            'REQUEST_METHOD':'POST',
            'zato.wsx': self.wsgi_environ.get('zato.wsx'),
            'zato.request_ctx.async_msg': self.wsgi_environ.get('zato.request_ctx.async_msg') or {},
        }

        # Delegate the actual work to TopicService and return its response as-is
        result = self.invoke(TopicService.get_name(), self.request.input, wsgi_environ=environ)
        self.response.payload = result
# ################################################################################################################################
class GetMessages(Service):
    """ Used to return outstanding messages from a topic.
    """
    SimpleIO = TopicSIO

    def handle(self) -> 'None':

        # PATCH is how the underlying topic service exposes message retrieval
        environ = {'REQUEST_METHOD':'PATCH'}
        result = self.invoke(TopicService.get_name(), self.request.input, wsgi_environ=environ)
        self.response.payload = result
# ################################################################################################################################
class Subscribe(Service):
    """ Lets callers subscribe to topics.
    """
    SimpleIO = SubSIO

    def handle(self) -> 'None':

        # POST is how the underlying service exposes subscription creation
        environ = {'REQUEST_METHOD':'POST'}
        result = self.invoke(SubscribeService.get_name(), self.request.input, wsgi_environ=environ)
        self.response.payload = result
# ################################################################################################################################
# Added for completness so as to make WSX clients use services from this module only
# Added for completness so as to make WSX clients use services from this module only
class SubscribeWSX(CreateWSXSubscription):
    """ An alias to CreateWSXSubscription, added for API completeness.
    """
    # Only the public service name differs - the implementation is inherited unchanged
    name = 'zato.pubsub.pubapi.subscribe-wsx'
# ################################################################################################################################
class Unsubscribe(Service):
    """ Lets one unsubscribe from a topic.
    """
    SimpleIO = SubSIO

    def handle(self) -> 'None':

        # DELETE is how the underlying service exposes unsubscription
        environ = {'REQUEST_METHOD':'DELETE'}
        result = self.invoke(SubscribeService.get_name(), self.request.input, wsgi_environ=environ)
        self.response.payload = result
# ################################################################################################################################
| 18,332
|
Python
|
.py
| 320
| 47.884375
| 130
| 0.51783
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,362
|
topic.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/topic.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from dataclasses import dataclass
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import PUBSUB as BROKER_MSG_PUBSUB
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubTopic
from zato.common.odb.query import pubsub_messages_for_topic, pubsub_publishers_for_topic, pubsub_topic, pubsub_topic_list
from zato.common.odb.query.pubsub.topic import get_gd_depth_topic, get_gd_depth_topic_list, get_topic_sub_count_list, \
get_topics_by_sub_keys
from zato.common.typing_ import anylist, cast_, intlistnone, intnone, strlistnone, strnone
from zato.common.util.api import ensure_pubsub_hook_is_valid
from zato.common.util.pubsub import get_last_pub_metadata
from zato.common.util.time_ import datetime_from_ms
from zato.server.service import AsIs, Bool, Int, List, Model, Opaque, Service
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
from zato.server.service.internal.pubsub.search import NonGDSearchService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, anydict, anytuple, stranydict, strlist
Bunch = Bunch
strlist = strlist
# ################################################################################################################################
# ################################################################################################################################
# SimpleIO definitions of the limit-related fields shared by the topic services below
topic_limit_fields:'anylist' = [Int('limit_retention'), Int('limit_message_expiry'), Int('limit_sub_inactivity')]

# Metadata consumed by the Create/Edit/Delete/GetList meta-classes (zato.server.service.meta)
elem = 'pubsub_topic'
model = PubSubTopic
label = 'a pub/sub topic'
get_list_docs = 'pub/sub topics'
broker_message = BROKER_MSG_PUBSUB
broker_message_prefix = 'TOPIC_'
list_func:'any_' = pubsub_topic_list
skip_input_params = ['cluster_id', 'is_internal', 'current_depth_gd', 'last_pub_time', 'last_pub_msg_id', 'last_endpoint_id',
    'last_endpoint_name']
input_optional_extra = ['needs_details', 'on_no_subs_pub', 'hook_service_name', 'target_service_name'] + topic_limit_fields
output_optional_extra:'anylist' = ['is_internal', Int('current_depth_gd'), Int('current_depth_non_gd'), 'last_pub_time',
    'hook_service_name', 'last_pub_time', AsIs('last_pub_msg_id'), 'last_endpoint_id', 'last_endpoint_name',
    Bool('last_pub_has_gd'), Opaque('last_pub_server_pid'), 'last_pub_server_name', 'on_no_subs_pub',
    Int('sub_count'),] + topic_limit_fields

# ################################################################################################################################

# Subscription attributes included in broker messages
sub_broker_attrs = ('active_status', 'active_status', 'cluster_id', 'creation_time', 'endpoint_id', 'has_gd', 'id',
    'is_durable', 'is_internal', 'name', 'out_amqp_id', 'out_http_soap_id', 'sub_key', 'topic_id', 'ws_channel_id',
    'ws_sub_id', 'delivery_group_size')

# ################################################################################################################################

# Key templates under which last-publication metadata is stored
_meta_topic_key = PUBSUB.REDIS.META_TOPIC_LAST_KEY
_meta_endpoint_key = PUBSUB.REDIS.META_ENDPOINT_PUB_KEY
# ################################################################################################################################
def _format_meta_topic_key(cluster_id:'int', topic_id:'int') -> 'str':
    """ Returns the key under which last-publication metadata for the given topic is kept. """
    return _meta_topic_key % (cluster_id, topic_id)
# ################################################################################################################################
def broker_message_hook(
    self:'Service',
    input:'anydict',
    instance:'PubSubTopic',
    attrs:'anydict',
    service_type:'str'
) -> 'None':
    """ Enriches the broker message published after create/edit with attributes read back
    from the database, so all servers receive the full topic definition.
    """
    if service_type == 'create_edit':
        with closing(self.odb.session()) as session:
            topic = pubsub_topic(session, input['cluster_id'], instance.id)
            input['is_internal'] = topic.is_internal
            input['max_depth_gd'] = topic.max_depth_gd
            input['max_depth_non_gd'] = topic.max_depth_non_gd
            input['hook_service_id'] = topic.hook_service_id
            input['hook_service_name'] = topic.hook_service_name
# ################################################################################################################################
def _add_limits(item:'any_') -> 'None':
item.limit_retention = item.get('limit_retention') or PUBSUB.DEFAULT.LimitTopicRetention
item.limit_sub_inactivity = item.get('limit_sub_inactivity') or PUBSUB.DEFAULT.LimitMessageExpiry
item.limit_message_expiry = item.get('limit_message_expiry') or PUBSUB.DEFAULT.LimitSubInactivity
# ################################################################################################################################
def response_hook(self:'Service', input:'anydict', instance:'PubSubTopic', attrs:'anydict', service_type:'str') -> 'None':
    """ Post-processes responses of the generated services - waits for newly created topics
    and, for list responses, attaches limits, depths, subscriber counts and last-publication metadata.
    """
    if service_type == 'create_edit':
        # Block until the in-RAM pub/sub state has picked up the new/edited topic
        _ = self.pubsub.wait_for_topic(input['name'])

    elif service_type == 'get_list':

        # Limit-related fields were introduced post-3.2 release which is why they may not exist
        for item in self.response.payload:
            _add_limits(item)

        # Details are needed when the main list of topics is requested. However, if only basic information
        # is needed, like a list of topic IDs and their names, we don't need to look up additional details.
        # The latter is the case of the message publication screen which simply needs a list of topic IDs/names.
        if input.get('needs_details', True):

            # Topics to look up data for
            topic_id_list = []

            # Collect all topic IDs whose depth need to be looked up
            for item in self.response.payload:
                topic_id_list.append(item.id)

            # .. query the database to find all the additional data for topics from the list ..
            with closing(self.odb.session()) as session:
                depth_by_topic = get_gd_depth_topic_list(session, input['cluster_id'], topic_id_list)
                sub_count_by_topic = get_topic_sub_count_list(session, input['cluster_id'], topic_id_list)

                # .. convert it all to a dict to make it easier to use it ..
                depth_by_topic = dict(depth_by_topic)
                sub_count_by_topic = dict(sub_count_by_topic)

            # .. look up last pub metadata among all the servers ..
            last_pub_by_topic:'anydict' = get_last_pub_metadata(self.server, topic_id_list)

            # .. now, having collected all the details, go through all the topics again
            # .. and assign the metadata found.
            for item in self.response.payload:

                # .. assign additional data ..
                item.current_depth_gd = depth_by_topic.get(item.id) or 0
                item.sub_count = sub_count_by_topic.get(item.id) or 0

                # .. assign last usage metadata ..
                last_data = last_pub_by_topic.get(item.id)
                if last_data:
                    item.last_pub_time = last_data['pub_time']
                    item.last_pub_has_gd = last_data['has_gd']
                    item.last_pub_msg_id = last_data['pub_msg_id']
                    item.last_endpoint_id = last_data['endpoint_id']
                    item.last_endpoint_name = last_data['endpoint_name']
                    item.last_pub_server_pid = last_data.get('server_pid')
                    item.last_pub_server_name = last_data.get('server_name')

                    # PIDs are integers
                    if item.last_pub_server_pid:
                        item.last_pub_server_pid = int(item.last_pub_server_pid) # type: ignore
# ################################################################################################################################
def pre_opaque_attrs_hook(self:'Service', input:'anydict', instance:'PubSubTopic', attrs:'anydict') -> 'None':
    """ Ensures hook_service_name is populated from hook_service_id before opaque attributes are stored. """

    # Nothing to do if the name was provided explicitly
    if input.get('hook_service_name'):
        return

    # Otherwise, resolve the name from the ID, if one was given at all
    service_id = input.get('hook_service_id')
    if service_id:
        input['hook_service_name'] = self.server.service_store.get_service_name_by_id(service_id)
# ################################################################################################################################
def instance_hook(self:'Service', input:'anydict', instance:'PubSubTopic', attrs:'anydict') -> 'None':
    """ Invoked for each topic instance before it is saved - fills in required defaults,
    validates the hook service and resolves its name to an ID.
    """
    if attrs['is_create_edit']:

        # Populate a field that ODB requires even if it is reserved for future use
        instance.pub_buffer_size_gd = 0

        # Validate if broker hook actually exists
        ensure_pubsub_hook_is_valid(self, input, instance, attrs)

        # If input hook service is provided by its name,
        # turn it into a service ID and assign it to instance.
        hook_service_name = input.get('hook_service_name')
        if hook_service_name:
            hook_service_id = self.server.service_store.get_service_id_by_name(hook_service_name)
            instance.hook_service_id = hook_service_id
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class DeleteTopicRequest(Model):
    """ Input to topic-deletion services - topics may be selected by a single ID or name,
    lists of either, or a name pattern. All fields are optional.
    """
    id: intnone
    id_list: intlistnone
    name: strnone
    name_list: strlistnone
    pattern: strnone
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class DeleteTopicResponse(Model):
    """ Output of topic-deletion services - the topics that were actually deleted. """
    topics_deleted: anylist
# ################################################################################################################################
# ################################################################################################################################
# The implementation is generated by GetListMeta from the module-level attributes (elem, model, list_func, ...)
@add_metaclass(GetListMeta)
class GetList(AdminService):
    _filter_by = PubSubTopic.name,
# ################################################################################################################################
# ################################################################################################################################
# The implementation is generated by CreateEditMeta from the module-level attributes and hooks above
@add_metaclass(CreateEditMeta)
class Create(AdminService):
    pass
# ################################################################################################################################
# ################################################################################################################################
# The implementation is generated by CreateEditMeta from the module-level attributes and hooks above
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    pass
# ################################################################################################################################
# ################################################################################################################################
# The implementation is generated by DeleteMeta from the module-level attributes above
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    pass
# ################################################################################################################################
# ################################################################################################################################
class Get(AdminService):
    """ Returns a pub/sub topic by its ID.
    """
    class SimpleIO:
        input_optional:'anytuple' = 'cluster_id', AsIs('id'), 'name'
        output_optional:'anytuple' = 'id', 'name', 'is_active', 'is_internal', 'has_gd', 'max_depth_gd', 'max_depth_non_gd', \
            'current_depth_gd', Int('limit_retention'), Int('limit_message_expiry'), Int('limit_sub_inactivity'), \
            'last_pub_time', 'on_no_subs_pub', 'target_service_name'

    def handle(self) -> 'None':

        # Local aliases
        cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id
        topic_id = self.request.input.id
        topic_name = self.request.input.name

        with closing(self.odb.session()) as session:
            topic = pubsub_topic(session, cluster_id, topic_id, topic_name) # type: PubSubTopic
            topic['current_depth_gd'] = get_gd_depth_topic(session, cluster_id, topic.id)

            # Now, we know that we have this object so we can just make use of its ID
            topic_id = topic.id

            # Attach last-publication metadata collected from all the servers
            last_data = get_last_pub_metadata(self.server, [topic_id])
            if last_data:
                topic['last_pub_time'] = last_data[int(topic_id)]['pub_time']

            # Limits were added post-3.2 release
            _add_limits(topic)

            self.response.payload = topic
# ################################################################################################################################
# ################################################################################################################################
class ClearTopicNonGD(AdminService):
    """ Clears a topic from all non-GD messages on current server.
    """
    class SimpleIO:
        input_required = ('topic_id',)
        output_optional = 'status'

    def handle(self) -> 'None':

        # Remove in-RAM (non-GD) messages for this topic from this server's sync backlog ..
        self.pubsub.sync_backlog.clear_topic(self.request.input.topic_id)

        # .. and report which server and PID handled the request.
        self.response.payload.status = 'ok.{}.{}'.format(self.server.name, self.server.pid)
# ################################################################################################################################
# ################################################################################################################################
class Clear(AdminService):
    """ Clears a topic from GD and non-GD messages.
    """
    class SimpleIO:
        input_required = 'id'
        input_optional = 'cluster_id'

    def handle(self) -> 'None':

        # Local aliases
        cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id
        topic_id = self.request.input.id

        with closing(self.odb.session()) as session:

            self.logger.info('Clearing topic `%s` (id:%s)', self.pubsub.get_topic_by_id(topic_id).name, topic_id)

            # Remove all GD messages
            _ = session.query(PubSubMessage).\
                filter(PubSubMessage.cluster_id==cluster_id).\
                filter(PubSubMessage.topic_id==topic_id).\
                delete()

            # Remove all references to topic messages from target queues
            _ = session.query(PubSubEndpointEnqueuedMessage).\
                filter(PubSubEndpointEnqueuedMessage.cluster_id==cluster_id).\
                filter(PubSubEndpointEnqueuedMessage.topic_id==topic_id).\
                delete()

            # Whatever happens with non-GD messsages we can at least delete the GD ones
            session.commit()

        # Delete non-GD messages for that topic on all servers
        _ = self.server.rpc.invoke_all(ClearTopicNonGD.get_name(), {
            'topic_id': topic_id,
        }, timeout=90)
# ################################################################################################################################
# ################################################################################################################################
class GetPublisherList(AdminService):
    """ Returns all publishers that sent at least one message to a given topic.
    """
    class SimpleIO:
        input_required = 'topic_id'
        input_optional = 'cluster_id'
        output_required = ('name', 'is_active', 'is_internal', 'pub_pattern_matched')
        output_optional:'anytuple' = ('service_id', 'security_id', 'ws_channel_id', 'last_seen', 'last_pub_time', \
            AsIs('last_msg_id'), AsIs('last_correl_id'), 'last_in_reply_to', 'service_name', 'sec_name', 'ws_channel_name', \
            AsIs('ext_client_id'))
        output_repeated = True

    def handle(self) -> 'None':

        # Type checks
        item:'Bunch'

        # Fall back to our own cluster ID if none came in on input
        cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id

        out = []

        with closing(self.odb.session()) as session:

            # One row per endpoint that published to this topic, including its latest activity
            rows:'anylist' = pubsub_publishers_for_topic(session, cluster_id, self.request.input.topic_id).all()

            for item in rows:
                # Timestamps arrive as float milliseconds - turn them into ISO-8601 strings
                item.last_seen = datetime_from_ms(cast_('float', item.last_seen))
                item.last_pub_time = datetime_from_ms(cast_('float', item.last_pub_time))
                out.append(item)

        self.response.payload[:] = out
# ################################################################################################################################
# ################################################################################################################################
class GetGDMessageList(AdminService):
    """ Returns all GD messages currently in a topic that have not been moved to subscriber queues yet.
    """
    # Column that the optional search query filter is matched against
    _filter_by = PubSubMessage.data_prefix,

    class SimpleIO(GetListAdminSIO):
        input_required = 'topic_id'
        input_optional:'anytuple' = GetListAdminSIO.input_optional + ('cluster_id', 'has_gd')
        output_required:'anytuple' = (AsIs('msg_id'), 'pub_time', 'data_prefix_short', 'pub_pattern_matched')
        output_optional:'anytuple' = (AsIs('correl_id'), 'in_reply_to', 'size', 'service_id', 'security_id', 'ws_channel_id',
            'service_name', 'sec_name', 'ws_channel_name', 'endpoint_id', 'endpoint_name', 'server_pid', 'server_name')
        output_repeated = True

# ################################################################################################################################

    def get_gd_data(self, session:'SASession') -> 'anylist':
        """ Runs the (paginated) SQL query that returns GD messages for the input topic.
        """

        # Local aliases
        cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id

        return self._search(
            pubsub_messages_for_topic, session, cluster_id, self.request.input.topic_id, False)

# ################################################################################################################################

    def handle(self) -> 'None':

        # Response to produce ..
        out = []

        # .. collect the data ..
        with closing(self.odb.session()) as session:
            data = self.get_gd_data(session)

        # .. use ISO timestamps ..
        for item in data:

            # .. work with dicts ..
            item = item._asdict()

            # .. convert to ISO ..
            # (values are floats that datetime_from_ms turns into ISO-8601 after scaling to milliseconds;
            # an empty ext_pub_time stays an empty string)
            pub_time = datetime_from_ms(item['pub_time'] * 1000.0)
            ext_pub_time = datetime_from_ms(item['ext_pub_time'] * 1000.0) if item['ext_pub_time'] else ''

            # .. assign it back ..
            item['pub_time'] = pub_time
            item['ext_pub_time'] = ext_pub_time

            # .. and add it to our response ..
            out.append(item)

        # .. which we can return now.
        self.response.payload[:] = out
# ################################################################################################################################
# ################################################################################################################################
class GetNonGDMessageList(NonGDSearchService):
    """ Returns all non-GD messages currently in a topic that have not been moved to subscriber queues yet.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'topic_id')
        input_optional:'anytuple' = (Bool('paginate'), Int('cur_page'), 'query')
        output_required:'anytuple' = (AsIs('_meta'),)
        output_optional:'anytuple' = (AsIs('response'),)
        response_elem = None

    def handle(self) -> 'None':

        input = self.request.input

        # Ask every server process for its share of in-RAM messages for this topic
        per_server_replies = self.server.rpc.invoke_all('zato.pubsub.topic.get-server-message-list', {
            'topic_id': input.topic_id,
            'query': input.query,
        }, timeout=30)

        # Combine and paginate all the partial responses into one
        self.set_non_gd_msg_list_response(per_server_replies.data, input.cur_page)
# ################################################################################################################################
# ################################################################################################################################
class GetServerMessageList(AdminService):
    """ Returns a list of in-RAM messages matching input criteria from current server process.
    """
    class SimpleIO(AdminSIO):
        input_required = ('topic_id',)
        input_optional = ('cur_page', 'query', 'paginate')
        output_optional:'anytuple' = (Opaque('data'),)

    def handle(self) -> 'None':
        input = self.request.input
        # Non-GD messages are kept in this server's sync backlog
        data = self.pubsub.sync_backlog.get_messages_by_topic_id(input.topic_id, True, input.query)
        self.response.payload.data = data
# ################################################################################################################################
# ################################################################################################################################
class GetInRAMMessageList(AdminService):
    """ Returns all in-RAM messages matching input sub_keys. Messages, if there were any, are deleted from RAM.
    """
    class SimpleIO:
        # Subscription keys whose pending in-RAM messages are to be returned
        input_required:'anytuple' = List('sub_key_list')
        output_optional:'anytuple' = List('messages')

    def handle(self) -> 'None':

        # Response to produce
        out = []

        # Maps each topic ID to the subset of input sub_keys belonging to that topic
        topic_sub_keys:'stranydict' = {}

        with closing(self.odb.session()) as session:

            # Group input sub_keys by the topic each one is subscribed to
            for topic_id, sub_key in get_topics_by_sub_keys(session, self.server.cluster_id, self.request.input.sub_key_list):
                sub_keys:'strlist' = topic_sub_keys.setdefault(topic_id, [])
                sub_keys.append(sub_key)

        for topic_id, sub_keys in topic_sub_keys.items():

            # This is a dictionary of sub_key -> msg_id -> message data ..
            data = self.pubsub.sync_backlog.retrieve_messages_by_sub_keys(
                cast_('int', topic_id),
                sub_keys,
            )

            # .. which is why we can extend out directly - sub_keys are always unique
            out.extend(data)

        self.response.payload.messages = out
# ################################################################################################################################
# ################################################################################################################################
class GetNonGDDepth(AdminService):
    """ Returns depth of non-GD messages in the input topic on current server.
    """
    class SimpleIO:
        input_required = ('topic_name',)
        output_optional:'anytuple' = (Int('depth'),)
        response_elem = None

    def handle(self) -> 'None':
        # Look up how many in-RAM messages this server holds for the topic
        topic_name = self.request.input.topic_name
        depth = self.pubsub.get_non_gd_topic_depth(topic_name)
        self.response.payload.depth = depth
# ################################################################################################################################
# ################################################################################################################################
class CollectNonGDDepth(AdminService):
    """ Checks depth of non-GD messages for the input topic on all servers and returns a combined tally.
    """
    class SimpleIO:
        input_required = ('topic_name',)
        output_optional:'anytuple' = (Int('current_depth_non_gd'),)

    def handle(self) -> 'None':

        # Ask each server process for its local depth of this topic ..
        reply = self.server.rpc.invoke_all('zato.pubsub.topic.get-non-gd-depth', {
            'topic_name':self.request.input.topic_name
        }, timeout=10)

        # .. and sum up all the partial results into a single tally.
        self.response.payload.current_depth_non_gd = sum(item['depth'] for item in reply.data)
# ################################################################################################################################
# ################################################################################################################################
class GetTopicMetadata(AdminService):
    """ Returns in-RAM metadata for the topics whose IDs are given on input.
    """
    def handle(self) -> 'None':

        # IDs of all the topics whose in-RAM metadata is to be returned
        topic_id_list = self.request.raw_request['topic_id_list']

        # Build one lookup key per topic ..
        key_iter = (_format_meta_topic_key(self.server.cluster_id, elem) for elem in topic_id_list)

        # .. read all the matching metadata from RAM ..
        result = self.server.pub_sub_metadata.get_many(key_iter)
        self.logger.info('Returning topic metadata -> %s', result)

        # .. and return it, but only if anything was found.
        if result:
            self.response.payload = result
# ################################################################################################################################
# ################################################################################################################################
| 26,101
|
Python
|
.py
| 416
| 55.074519
| 130
| 0.476174
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,363
|
search.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/search.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.api import SEARCH
from zato.common.util.search import SearchResults
from zato.common.util.time_ import datetime_from_ms
from zato.server.service.internal import AdminService
# ################################################################################################################################
# ################################################################################################################################
# Default number of results per page when paginating message lists
_page_size = SEARCH.ZATO.DEFAULTS.PAGE_SIZE
# ################################################################################################################################
# ################################################################################################################################
class NonGDSearchService(AdminService):
    """ A base class for services that produce a list of paginated non-GD messages.
    """
    def _post_process_msg_list(self, msg_list) -> 'None':
        """ Normalizes each non-GD message in place - ISO timestamps plus GD-compatible endpoint details.
        """
        for msg in msg_list:

            # Convert float timestamps in all the remaining messages to ISO-8601
            msg['pub_time'] = datetime_from_ms(msg['pub_time'] * 1000.0)

            # Expiration time is optional - convert it only if it is set
            if msg.get('expiration_time'):
                msg['expiration_time'] = datetime_from_ms(msg['expiration_time'] * 1000.0)

            # Return endpoint information in the same format GD messages are returned in
            msg['endpoint_id'] = msg.pop('published_by_id')
            msg['endpoint_name'] = self.pubsub.get_endpoint_by_id(msg['endpoint_id']).name

# ################################################################################################################################

    def set_non_gd_msg_list_response(self, msg_list, cur_page) -> 'None':
        """ Paginates a list of non-GD messages (from topics or queues) and returns results.
        """
        # Build the results metadata; sorting and per-message post-processing
        # are delegated to SearchResults.from_list via its keyword arguments
        search_results = SearchResults.from_list(
            msg_list, cur_page, _page_size, needs_sort=True, post_process_func=self._post_process_msg_list)

        # This goes to the service's response payload object ..
        self.response.payload.response = msg_list

        # .. and this is metadata so it goes to _meta.
        self.response.payload._meta = search_results.to_dict()
# ################################################################################################################################
# ################################################################################################################################
| 2,679
|
Python
|
.py
| 40
| 60.9
| 130
| 0.441692
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,364
|
endpoint.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/endpoint.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from copy import deepcopy
from json import loads
# SQLAlchemy
from sqlalchemy import delete
# Zato
from zato.common.api import PUBSUB as COMMON_PUBSUB
from zato.common.broker_message import PUBSUB
from zato.common.exception import BadRequest, Conflict
from zato.common.odb.model import PubSubEndpoint, PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription, PubSubTopic
from zato.common.odb.query import count, pubsub_endpoint, pubsub_endpoint_list, pubsub_endpoint_queue, \
pubsub_messages_for_queue, pubsub_messages_for_queue_raw, server_by_id
from zato.common.odb.query.pubsub.endpoint import pubsub_endpoint_summary, pubsub_endpoint_summary_list
from zato.common.odb.query.pubsub.subscription import pubsub_subscription_list_by_endpoint_id
from zato.common.pubsub import ensure_subs_exist, msg_pub_attrs
from zato.common.simpleio_ import drop_sio_elems
from zato.common.typing_ import cast_
from zato.common.util.pubsub import get_endpoint_metadata, get_topic_sub_keys_from_sub_keys, make_short_msg_copy_from_msg
from zato.common.util.time_ import datetime_from_ms
from zato.server.service import AsIs, Bool, Int, List
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
from zato.server.service.internal.pubsub import common_sub_data
from zato.server.service.internal.pubsub.search import NonGDSearchService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import unicode
from six import add_metaclass
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from sqlalchemy import Column
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, anylist, intnone, strdict
from zato.server.connection.server.rpc.invoker import PerPIDResponse, ServerInvocationResult
from zato.server.pubsub.model import subnone
from zato.server.service import Service
Bunch = Bunch
Column = Column
PerPIDResponse = PerPIDResponse
ServerInvocationResult = ServerInvocationResult
Service = Service
subnone = subnone
# ################################################################################################################################
# ################################################################################################################################
# Module-level configuration read by the GetListMeta / CreateEditMeta / DeleteMeta metaclasses below
elem = 'pubsub_endpoint'
model = PubSubEndpoint
label = 'a pub/sub endpoint'
get_list_docs = 'pub/sub endpoints'
# Broker messages are published with PUBSUB.ENDPOINT_* actions
broker_message = PUBSUB
broker_message_prefix = 'ENDPOINT_'
list_func = pubsub_endpoint_list
# SIO input elements to skip in the generated services
skip_input_params = ['sub_key', 'is_sub_allowed']
input_optional_extra = ['service_name']
output_optional_extra = ['service_name', 'ws_channel_name', 'sec_id', 'sec_type', 'sec_name', 'sub_key', 'endpoint_type_name']
delete_require_instance = False
# SQLAlchemy table object for pub/sub subscriptions
SubTable = PubSubSubscription.__table__
# ################################################################################################################################
# Build SIO elements for message publication attributes, wrapping each attribute
# name in an SIO type based on naming conventions.
msg_pub_attrs_sio = []

for name in msg_pub_attrs:
    # These attributes are not exposed through SIO at all
    if name in ('topic', 'is_in_sub_queue', 'position_in_group', 'group_id'):
        continue
    # IDs are passed through unchanged
    elif name.endswith('_id'):
        msg_pub_attrs_sio.append(AsIs(name))
    # Integer attributes - NOTE: 'position_in_group' is unreachable here because
    # it is already skipped by the first condition above
    elif name in ('position_in_group', 'priority', 'size', 'delivery_count', 'expiration'):
        msg_pub_attrs_sio.append(Int(name))
    # Boolean flags
    elif name.startswith(('has_', 'is_')):
        msg_pub_attrs_sio.append(Bool(name))
    # Everything else is a plain element
    else:
        msg_pub_attrs_sio.append(name)
# ################################################################################################################################
# Shorthand aliases for commonly used constants
_queue_type=COMMON_PUBSUB.QUEUE_TYPE
_meta_endpoint_key = COMMON_PUBSUB.REDIS.META_ENDPOINT_PUB_KEY

# ################################################################################################################################

# Subscription attributes skipped during updates (per the name - confirm against callers outside this view)
_sub_skip_update = ('id', 'sub_id', 'sub_key', 'cluster_id', 'creation_time', 'current_depth', 'endpoint_id', 'endpoint_type',
    'last_interaction_time', 'staging_depth', 'sql_ws_client_id', 'topic_name', 'total_depth', 'web_socket',
    'out_rest_http_soap_id', 'out_soap_http_soap_id', 'out_http_soap_id')
# ################################################################################################################################
# ################################################################################################################################
class _GetEndpointQueueMessagesSIO(GetListAdminSIO):
    """ Reusable SimpleIO definition for listing messages in an endpoint queue (see the class name).
    """
    input_required = ('cluster_id',)
    input_optional = GetListAdminSIO.input_optional + ('sub_id', 'sub_key')
    output_required = (AsIs('msg_id'), 'recv_time')
    output_optional = ('data_prefix_short', Int('delivery_count'), 'last_delivery_time', 'is_in_staging', 'queue_name',
        'endpoint_id', 'sub_key', 'published_by_id', 'published_by_name', 'server_name', 'server_pid')
    output_repeated = True
# ################################################################################################################################
# ################################################################################################################################
def _get_security_id_from_input(self:'Service', input:'strdict') -> 'intnone':
    """ Resolves a security definition ID from input - either from 'security_name'
    (translated via the worker store) or directly from 'security_id'.
    Returns None for the 'zato-no-security' marker.
    """
    # The no-security marker means there is no ID to return
    if input.get('security_name') == 'zato-no-security':
        return

    # If we have a security name on input, we need to turn it into its ID ..
    if security_name := input.get('security_name'):
        security_name = security_name.strip()
        security = self.server.worker_store.basic_auth_get(security_name)
        security = security['config']
        security_id:'int' = security['id']

    # .. otherwise, we use a security ID as it is.
    else:
        # NOTE(review): this reads self.request.input rather than the 'input' parameter -
        # fine when callers pass self.request.input but inconsistent otherwise; confirm.
        security_id = self.request.input.get('security_id')

    return security_id
# ################################################################################################################################
# ################################################################################################################################
def _get_service_id_from_input(self:'Service', input:'strdict') -> 'intnone':
    """ Resolves a service ID from input - either from 'service_name'
    (translated via the service store) or directly from 'service_id'.
    Returns None when the given name does not exist.
    """
    # If we have a service name on input, we need to turn it into its ID ..
    if service_name := input.get('service_name'):
        try:
            service_name = service_name.strip()
            service_id = self.server.service_store.get_service_id_by_name(service_name)
        except KeyError:
            # No such service - there is no ID to return
            return

    # .. otherwise, we use a service ID as it is.
    else:
        # NOTE(review): reads self.request.input rather than the 'input' parameter - confirm.
        service_id = self.request.input.get('service_id')

    return service_id
# ################################################################################################################################
# ################################################################################################################################
def instance_hook(self:'Service', input:'strdict', instance:'PubSubEndpoint', attrs:'strdict') -> 'None':
    """ Hook run by the meta-generated services to post-process an endpoint instance before it is saved.
    """
    # Nothing to do when the instance is being deleted
    if attrs['is_delete']:
        return

    # These can be given as ID or name and we need to extract the correct values here
    service_id = _get_service_id_from_input(self, input)
    security_id = _get_security_id_from_input(self, input)

    instance.service_id = service_id
    instance.security_id = security_id

    # Don't use empty string with integer attributes, set them to None (NULL) instead
    if cast_('str', service_id) == '':
        instance.service_id = None

    if cast_('str', security_id) == '':
        instance.security_id = None

    # SQLite will not accept empty strings, must be None
    instance.last_seen = instance.last_seen or None
    instance.last_pub_time = instance.last_pub_time or None
    instance.last_sub_time = instance.last_sub_time or None
    instance.last_deliv_time = instance.last_deliv_time or None
# ################################################################################################################################
def response_hook(
    self:'Service',
    input:'any_',
    instance:'any_',
    attrs:'any_',
    service_type:'str',
) -> 'None':
    """ Hook run by the meta-generated services after the response has been built.

    For create/edit, blocks until the endpoint becomes visible in self.pubsub.
    For get_list, collects the list of topics each returned endpoint is subscribed to.
    """
    if service_type == 'create_edit':
        _ = self.pubsub.wait_for_endpoint(input['name'])

    elif service_type == 'get_list':

        # We are going to check topics for each of these endpoint IDs ..
        endpoint_id_list = []

        # .. go through every endpoint found ..
        for item in self.response.payload:

            # .. append its ID for later use ..
            endpoint_id_list.append(item.id)

        # .. we have all the IDs now and we can check their topics ..
        topic_service = 'zato.pubsub.subscription.get-list'
        topic_response = self.invoke(topic_service, endpoint_id_list=endpoint_id_list)

        # .. top-level response that we are returning ..
        response = self.response.payload.getvalue()
        response = loads(response)
        response = response['zato_pubsub_endpoint_get_list_response']

        # .. first, add the required key to all the endpoints ..
        for item in response:
            item['topic_list'] = []

        # .. now, go through the items once more and populate topics for each endpoint ..
        # NOTE(review): 'response' is a dict deserialized from the payload and the mutations
        # below are not written back to self.response.payload anywhere in this function - confirm
        # whether that is intentional or whether the topic lists are being silently discarded.
        for item in response:
            for topic_dict in topic_response:
                if item['id'] == topic_dict['endpoint_id']:
                    topic_name = topic_dict['topic_name']
                    item['topic_list'].append(topic_name)
# ################################################################################################################################
def broker_message_hook(
    self:'Service',
    input:'strdict',
    instance:'PubSubEndpoint',
    attrs:'strdict',
    service_type:'str'
) -> 'None':
    """ Hook run by the meta-generated services when the broker message for create/edit is built.
    """
    if service_type == 'create_edit':
        # Read is_internal back from the database so the broker message
        # carries the stored value rather than whatever was on input.
        with closing(self.odb.session()) as session:
            input['is_internal'] = pubsub_endpoint(session, input['cluster_id'], instance.id).is_internal
# ################################################################################################################################
# ################################################################################################################################
@add_metaclass(GetListMeta)
class GetList(AdminService):
    """ Returns a list of pub/sub endpoints - the implementation is generated by GetListMeta.
    """
    # Column that the optional search query filter is matched against
    _filter_by = PubSubEndpoint.name,
# ################################################################################################################################
# ################################################################################################################################
class Create(AdminService):
    """ Creates a new pub/sub endpoint.

    Enforces that endpoint names are unique and that neither the associated service
    nor the security definition is already assigned to another endpoint.
    """
    class SimpleIO(AdminSIO):
        input_required = ('name', 'role', 'is_active', 'is_internal', 'endpoint_type')
        input_optional = ('cluster_id', 'topic_patterns', 'security_id', 'security_name', 'service_id', 'service_name', \
            'ws_channel_id')
        output_required = (AsIs('id'), 'name')
        request_elem = 'zato_pubsub_endpoint_create_request'
        response_elem = 'zato_pubsub_endpoint_create_response'
        default_value = None

    def handle(self):
        input = self.request.input
        cluster_id = input.get('cluster_id') or self.server.cluster_id

        # Both may be given as a name or an ID - resolve them to IDs here
        security_id = _get_security_id_from_input(self, self.request.input)
        service_id = _get_service_id_from_input(self, self.request.input)

        # If we had a name of a service on input but there is no ID for it, it means that that the name was invalid.
        if service_name := input.get('service_name'):
            if not service_id:
                raise BadRequest(self.cid, f'No such service -> {service_name}')

        # Services have a fixed role and patterns ..
        if input.endpoint_type == COMMON_PUBSUB.ENDPOINT_TYPE.SERVICE.id:
            role = COMMON_PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id
            topic_patterns = COMMON_PUBSUB.DEFAULT.Topic_Patterns_All
        else:
            role = input.role
            topic_patterns = input.topic_patterns

        # Populate it back so that we can reuse the same input object
        # when we publish a broker message.
        input.role = role
        input.topic_patterns = topic_patterns

        with closing(self.odb.session()) as session:
            existing_one = session.query(PubSubEndpoint.id).\
                filter(PubSubEndpoint.cluster_id==cluster_id).\
                filter(PubSubEndpoint.name==input.name).\
                first()

            # Names must be unique
            if existing_one:
                raise Conflict(self.cid, 'Endpoint `{}` already exists'.format(input.name))

            # Services cannot be assigned to more than one endpoint
            if service_id:
                try:
                    endpoint_id = self.pubsub.get_endpoint_id_by_service_id(service_id)
                except KeyError:
                    # Not assigned to any endpoint yet - that is what we want
                    pass
                else:
                    endpoint = self.pubsub.get_endpoint_by_id(endpoint_id)
                    service_name = self.server.service_store.get_service_name_by_id(service_id)
                    msg = f'Service {service_name} is already assigned to endpoint {endpoint.name}'
                    raise Conflict(self.cid, msg)

            # Security definitions cannot be assigned to more than one endpoint
            if security_id:
                try:
                    endpoint_id = self.pubsub.get_endpoint_id_by_sec_id(security_id)
                except KeyError:
                    # Not assigned to any endpoint yet - that is what we want
                    pass
                else:
                    endpoint = self.pubsub.get_endpoint_by_id(endpoint_id)
                    security = self.server.worker_store.basic_auth_get_by_id(security_id)
                    security_name:'str' = security['name']
                    msg = f'Security definition {security_name} is already assigned to endpoint {endpoint.name}'
                    raise Conflict(self.cid, msg)

            # Build and persist the new endpoint
            endpoint = PubSubEndpoint()
            endpoint.cluster_id = cluster_id # type: ignore
            endpoint.name = input.name
            endpoint.is_active = input.is_active
            endpoint.is_internal = input.is_internal
            endpoint.endpoint_type = input.endpoint_type
            endpoint.role = input.role
            endpoint.topic_patterns = input.topic_patterns
            endpoint.security_id = security_id
            endpoint.service_id = service_id
            endpoint.ws_channel_id = input.get('ws_channel_id')

            session.add(endpoint)
            session.commit()

            # Notify other servers about the new endpoint
            input.action = PUBSUB.ENDPOINT_CREATE.value
            input.id = endpoint.id
            self.broker_client.publish(input)

            self.response.payload.id = endpoint.id
            self.response.payload.name = self.request.input.name

        # Block until the new endpoint is visible in the in-RAM pub/sub configuration
        _ = self.pubsub.wait_for_endpoint(input.name)
# ################################################################################################################################
# ################################################################################################################################
@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    """ Updates an existing pub/sub endpoint - the implementation is generated by CreateEditMeta.
    """
    pass
# ################################################################################################################################
# ################################################################################################################################
@add_metaclass(DeleteMeta)
class Delete(AdminService):
    """ Deletes a pub/sub endpoint - the implementation is generated by DeleteMeta.
    """
    pass
# ################################################################################################################################
# ################################################################################################################################
class Get(AdminService):
    """ Returns details of a pub/sub endpoint.
    """
    class SimpleIO:
        input_required = ('cluster_id', AsIs('id'))
        output_required = ('id', 'name', 'is_active', 'is_internal', 'role', 'endpoint_type')
        output_optional = ('tags', 'topic_patterns', 'pub_tag_patterns', 'message_tag_patterns',
            'security_id', 'ws_channel_id', 'sec_type', 'sec_name', 'ws_channel_name', 'sub_key',
            'service_id', 'service_name', AsIs('topic_list'))

    def handle(self):

        # Local variables
        cluster_id = self.request.input.cluster_id
        endpoint_id = self.request.input.id

        # Connect to the database ..
        with closing(self.odb.session()) as session:

            # .. get basic information about this endpoint ..
            self.response.payload = pubsub_endpoint(session, self.request.input.cluster_id, self.request.input.id)

            # .. get a list of topics this endpoint is subscribed to ..
            # (the still-open SQL session is handed over to the subscription service)
            request = {'cluster_id':cluster_id, 'endpoint_id':endpoint_id, 'sql_session':session}
            topic_service = 'zato.pubsub.subscription.get-list'
            topic_list = self.invoke(topic_service, request)

            self.response.payload.topic_list = topic_list
# ################################################################################################################################
# ################################################################################################################################
class GetTopicList(AdminService):
    """ Returns all topics to which a given endpoint published at least once.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'endpoint_id')
        output_required = ('topic_id', 'topic_name', 'pub_time', AsIs('pub_msg_id'), 'pub_pattern_matched', 'has_gd', 'data')
        output_optional = (AsIs('pub_correl_id'), 'in_reply_to', AsIs('ext_client_id'), 'ext_pub_time')
        output_repeated = True

# ################################################################################################################################

    def handle(self):
        # Per-topic publication metadata is read via a shared utility function
        self.response.payload[:] = get_endpoint_metadata(self.server, self.request.input.endpoint_id)
# ################################################################################################################################
# ################################################################################################################################
class GetEndpointQueueNonGDDepth(AdminService):
    """ Returns current depth of non-GD messages for input sub_key which must have a delivery task on current server.
    """
    class SimpleIO(AdminSIO):
        input_required = 'sub_key'
        output_optional = Int('current_depth_non_gd')

    def handle(self):
        sub_key = self.request.input.sub_key

        # A pubsub tool exists only if this server runs a delivery task for the sub_key
        tool = self.pubsub.get_pubsub_tool_by_sub_key(sub_key)
        if tool:
            # The tool reports a (GD, non-GD) depth pair - only the latter is needed here
            _, non_gd_depth = tool.get_queue_depth(sub_key)
            self.response.payload.current_depth_non_gd = non_gd_depth
# ################################################################################################################################
# ################################################################################################################################
class _GetEndpointQueue(AdminService):
    """ Base class for services that return endpoint queue details, adding GD and non-GD queue depths.
    """
    def _add_queue_depths(self, session:'SASession', item:'strdict') -> 'None':
        """ Populates `item` with queue depths for the queue identified by item['sub_key'].

        The GD depth is read from the SQL database. The non-GD depth would have to be
        collected from the sub_key's delivery server but that path is currently disabled
        (see the note at the end) and the value is always reported as 0.
        """
        cluster_id = self.request.input.cluster_id
        sub_key = item['sub_key']

        current_depth_gd_q = pubsub_messages_for_queue_raw(session, cluster_id, sub_key, skip_delivered=True)

        # This could be read from the SQL database ..
        item['current_depth_gd'] = count(session, current_depth_gd_q)

        # .. but non-GD depth needs to be collected from all the servers around. Note that the server may not be known
        # in case the subscriber is a WSX client. In this case, by definition, there will be no non-GD messages for that client.
        sk_server = self.pubsub.get_delivery_server_by_sub_key(sub_key)

        if sk_server:

            if sk_server.server_name == self.server.name and sk_server.server_pid == self.server.pid:
                # The delivery task runs in this very process - read the depth locally
                # (the value is currently unused, see the note below)
                if pubsub_tool := self.pubsub.get_pubsub_tool_by_sub_key(sub_key):
                    _, current_depth_non_gd = pubsub_tool.get_queue_depth(sub_key)
            else:
                # An invoker pointing to that server
                invoker = self.server.rpc.get_invoker_by_server_name(sk_server.server_name)

                # The service we are invoking
                service_name = GetEndpointQueueNonGDDepth.get_name()

                # Inquire the server about our sub_key
                request = {
                    'sub_key': sub_key,
                }

                # Keyword arguments point to a specific PID in that server
                kwargs = {
                    'pid': sk_server.server_pid
                }

                # Do invoke the server now
                response = invoker.invoke(service_name, request, **kwargs)

                self.logger.info('*' * 50)
                self.logger.warning('Invoker -> %s', invoker)
                self.logger.warning('RESPONSE -> %s', response)
                self.logger.info('*' * 50)

        # NOTE: Extracting current_depth_non_gd from the responses collected above is currently
        # disabled - until that code path is finished, the non-GD depth is always reported as zero.
        item['current_depth_non_gd'] = 0
# ################################################################################################################################
# ################################################################################################################################
class GetEndpointQueue(_GetEndpointQueue):
    """ Returns information describing an individual endpoint queue.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'id')
        output_optional = common_sub_data

    def handle(self) -> 'None':
        with closing(self.odb.session()) as session:

            # Basic subscription/queue details from the database
            item = pubsub_endpoint_queue(session, self.request.input.cluster_id, self.request.input.id)

            # Convert float timestamps to ISO-8601
            item.creation_time = datetime_from_ms(item.creation_time * 1000.0)

            if getattr(item, 'last_interaction_time', None):
                item.last_interaction_time = datetime_from_ms(item.last_interaction_time * 1000.0)

            self.response.payload = item

            # Add GD and non-GD queue depths to the response
            self._add_queue_depths(session, self.response.payload)
# ################################################################################################################################
# ################################################################################################################################
class GetEndpointQueueList(_GetEndpointQueue):
    """ Returns all queues to which a given endpoint is subscribed.
    """
    # Columns that the list may be filtered by in search queries
    _filter_by = PubSubTopic.name, PubSubSubscription.sub_key

    class SimpleIO(GetListAdminSIO):
        input_required = ('cluster_id', 'endpoint_id')
        output_optional = common_sub_data
        output_repeated = True
        request_elem = 'zato_pubsub_endpoint_get_endpoint_queue_list_request'
        response_elem = 'zato_pubsub_endpoint_get_endpoint_queue_list_response'

    def get_data(self, session:'SASession') -> 'anylist':
        # Paginated search over this endpoint's subscriptions
        return self._search(pubsub_subscription_list_by_endpoint_id, session, self.request.input.cluster_id,
            self.request.input.endpoint_id, False)

    def handle(self) -> 'None':
        response = []

        with closing(self.odb.session()) as session:
            for item in self.get_data(session):
                item = item.get_value()

                # Enrich each row with its current GD / non-GD queue depths
                self._add_queue_depths(session, item)

                # Timestamps are stored in seconds - convert to datetime representations
                item['creation_time'] = datetime_from_ms(item['creation_time'] * 1000.0)

                if item['last_interaction_time']:
                    item['last_interaction_time'] = datetime_from_ms(item['last_interaction_time'] * 1000.0)

                # Interaction details may come back from SQL as bytes - make sure they are text
                if item['last_interaction_details']:
                    if not isinstance(item['last_interaction_details'], unicode):
                        item['last_interaction_details'] = item['last_interaction_details'].decode('utf8')

                response.append(item)

        self.response.payload[:] = response
# ################################################################################################################################
# ################################################################################################################################
class UpdateEndpointQueue(AdminService):
    """ Modifies selected subscription queue.

    Updates the SQL row, notifies all server processes of the change and,
    for endpoint types that allow it, schedules a background delivery-server move.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'id', 'sub_key', 'active_status')
        input_optional = drop_sio_elems(common_sub_data, 'active_status', 'sub_key', 'creation_time', 'last_interaction_time')
        output_required = ('id', 'name')

    def handle(self) -> 'None':

        # REST and SOAP outconn IDs have different input names but they both map
        # to the same SQL-level attribute. This means that at most one of them may be
        # provided on input. It's an error to provide both.
        out_rest_http_soap_id = self.request.input.get('out_rest_http_soap_id')
        out_soap_http_soap_id = self.request.input.get('out_soap_http_soap_id')

        if out_rest_http_soap_id and out_soap_http_soap_id:
            raise BadRequest(self.cid, 'Cannot provide both out_rest_http_soap_id and out_soap_http_soap_id on input')

        # Not every endpoint type lets users change its delivery server
        should_update_delivery_server = self.request.input.endpoint_type not in {

            # WebSockets clients dynamically attach to delivery servers hence the servers cannot be updated by users
            COMMON_PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id,

            # Services are always invoked in the same server
            COMMON_PUBSUB.ENDPOINT_TYPE.SERVICE.id,
        }

        # We know we don't have both out_rest_http_soap_id and out_soap_http_soap_id on input
        # but we still need to find out if we have any at all.
        if out_rest_http_soap_id:
            out_http_soap_id = out_rest_http_soap_id
        elif out_soap_http_soap_id:
            out_http_soap_id = out_soap_http_soap_id
        else:
            out_http_soap_id = None

        with closing(self.odb.session()) as session:

            # The subscription row to be modified - .one() raises if it does not exist
            item = session.query(PubSubSubscription).\
                filter(PubSubSubscription.id==self.request.input.id).\
                filter(PubSubSubscription.cluster_id==self.request.input.cluster_id).\
                one()

            if should_update_delivery_server:
                old_delivery_server_id = item.server_id
                new_delivery_server_id = self.request.input.server_id
                if new_delivery_server_id:
                    new_delivery_server_name = server_by_id(session, self.server.cluster_id, new_delivery_server_id).name
                else:
                    new_delivery_server_name = None
            else:
                # These are added purely for static type hints
                old_delivery_server_id = -1
                new_delivery_server_id = -1
                new_delivery_server_name = 'new-delivery-server-name'

            # Copy input values onto the SQL object, skipping read-only attributes
            for key, value in sorted(self.request.input.items()):
                if key not in _sub_skip_update:
                    if isinstance(value, bytes):
                        value = value.decode('utf8')
                    if value is not None:
                        setattr(item, key, value)

            # This one we set manually based on the logic at the top of the method
            item.out_http_soap_id = out_http_soap_id

            session.add(item)
            session.commit()

            self.response.payload.id = self.request.input.id
            self.response.payload.name = item.topic.name

            # Notify all processes, including our own, that this subscription's parameters have changed
            updated_params_msg:'strdict' = item.asdict()

            # Remove bytes objects from what we are about to publish - they had to be used
            # in SQL messages but not here. Iterate over a deep copy so the dict
            # is not modified while it is being traversed.
            for key, value in deepcopy(updated_params_msg).items():
                if isinstance(value, bytes):
                    updated_params_msg[key] = value.decode('utf8')

            updated_params_msg['action'] = PUBSUB.SUBSCRIPTION_EDIT.value
            self.broker_client.publish(updated_params_msg)

            # We change the delivery server in background - note how we send name, not ID, on input.
            # This is because our invocation target will want to use
            # self.server.rpc.get_invoker_by_server_name(server_name).invoke(...)
            if should_update_delivery_server:
                if old_delivery_server_id != new_delivery_server_id:
                    self.broker_client.publish({
                        'sub_key': self.request.input.sub_key,
                        'endpoint_type': item.endpoint.endpoint_type,
                        'old_delivery_server_id': old_delivery_server_id,
                        'new_delivery_server_name': new_delivery_server_name,
                        'action': PUBSUB.DELIVERY_SERVER_CHANGE.value,
                    })
# ################################################################################################################################
# ################################################################################################################################
class ClearEndpointQueue(AdminService):
    """ Clears messages from the queue given on input.

    Input: cluster_id and sub_key identify the queue; the optional queue_type
    limits the operation to either the current or the staging queue - without it,
    messages from both queues are deleted.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'sub_key')
        input_optional = ('queue_type',)

    def handle(self) -> 'None':

        # Make sure the (optional) queue type is one of the allowed values
        # and map it to the is_in_staging flag used at the SQL level.
        if queue_type := self.request.input.queue_type:
            if queue_type not in _queue_type:
                raise BadRequest(self.cid, 'Invalid queue_type:`{}`'.format(queue_type))

            # Only the staging queue maps to is_in_staging=True
            is_in_staging = queue_type == _queue_type.STAGING
        else:
            # No queue type given on input = clear messages regardless of the queue they are in
            is_in_staging = None

        # Remove all references to the queue given on input
        with closing(self.odb.session()) as session:
            q = session.query(PubSubEndpointEnqueuedMessage).\
                filter(PubSubEndpointEnqueuedMessage.cluster_id==self.request.input.cluster_id).\
                filter(PubSubEndpointEnqueuedMessage.sub_key==self.request.input.sub_key)

            # Narrow the deletion down to one queue only if a queue type was given on input
            if is_in_staging is not None:
                q = q.filter(cast_('Column', PubSubEndpointEnqueuedMessage.is_in_staging).is_(is_in_staging))
            _ = q.delete()
            session.commit()

        # Notify delivery tasks that this queue has been cleared
        self.broker_client.publish({
            'sub_key': self.request.input.sub_key,
            'action': PUBSUB.QUEUE_CLEAR.value,
        })
# ################################################################################################################################
# ################################################################################################################################
class DeleteEndpointQueue(AdminService):
    """ Deletes input message queues for a subscriber based on sub_keys - including all messages
    and their parent subscription object.

    Exactly one of sub_key (a single key) or sub_key_list must be given on input.
    """
    class SimpleIO(AdminSIO):
        input_optional = ('cluster_id', 'sub_key', List('sub_key_list'))

    def handle(self) -> 'None':
        sub_key = self.request.input.sub_key
        sub_key_list = self.request.input.sub_key_list

        # Validate that exactly one form of input was provided ..
        if not(sub_key or sub_key_list):
            raise BadRequest(self.cid, 'Exactly one of sub_key or sub_key_list is required')

        if sub_key and sub_key_list:
            raise BadRequest(self.cid, 'Cannot provide both sub_key and sub_key_list on input')

        # .. and normalize a single sub_key to a one-element list.
        if sub_key:
            sub_key_list = [sub_key] # Otherwise, we already had sub_key_list on input so 'else' is not needed

        # Fall back to this server's cluster if no cluster_id was given on input
        cluster_id = self.request.input.get('cluster_id') or self.server.cluster_id

        with closing(self.odb.session()) as session:

            # First we need a list of topics to which sub_keys were related - required by broker messages.
            topic_sub_keys = get_topic_sub_keys_from_sub_keys(session, cluster_id, sub_key_list)

            # .. log what we are about to do ..
            self.logger.info('Deleting subscriptions `%s`', topic_sub_keys)

            # .. delete all subscriptions from the sub_key list ..
            _:'any_' = session.execute(
                delete(SubTable).\
                where(
                    SubTable.c.sub_key.in_(sub_key_list),
                )
            )

            # .. and commit the changes permanently.
            session.commit()

        # Notify workers about deleted subscription(s)
        self.broker_client.publish({
            'topic_sub_keys': topic_sub_keys,
            'action': PUBSUB.SUBSCRIPTION_DELETE.value,
        })
# ################################################################################################################################
# ################################################################################################################################
class _GetMessagesBase:
def _get_sub_by_sub_input(
self:'Service', # type: ignore
input:'Bunch'
) -> 'subnone':
if input.get('sub_id'):
return self.pubsub.get_subscription_by_id(input.sub_id)
elif input.get('sub_key'):
return self.pubsub.get_subscription_by_sub_key(input.sub_key)
else:
raise Exception('Either sub_id or sub_key must be given on input')
# ################################################################################################################################
# ################################################################################################################################
class GetEndpointQueueMessagesGD(AdminService, _GetMessagesBase):
    """ Returns a list of GD messages queued up for input subscription.
    """
    # Column that search queries may filter by
    _filter_by = PubSubMessage.data_prefix,
    SimpleIO = _GetEndpointQueueMessagesSIO # type: ignore

    def get_data(self, session:'SASession') -> 'anylist':
        input = self.request.input
        sub = self._get_sub_by_sub_input(input)

        # No matching subscription = nothing to return
        if not sub:
            self.logger.info('Could not find subscription by input `%s` (#1)', input)
            return []

        # Paginated search over GD messages enqueued for this sub_key
        return self._search(
            pubsub_messages_for_queue, session, self.request.input.cluster_id, sub.sub_key, True, False)

    def handle(self) -> 'None':
        with closing(self.odb.session()) as session:
            self.response.payload[:] = [elem.get_value() for elem in self.get_data(session)]

        # Post-process each row - convert timestamps and resolve publisher names
        for item in self.response.payload:
            item['recv_time'] = datetime_from_ms(item['recv_time'] * 1000.0)
            item['published_by_name'] = self.pubsub.get_endpoint_by_id(item['published_by_id']).name
# ################################################################################################################################
# ################################################################################################################################
class GetServerEndpointQueueMessagesNonGD(AdminService):
    """ Returns a list of non-GD messages for an input queue by its sub_key which must exist on current server,
    i.e. current server must be the delivery server for this sub_key.
    """
    SimpleIO = _GetEndpointQueueMessagesSIO # type: ignore

    def handle(self) -> 'None':

        # Lengths used to build shortened copies of message data for the response
        data_prefix_len = self.pubsub.data_prefix_len
        data_prefix_short_len = self.pubsub.data_prefix_short_len

        # The pub/sub tool exists only if this server actually delivers for the sub_key
        if ps_tool := self.pubsub.get_pubsub_tool_by_sub_key(self.request.input.sub_key):
            messages = ps_tool.get_messages(self.request.input.sub_key, False)

            # Return shortened copies rather than full message bodies
            self.response.payload[:] = [
                make_short_msg_copy_from_msg(elem, data_prefix_len, data_prefix_short_len) for elem in messages]

            # Convert timestamps and resolve publisher names for each message
            for elem in self.response.payload:
                elem['recv_time'] = datetime_from_ms(elem['recv_time'] * 1000.0)
                elem['published_by_name'] = self.pubsub.get_endpoint_by_id(elem['published_by_id']).name
# ################################################################################################################################
# ################################################################################################################################
class GetEndpointQueueMessagesNonGD(NonGDSearchService, _GetMessagesBase):
    """ Returns a list of non-GD messages for an input queue by its sub_key.

    Unlike GetServerEndpointQueueMessagesNonGD, this service may be invoked on any
    server - it locates the sub_key's delivery server and queries it over RPC.
    """
    SimpleIO = _GetEndpointQueueMessagesSIO # type: ignore

    def handle(self) -> 'None':
        input = self.request.input
        sub = self._get_sub_by_sub_input(input)

        # No matching subscription = nothing to return
        if not sub:
            self.logger.info('Could not find subscription by input `%s` (#2)', input)
            return

        # Only the delivery server for this sub_key holds its non-GD messages in RAM
        sk_server = self.pubsub.get_delivery_server_by_sub_key(sub.sub_key)

        if sk_server:
            invoker = self.server.rpc.get_invoker_by_server_name(sk_server.server_name)
            response = invoker.invoke(GetServerEndpointQueueMessagesNonGD.get_name(), {
                'cluster_id': self.request.input.cluster_id,
                'sub_key': sub.sub_key,
            }, pid=sk_server.server_pid)

            # Newest messages are returned first
            if response:
                self.response.payload[:] = reversed(response['response'])
# ################################################################################################################################
# ################################################################################################################################
class _GetEndpointSummaryBase(AdminService):
    """ Base class for services returning summaries about endpoints
    """
    class SimpleIO:
        input_required = ('cluster_id',)
        input_optional = ('topic_id',)
        output_required = ('id', 'endpoint_name', 'endpoint_type', 'subscription_count', 'is_active', 'is_internal')
        # Subclasses share this output set; elements already listed in output_required
        # are dropped from the common subscription data to avoid duplicates.
        output_optional = ['security_id', 'sec_type', 'sec_name', 'ws_channel_id', 'ws_channel_name',
            'service_id', 'service_name', 'last_seen', 'last_deliv_time', 'role', 'endpoint_type_name'] + \
            drop_sio_elems(common_sub_data, 'endpoint_name', 'endpoint_type', 'is_internal')
# ################################################################################################################################
# ################################################################################################################################
class GetEndpointSummary(_GetEndpointSummaryBase):
    """ Returns summarized information about a selected endpoint subscribed to topics.
    """
    class SimpleIO(_GetEndpointSummaryBase.SimpleIO):
        input_required = _GetEndpointSummaryBase.SimpleIO.input_required + ('endpoint_id',)
        request_elem = 'zato_pubsub_subscription_get_endpoint_summary_request'
        response_elem = 'zato_pubsub_subscription_get_endpoint_summary_response'

    def handle(self) -> 'None':
        with closing(self.odb.session()) as session:
            item = pubsub_endpoint_summary(session, self.server.cluster_id, self.request.input.endpoint_id)

            # The SQL row is a named tuple - convert it to a dict for further processing
            item = item._asdict()

            # Convert millisecond timestamps to datetime representations, if present
            if item['last_seen']:
                item['last_seen'] = datetime_from_ms(item['last_seen'])

            if item['last_deliv_time']:
                item['last_deliv_time'] = datetime_from_ms(item['last_deliv_time'])

            # Resolve the endpoint type's identifier to its human-readable name
            item['endpoint_type_name'] = COMMON_PUBSUB.ENDPOINT_TYPE.get_name_by_type(item['endpoint_type'])

            self.response.payload = item
# ################################################################################################################################
# ################################################################################################################################
class GetEndpointSummaryList(_GetEndpointSummaryBase):
    """ Returns summarized information about all endpoints subscribed to topics.
    """
    # Column that search queries may filter by
    _filter_by = PubSubEndpoint.name,

    class SimpleIO(_GetEndpointSummaryBase.SimpleIO, GetListAdminSIO):
        request_elem = 'zato_pubsub_endpoint_get_endpoint_summary_list_request'
        response_elem = 'zato_pubsub_endpoint_get_endpoint_summary_list_response'

    def get_data(self, session:'SASession') -> 'anylist':

        # This will be a list of dictionaries that we return
        out = []

        # These are SQL rows - optionally limited to endpoints subscribed to one topic
        result = self._search(pubsub_endpoint_summary_list, session, self.request.input.cluster_id,
            self.request.input.get('topic_id') or None, False)

        for item in result:
            item = item._asdict()

            # Convert millisecond timestamps to datetime representations, if present
            if item['last_seen']:
                item['last_seen'] = datetime_from_ms(item['last_seen'])

            if item['last_deliv_time']:
                item['last_deliv_time'] = datetime_from_ms(item['last_deliv_time'])

            # Resolve the endpoint type's identifier to its human-readable name
            item['endpoint_type_name'] = COMMON_PUBSUB.ENDPOINT_TYPE.get_name_by_type(item['endpoint_type'])

            out.append(item)

        return out

    def handle(self):
        with closing(self.odb.session()) as session:
            self.response.payload[:] = self.get_data(session)
# ################################################################################################################################
# ################################################################################################################################
class GetTopicSubList(AdminService):
    """ Returns a list of topics to which a given endpoint has access for subscription,
    including both topics that it is already subscribed to and all the remaining ones
    the endpoint could possibly subscribe to.
    """
    class SimpleIO(AdminSIO):
        input_required = ('endpoint_id', 'cluster_id')
        input_optional = ('topic_filter_by',)
        output_optional = (List('topic_sub_list'),)

    def handle(self) -> 'None':

        # Local shortcuts
        endpoint_id = self.request.input.endpoint_id
        filter_by = self.request.input.topic_filter_by

        # Response to produce
        out = []

        # For all topics this endpoint may in theory subscribe to ..
        for topic in self.pubsub.get_sub_topics_for_endpoint(endpoint_id):

            # .. skip topics whose names do not match the optional substring filter ..
            if filter_by and (filter_by not in topic.name):
                continue

            # .. add each of them, along with information if the endpoint is already subscribed.
            out.append({
                'cluster_id': self.request.input.cluster_id,
                'endpoint_id': endpoint_id,
                'topic_id': topic.id,
                'topic_name': topic.name,
                'is_subscribed': self.pubsub.is_subscribed_to(endpoint_id, topic.name)
            })

        self.response.payload.topic_sub_list = out
# ################################################################################################################################
# ################################################################################################################################
class GetServerDeliveryMessages(AdminService):
    """ Returns a list of messages to be delivered to input endpoint. The messages must exist on current server.
    """
    class SimpleIO(AdminSIO):
        input_required = 'sub_key'
        output_optional = List('msg_list')

    def handle(self) -> 'None':

        # The pub/sub tool exists only if this server is the delivery server for the sub_key;
        # pulling removes the messages from the in-RAM delivery queue.
        if ps_tool := self.pubsub.get_pubsub_tool_by_sub_key(self.request.input.sub_key):
            self.response.payload.msg_list = ps_tool.pull_messages(self.request.input.sub_key)
# ################################################################################################################################
# ################################################################################################################################
class GetDeliveryMessages(AdminService, _GetMessagesBase):
    """ Returns a list of messages to be delivered to input endpoint.

    Locates the delivery server for the subscription, pulls the messages from it
    over RPC and filters out any whose subscriptions no longer exist.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cluster_id', 'sub_key')
        output_optional = msg_pub_attrs_sio
        output_repeated = True
        skip_empty_keys = True
        default_value = None

    def handle(self) -> 'None':
        input = self.request.input
        sub = self._get_sub_by_sub_input(input)

        # No matching subscription = nothing to return
        if not sub:
            self.logger.info('Could not find subscription by input `%s` (#3)', input)
            return

        sk_server = self.pubsub.get_delivery_server_by_sub_key(sub.sub_key)

        if sk_server:
            invoker = self.server.rpc.get_invoker_by_server_name(sk_server.server_name)
            response = invoker.invoke(GetServerDeliveryMessages.get_name(), {
                'sub_key': sub.sub_key,
            }, pid=sk_server.server_pid)

            if response:

                # It may be a dict on a successful invocation ..
                if isinstance(response, dict):
                    data = response      # type: ignore
                    data = data['response'] # type: ignore

                # .. otherwise, it may be an IPCResponse object.
                else:
                    data = response.data # type: ignore

                # Extract the actual list of messages ..
                data = cast_('strdict', data)
                msg_list = data['msg_list']

                # .. reverse it so the newest messages come first ..
                msg_list = reversed(msg_list)
                msg_list = list(msg_list)

                # .. at this point the topic may have been already deleted ..
                try:
                    topic = self.pubsub.get_topic_by_sub_key(sub.sub_key)
                    topic_name = topic.name
                except KeyError:
                    self.logger.info('Could not find topic by sk `%s`', sub.sub_key)
                    topic_name = '(None)'

                # .. make sure that all of the sub_keys actually still exist ..
                # NOTE(review): msg_list is passed twice below - confirm against
                # ensure_subs_exist's signature that this is intentional.
                with closing(self.odb.session()) as session:
                    msg_list = ensure_subs_exist(
                        session,
                        topic_name,
                        msg_list,
                        msg_list,
                        'returning to endpoint',
                        '<no-ctx-string>'
                    )

                self.response.payload[:] = msg_list

        else:
            self.logger.info('Could not find delivery server for sub_key:`%s`', sub.sub_key)
# ################################################################################################################################
# ################################################################################################################################
class GetEndpointMetadata(AdminService):
    """ An invoker making use of the API that Redis-based communication used to use.

    Looks up the list of topics associated with an endpoint in this server's
    pub/sub metadata store.
    """
    def handle(self) -> 'None':

        # Local aliases
        endpoint_id = self.request.raw_request['endpoint_id']

        # Build a full key to look up data by ..
        endpoint_key = _meta_endpoint_key % (self.server.cluster_id, endpoint_id)

        # .. get the data ..
        topic_list = self.server.pub_sub_metadata.get(endpoint_key)

        # .. and return it to our caller - an empty response means no metadata was found.
        if topic_list:
            self.response.payload = {'topic_list': topic_list}
# ################################################################################################################################
# ################################################################################################################################
| 48,687
|
Python
|
.py
| 825
| 49.33697
| 130
| 0.513064
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,365
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.api import PUBSUB
from zato.common.exception import Forbidden
from zato.common.odb.model import PubSubSubscription, PubSubTopic
from zato.common.typing_ import cast_
from zato.server.service import AsIs, Bool, DateTime, Int, Opaque
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
# Imports used for static type hints only - never executed at runtime
if 0:
    from zato.common.typing_ import anydict, anylist, anytuple, stranydict, strlist
    from zato.server.pubsub.delivery.tool import PubSubTool

    PubSubTool = PubSubTool

# ################################################################################################################################

logger_pubsub = getLogger('zato_pubsub.srv')

# ################################################################################################################################

# Jitter to add to sleep_time so as not to have all worker processes issue the same queries at the same time,
# one of 0.1, 0.2 or 0.3.
cleanup_sleep_jitter = [elem / 10.0 for elem in range(1, 4, 1)]

# ################################################################################################################################

# Maps each endpoint type to the service that is notified about new messages for it
endpoint_type_service = {
    PUBSUB.ENDPOINT_TYPE.INTERNAL.id: 'zato.pubsub.delivery.notify-pub-sub-message',
    PUBSUB.ENDPOINT_TYPE.REST.id: 'zato.pubsub.delivery.notify-pub-sub-message',
    PUBSUB.ENDPOINT_TYPE.SERVICE.id: 'zato.pubsub.delivery.notify-pub-sub-message',
    PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id: 'zato.channel.web-socket.client.notify-pub-sub-message',
}

# ################################################################################################################################

# Maps hook types to the ODB models that hook services are attached to
hook_type_model = {
    PUBSUB.HOOK_TYPE.BEFORE_PUBLISH: PubSubTopic,
    PUBSUB.HOOK_TYPE.BEFORE_DELIVERY: PubSubSubscription,
}

# Markers used as error_source identifiers when storing non-GD messages in RAM
_no_sk='no-sk'
_notify_error='notify-error'

# The only endpoint type that ResumeWSXSubscription accepts
_wsx_expected_endpoint_type=PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id
# ################################################################################################################################
# ################################################################################################################################
class CommonSubData:
    """ Groups of SimpleIO elements shared by subscription-related services - one group
    of elements common to all endpoint types plus one group per endpoint type.
    """
    # Elements common to subscriptions of all endpoint types.
    # Note: the previous version listed 'out_http_method' twice - the duplicate is removed.
    common = ('subscription_id', 'is_internal', 'topic_name', 'active_status', 'endpoint_type', 'endpoint_id',
        'endpoint_name', 'delivery_method', 'delivery_data_format', 'delivery_batch_size', Bool('wrap_one_msg_in_list'),
        'delivery_max_retry', Bool('delivery_err_should_block'), 'wait_sock_err', 'wait_non_sock_err', 'server_id',
        'server_name', 'out_http_method', DateTime('creation_time'), DateTime('last_interaction_time'),
        'last_interaction_type', 'last_interaction_details', Int('total_depth'), Int('current_depth_gd'),
        Int('current_depth_non_gd'), 'sub_key', 'has_gd', 'is_staging_enabled', 'sub_id', 'name', AsIs('ws_ext_client_id'),
        AsIs('ext_client_id'), 'topic_id', 'should_ignore_if_sub_exists', 'should_delete_all') # type: anytuple

    # Per-endpoint-type element groups
    amqp = ('out_amqp_id', 'amqp_exchange', 'amqp_routing_key')
    files = ('files_directory_list',)
    ftp = ('ftp_directory_list',)
    pubapi = ('security_id',)
    rest = ('out_http_soap_id', 'out_rest_http_soap_id', 'rest_connection', 'rest_delivery_endpoint')
    service = ('service_id',)
    sms_twilio = ('sms_twilio_from', 'sms_twilio_to_list')
    smtp = (Bool('smtp_is_html'), 'smtp_subject', 'smtp_from', 'smtp_to_list', 'smtp_body') # type: anytuple
    soap = ('out_soap_http_soap_id', 'soap_delivery_endpoint')
    wsx = ('ws_channel_id', 'ws_channel_name', AsIs('ws_pub_client_id'), 'sql_ws_client_id', Bool('unsub_on_wsx_close'),
        Opaque('web_socket')) # type: anytuple
# ################################################################################################################################
# The complete set of SIO elements covering subscriptions of all endpoint types
common_sub_data = CommonSubData.common + CommonSubData.amqp + CommonSubData.files + \
    CommonSubData.ftp + CommonSubData.rest + CommonSubData.service + \
    CommonSubData.sms_twilio + CommonSubData.smtp + CommonSubData.soap + CommonSubData.wsx + CommonSubData.pubapi
# ################################################################################################################################
# ################################################################################################################################
class AfterPublish(AdminService):
    """ A hook service invoked after each publication, sends messages from current server to delivery tasks.
    """
    class SimpleIO(AdminSIO):
        input_required = ('cid', AsIs('topic_id'), 'topic_name', 'is_bg_call', Opaque('pub_time_max')) # type: anytuple
        input_optional = (Opaque('subscriptions'), Opaque('non_gd_msg_list'), 'has_gd_msg_list') # type: anytuple

    def handle(self) -> 'None':
        try:
            # Notify all background tasks that new messages are available for their recipients.
            # However, this needs to take into account the fact that there may be many notifications
            # pointing to a single server so instead of sending notifications one by one,
            # we first find all servers and then notify each server once giving it a list of subscriptions on input.
            #
            # We also need to remember that recipients may be currently offline, or in any other way inaccessible,
            # in which case we keep non-GD messages in our server's RAM.

            # Extract sub_keys from live Python subscription objects
            sub_key_data = [{'sub_key':sub.config.sub_key, 'is_wsx':bool(sub.config.ws_channel_id)} \
                for sub in self.request.input.subscriptions]

            #
            # There are two elements returned.
            #
            # current_servers - a list of servers that we know have currently subscribers
            #                   for messsages on whose behalf we are being called
            #
            # not_found ------- a list of sub_keys for which right now we don't have any servers
            #                   with delivery tasks
            #
            # All servers from current_servers will be invoked and notified about messages published (GD and non-GD).
            # For all sub_keys from not_found, information about non-GD messages for each of them will be kept in RAM.
            #
            # Additionally, for all servers from current_servers that can not be invoked for any reasons,
            # we will also store non-GD messages in our RAM store.
            #
            # Note that GD messages are not passed here directly at all - this is because at this point
            # they have been already stored in SQL by publish service before the current one has run.
            #
            current_servers, not_found = self.pubsub.get_task_servers_by_sub_keys(sub_key_data)

            # Local aliases
            cid = self.request.input.cid
            topic_id = self.request.input.topic_id
            topic_name = self.request.input.topic_name
            non_gd_msg_list = self.request.input.non_gd_msg_list
            has_gd_msg_list = self.request.input.has_gd_msg_list
            is_bg_call = self.request.input.is_bg_call
            pub_time_max = self.request.input.pub_time_max

            # We already know that we can store some of the messages in RAM,
            # but only if there are any non-GD ones to keep in RAM.
            if not_found and non_gd_msg_list:
                self._store_in_ram(cid, topic_id, topic_name, not_found, non_gd_msg_list, _no_sk)

            # .. but if some servers are up, attempt to notify pub/sub tasks about the messages ..
            if current_servers:
                notif_error_sub_keys = self._notify_pub_sub(current_servers, non_gd_msg_list,
                    has_gd_msg_list, is_bg_call, pub_time_max)

                # .. but if there are any errors, store them in RAM as though they were from not_found in the first place.
                # Note that only non-GD messages go to RAM because the GD ones are still in the SQL database.
                if notif_error_sub_keys:

                    # This will signal that non-GD messages should be retried
                    if non_gd_msg_list:
                        self._store_in_ram(cid, topic_id, topic_name, notif_error_sub_keys, non_gd_msg_list, _notify_error)

                    # This will signal that GD messages should be retried
                    if has_gd_msg_list:
                        self.pubsub.after_gd_sync_error(topic_id, 'AfterPublish.gd_notif_error_sub_keys', pub_time_max)

        except Exception:
            # A hook service must never propagate exceptions to its caller - log and continue
            self.logger.warning('Error in after_publish callback, e:`%s`', format_exc())

# ################################################################################################################################

    def _store_in_ram(
        self,
        cid:'str',
        topic_id:'int',
        topic_name:'str',
        sub_keys:'strlist',
        non_gd_msg_list:'anylist',
        error_source:'str'=''
    ) -> 'None':
        """ Stores in RAM all input messages for all sub_keys.
        """
        self.pubsub.store_in_ram(cid, topic_id, topic_name, sub_keys, non_gd_msg_list, error_source)

# ################################################################################################################################

    def _notify_pub_sub(
        self,
        current_servers: 'anydict',
        non_gd_msg_list: 'anylist',
        has_gd_msg_list: 'bool',
        is_bg_call: 'bool',
        pub_time_max: 'float'
    ) -> 'anylist':
        """ Notifies all relevant remote servers about new messages available for delivery.
        For GD messages - a flag is sent to indicate that there is at least one message waiting in SQL DB.
        For non-GD messages - their actual contents is sent.

        Returns a list of sub_keys whose notifications failed and need to be retried.
        """
        notif_error_sub_keys = [] # type: strlist

        for server_info, sub_key_list in current_servers.items():
            server_name, server_pid, pub_client_id, channel_name, endpoint_type = server_info

            # Which service to notify depends on the recipient's endpoint type
            service_name = endpoint_type_service[endpoint_type]

            full_request = {
                'pub_client_id': pub_client_id,
                'channel_name': channel_name,
                'request': {
                    'endpoint_type': endpoint_type,
                    'has_gd': has_gd_msg_list,
                    'sub_key_list': sub_key_list,
                    'non_gd_msg_list': non_gd_msg_list,
                    'is_bg_call': is_bg_call,
                    'pub_time_max': pub_time_max,
                },
            }

            try:
                invoker = self.server.rpc.get_invoker_by_server_name(server_name)
                invoker.invoke(service_name, full_request, pid=server_pid)
            except Exception:
                # Log to both the service's and pub/sub's loggers, then mark
                # all of this server's sub_keys as needing a retry.
                for logger in (self.logger, logger_pubsub):
                    logger.warning('Error in pub/sub notification, service:`%s` req:`%s` pid:`%s` e:`%s`',
                        service_name, full_request, server_pid, format_exc())

                notif_error_sub_keys.extend(sub_key_list)

        return notif_error_sub_keys
# ################################################################################################################################
# ################################################################################################################################
class ResumeWSXSubscription(AdminService):
    """ Invoked by WSX clients after they reconnect with a list of their sub_keys on input.
    Collects all messages waiting on other servers for that WebSocket and enqueues any available for a task that is started
    on behalf of that WebSocket.
    """
    class SimpleIO(AdminSIO):
        input_required = ('sub_key',)

    def handle(self) -> 'None':

        # Local aliases
        sub_key_list = [self.request.input.sub_key]
        async_msg = self.wsgi_environ['zato.request_ctx.async_msg']

        # This will exist if are being invoked directly ..
        environ = async_msg.get('environ')

        # .. however, if there is a service on whose behalf we are invoked, the 'environ' key will be further nested.
        if not environ:
            _wsgi_environ = async_msg['wsgi_environ']
            _async_msg = _wsgi_environ['zato.request_ctx.async_msg']
            environ = _async_msg['environ']

        # We now have environ in one way or another
        wsx = environ['web_socket']
        pubsub_tool = wsx.pubsub_tool # type: PubSubTool

        # Need to confirm that our WebSocket previously created all the input sub_keys
        wsx_channel_id = environ['ws_channel_config'].id
        wsx_endpoint = self.pubsub.get_endpoint_by_ws_channel_id(wsx_channel_id)

        # First off, make sure that input sub_key(s) were previously created by current WebSocket
        for sub_key in sub_key_list:

            sub = self.pubsub.get_subscription_by_sub_key(sub_key)

            # An unknown sub_key is only logged - the remaining ones may still be valid
            if not sub:
                self.logger.info('No such sub_key `%s` (ResumeWSXSubscription)', sub_key)
                continue

            # The subscription must be a WebSockets one ..
            if sub.config['endpoint_type'] != _wsx_expected_endpoint_type:
                self.logger.warning('Subscription `%s` endpoint_type:`%s` did not match `%s`',
                    sub_key, sub.config['endpoint_type'], _wsx_expected_endpoint_type)
                raise Forbidden(self.cid)

            # .. and it must belong to the endpoint of the current WebSocket channel.
            if wsx_endpoint.name != sub.config['endpoint_name']:
                expected_endpoint = self.pubsub.get_endpoint_by_id(sub.config['endpoint_id'])
                self.logger.warning('Current WSX endpoint did not match sub_key `%s` endpoint, current:%s (%s) vs. expected:%s (%s)',
                    sub_key, wsx_endpoint.name, wsx_endpoint.id, expected_endpoint.name, expected_endpoint.id)
                raise Forbidden(self.cid)

        try:
            with closing(self.odb.session()) as session:

                # Everything is performed using that WebSocket's pub/sub lock to ensure that both
                # in-RAM and SQL (non-GD and GD) messages are made available to the WebSocket as a single unit.
                with pubsub_tool.lock:

                    # Ask all servers for any non-GD messages they hold in RAM for our sub_keys
                    get_in_ram_service = 'zato.pubsub.topic.get-in-ram-message-list'
                    reply = self.server.rpc.invoke_all(get_in_ram_service, {
                        'sub_key_list':sub_key_list
                    }, timeout=120)

                    # Parse non-GD messages on output from all servers, if any at all, into per-sub_key lists ..
                    reply_data = cast_('anylist', reply.data) # type: ignore

                    if reply_data:
                        non_gd_messages = self._parse_non_gd_messages(sub_key_list, reply_data)

                        # If there are any non-GD messages, add them to this WebSocket's pubsub tool.
                        if non_gd_messages:
                            for sub_key, messages in non_gd_messages.items():
                                pubsub_tool.add_sub_key_no_lock(sub_key)
                                pubsub_tool.add_non_gd_messages_by_sub_key(sub_key, messages)

                    # For each sub_key from input ..
                    for sub_key in sub_key_list:

                        # .. add relevant SQL objects ..
                        self.pubsub.add_wsx_client_pubsub_keys(session, environ['sql_ws_client_id'], sub_key,
                            environ['ws_channel_config'].name, environ['pub_client_id'],
                            environ['web_socket'].get_peer_info_dict())

                        # .. update state of that WebSocket's pubsub tool that keeps track of message delivery
                        pubsub_tool.add_sub_key_no_lock(sub_key)

                # Everything is ready - note that pubsub_tool itself will enqueue any initial messages
                # using its enqueue_initial_messages method which does it in batches.
                session.commit() # type: ignore

        except Exception:
            self.logger.warning('Error while resuming WSX pub/sub for keys `%s`, e:`%s`', sub_key_list, format_exc())
            raise
        else:
            # No exception = all good and we can register this pubsub_tool with self.pubsub now
            for sub_key in sub_key_list:
                self.pubsub.set_pubsub_tool_for_sub_key(sub_key, pubsub_tool)

            # No exceptions here = we have resumed the subscription(s) successfully and we can report it
            _log_info = {}
            for _sub_key in sub_key_list:
                _log_info[_sub_key] = self.pubsub.get_topic_by_sub_key(_sub_key).name

            # Note the closing backtick around the second placeholder - the previous format string was missing it
            self.logger.info('Subscription%sresumed: `%s`', ' ' if len(sub_key_list) == 1 else 's ', _log_info)

# ################################################################################################################################

    def _parse_non_gd_messages(
        self,
        sub_key_list: 'strlist',
        messages_list: 'anylist'
    ) -> 'anydict':
        """ Groups non-GD messages returned by all servers into per-sub_key lists,
        dropping sub_keys for which no messages were found.
        """
        # A dict comprehension is required here - the previous `dict.fromkeys(sub_key_list, [])`
        # shared a single list object across ALL keys, so every message was effectively
        # appended to every sub_key's list at once.
        out = {sub_key: [] for sub_key in sub_key_list} # type: stranydict

        for messages in messages_list:
            messages = messages['response']
            for sub_key, sub_key_data in messages.items():
                for msg in sub_key_data:
                    out[sub_key].append(msg)

        # Do not return empty lists unnecessarily - note that it may happen that all sub_keys
        # will be deleted in which cases only an empty dictionary remains.
        for sub_key in sub_key_list:
            if not out[sub_key]:
                del out[sub_key]

        return out
# ################################################################################################################################
# ################################################################################################################################
#
# Since v3.2, these services are no longer in use.
# They were replaced by zato pubsub cleanup.
#
class _BaseCleanup(AdminService):
    """ Base class for the deprecated cleanup services below - deliberately a no-op.
    """
    def handle(self):
        # Intentionally empty - since v3.2 cleanup is performed by the `zato pubsub cleanup` command instead.
        pass

class DeleteMsgDelivered(_BaseCleanup):
    # Deprecated no-op since v3.2 - superseded by `zato pubsub cleanup`.
    pass

class DeleteMsgExpired(_BaseCleanup):
    # Deprecated no-op since v3.2 - superseded by `zato pubsub cleanup`.
    pass

class DeleteEnqDelivered(_BaseCleanup):
    # Deprecated no-op since v3.2 - superseded by `zato pubsub cleanup`.
    pass

class DeleteEnqMarkedDeleted(_BaseCleanup):
    # Deprecated no-op since v3.2 - superseded by `zato pubsub cleanup`.
    pass

class CleanupService(AdminService):
    # Deprecated no-op since v3.2 - superseded by `zato pubsub cleanup`.
    pass
# ################################################################################################################################
# ################################################################################################################################
| 18,987
|
Python
|
.py
| 305
| 51.57377
| 133
| 0.538929
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,366
|
migrate.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/migrate.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
class MigrateDeliveryServer(AdminService):
    """ Synchronously notifies all servers that a migration is in progress for input sub_key, then stops a delivery task
    on current server and starts it on another one.
    """
    class SimpleIO(AdminSIO):
        input_required = ('sub_key', 'new_delivery_server_name', 'endpoint_type')

# ################################################################################################################################

    def handle(self):

        # Local aliases
        sub_key = self.request.input.sub_key
        new_delivery_server_name = self.request.input.new_delivery_server_name
        endpoint_type = self.request.input.endpoint_type

        # Get a PubSubTool for this sub_key ..
        pub_sub_tool = self.pubsub.pubsub_tool_by_sub_key[sub_key]

        # .. and find a particular delivery for this very sub key.
        task = pub_sub_tool.get_delivery_task(sub_key)

        self.logger.info('About to migrate delivery task for sub_key `%s` (%s) to server `%s`',
            sub_key, endpoint_type, new_delivery_server_name)

        # First, let other servers know that this sub_key is no longer being handled.
        # We do it synchronously to make sure that they do not send anything to us anymore.
        reply = self.server.rpc.invoke_all('zato.pubsub.migrate.notify-delivery-task-stopping', {
            'sub_key': sub_key,
            'endpoint_type': endpoint_type,
            'new_delivery_server_name': new_delivery_server_name,
        })

        # Bug fix: abort only when the notification FAILED. The previous condition
        # (`if reply.is_ok:`) was inverted - it aborted on success and carried on after a failure.
        if not reply.is_ok:
            self.logger.warning('Could not notify other servers of a stopping delivery task, e:`%s`', reply)
            return

        # Stop the task before proceeding to make sure this task will handle no new messages
        task.stop()

        # Clear any in-progress messages out of RAM. Note that any non-GD remaining messages
        # will be lost but GD are in SQL anyway so they will be always available on the new server.
        task.clear()

        # We can remove this task from its pubsub_tool so as to release some memory
        pub_sub_tool.remove_sub_key(sub_key)

        # We can let the new server know it can start its task for sub_key
        self.logger.info('Notifying server `%s` to start delivery task for `%s` (%s)', new_delivery_server_name,
            sub_key, endpoint_type)

        # Name of the service we are to invoke
        service_name = 'zato.pubsub.delivery.create-delivery-task'

        # Try to look up that subscription ..
        sub = self.pubsub.get_subscription_by_sub_key(sub_key)

        # .. create a new task if the subscription exists ..
        if sub:
            invoker = self.server.rpc.get_invoker_by_server_name(new_delivery_server_name)
            invoker.invoke(service_name, {
                'sub_key': sub_key,
                'endpoint_type': endpoint_type,
                'task_delivery_interval': sub.task_delivery_interval
            })

        # .. or log an exception otherwise.
        else:
            msg = 'Could not find sub_key `%s` to invoke service `%s` with'
            self.logger.info(msg, sub_key, service_name)
# ################################################################################################################################
class NotifyDeliveryTaskStopping(AdminService):
    """ Invoked when a delivery task is about to stop - deletes from pubsub information about input sub_key's delivery task.
    Thanks to this, when a message is published and there is no new delivery task running yet, this message will be queued up
    instead of being delivered to a task that is about to stop. The new task will pick it up when it has started up.
    """
    class SimpleIO:
        input_required = ('sub_key', 'endpoint_type')

    def handle(self):
        # Forget the server that used to run the delivery task for this sub_key.
        sub_key = self.request.input.sub_key
        self.pubsub.delete_sub_key_server(sub_key)
# ################################################################################################################################
| 4,461
|
Python
|
.py
| 74
| 52.013514
| 130
| 0.588856
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,367
|
subscription.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/subscription.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from contextlib import closing
from logging import getLogger
# Bunch
from bunch import Bunch
# SQLAlchemy
from sqlalchemy import update
# Zato
from zato.common.api import PUBSUB
from zato.common.broker_message import PUBSUB as BROKER_MSG_PUBSUB
from zato.common.exception import BadRequest, NotFound, Forbidden, PubSubSubscriptionExists
from zato.common.odb.model import PubSubSubscription
from zato.common.odb.query.pubsub.queue import get_queue_depth_by_sub_key
from zato.common.odb.query.pubsub.subscribe import add_subscription, add_wsx_subscription, has_subscription, \
move_messages_to_sub_queue
from zato.common.odb.query.pubsub.subscription import pubsub_subscription_list_by_endpoint_id_no_search, \
pubsub_subscription_list_by_endpoint_id_list_no_search, pubsub_subscription_list_no_search
from zato.common.pubsub import new_sub_key
from zato.common.simpleio_ import drop_sio_elems
from zato.common.typing_ import cast_
from zato.common.util.api import get_sa_model_columns, make_repr
from zato.common.util.time_ import datetime_to_ms, utcnow_as_ms
from zato.server.connection.web_socket import WebSocket
from zato.server.pubsub import PubSub
from zato.server.pubsub.model import Topic
from zato.server.service import AsIs, Bool, Int, List, Opaque, Service
from zato.server.service.internal import AdminService, AdminSIO
from zato.server.service.internal.pubsub import common_sub_data
# ################################################################################################################################
if 0:
from sqlalchemy import Column
from zato.common.typing_ import any_, boolnone, dictlist, intlist, intnone, optional, strnone
from zato.common.model.wsx import WSXConnectorConfig
Column = Column
WSXConnectorConfig = WSXConnectorConfig
# ################################################################################################################################
logger_pubsub = getLogger('zato_pubsub.srv')
# ################################################################################################################################
# For pyflakes and code completion
PubSub = PubSub
Topic = Topic
WebSocket = WebSocket
# ################################################################################################################################
# All SQLAlchemy column names of the PubSubSubscription model - used when building
# per-subscription broker messages (each column is copied into the outgoing config).
sub_broker_attrs:'any_' = get_sa_model_columns(PubSubSubscription)

# Optional SimpleIO input for subscription services - everything from common_sub_data
# except the elements that are required or handled separately.
sub_impl_input_optional = list(common_sub_data)
sub_impl_input_optional.remove('is_internal')
sub_impl_input_optional.remove('topic_name')
# ################################################################################################################################
class SubCtx:
    """ A container for information pertaining to a given subscription request.
    """
    # Core pub/sub objects describing what is being subscribed to and when
    pubsub: 'PubSub'
    cluster_id: 'int'
    topic: 'Topic'
    creation_time: 'float'
    # Guaranteed-delivery flag - if not set to a bool explicitly, the subscribe
    # service inherits it from the topic
    has_gd: 'any_' = None
    is_internal: 'boolnone' = None
    topic_name: 'str' = ''
    server_id: 'intnone' = None
    # The pattern that allowed this subscription to be created
    sub_pattern_matched: 'strnone' = None
    active_status: 'strnone' = None
    endpoint_type: 'strnone' = None
    endpoint_id: 'intnone' = None
    # Delivery-related configuration
    delivery_method: 'strnone' = None
    delivery_data_format: 'strnone' = None
    delivery_batch_size: 'intnone' = None
    wrap_one_msg_in_list: 'boolnone' = None
    delivery_max_retry: 'intnone' = None
    delivery_err_should_block: 'boolnone' = None
    wait_sock_err:'intnone' = None
    wait_non_sock_err:'intnone' = None
    ext_client_id: 'str' = ''
    delivery_endpoint: 'strnone' = None
    # Outgoing connections used for deliveries
    out_http_soap_id: 'intnone' = None
    out_http_method: 'strnone' = None
    out_amqp_id: 'intnone' = None
    sub_key: 'strnone' = None
    security_id: 'intnone' = None
    # WebSockets-specific attributes
    ws_channel_id: 'intnone' = None
    ws_channel_name: 'strnone' = None
    sql_ws_client_id: 'intnone' = None
    unsub_on_wsx_close: 'boolnone' = None
    ws_pub_client_id: 'strnone' = None
    web_socket: 'optional[WebSocket]'

    def __init__(self, cluster_id:'int', pubsub:'PubSub') -> 'None':
        self.cluster_id = cluster_id
        self.pubsub = pubsub

    def __repr__(self) -> 'str':
        return make_repr(self)

    def set_endpoint_id(self) -> 'None':
        """ Resolves self.endpoint_id from whichever identifier is available - an explicit
        endpoint ID, a security definition or a WSX channel. Raises ValueError when none is set.
        """
        if self.endpoint_id:
            return
        elif self.security_id:
            self.endpoint_id = self.pubsub.get_endpoint_id_by_sec_id(self.security_id)
        elif self.ws_channel_id:
            wsx_endpoint_id = self.pubsub.get_endpoint_id_by_ws_channel_id(self.ws_channel_id)
            # NOTE(review): if no endpoint exists for this channel, endpoint_id is left unset
            # without raising - presumably handled downstream; confirm this is intended.
            if wsx_endpoint_id:
                self.endpoint_id = wsx_endpoint_id
        else:
            raise ValueError('Could not obtain endpoint_id')

    def after_properties_set(self) -> 'None':
        """ A hook that lets subclasses customize this object after it is known that all common properties have been set.
        """
# ################################################################################################################################
class SubCtxSecBased(SubCtx):
    """ Pub/sub context config for endpoints based around security definitions (e.g. REST and SOAP).
    """
    def __init__(self, *args:'any_', **kwargs:'any_') -> 'None':
        super().__init__(*args, **kwargs)
        # Filled in later from the subscription request
        self.security_id = None
# ################################################################################################################################
class SubCtxREST(SubCtxSecBased):
    """ Pub/sub context config for REST endpoints.
    """
    def __init__(self, *args:'any_', **kwargs:'any_') -> 'None':
        super().__init__(*args, **kwargs)
        # REST-specific aliases for the generic delivery attributes
        self.out_rest_http_soap_id = None
        self.rest_delivery_endpoint = None

    def after_properties_set(self) -> 'None':
        super().after_properties_set()
        # Map the REST-specific attribute names onto the generic ones used downstream
        self.out_http_soap_id = self.out_rest_http_soap_id
        self.delivery_endpoint = self.rest_delivery_endpoint
# ################################################################################################################################
class SubCtxService(SubCtx):
    """ Pub/sub context config for Zato service endpoints.
    """
    def __init__(self, *args:'any_', **kwargs:'any_') -> 'None':
        super().__init__(*args, **kwargs)
        # Filled in later from the subscription request
        self.service_id = None
# ################################################################################################################################
class SubCtxWebSockets(SubCtx):
    """ Pub/sub context config for WebSockets endpoints.
    """
    def __init__(self, *args:'any_', **kwargs:'any_') -> 'None':
        super().__init__(*args, **kwargs)
        # WSX-specific details, filled in later from the subscription request
        self.ws_channel_id = None
        self.ws_channel_name = None
        self.ws_pub_client_id = ''
        self.sql_ws_client_id = None
        self.web_socket = None
        # By default a WSX subscription ends when its connection closes
        self.unsub_on_wsx_close = True
# ################################################################################################################################
# Maps each endpoint type to the SubCtx subclass that holds its subscription request data
ctx_class = {
    PUBSUB.ENDPOINT_TYPE.REST.id: SubCtxREST,
    PUBSUB.ENDPOINT_TYPE.SERVICE.id: SubCtxService,
    PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id: SubCtxWebSockets,
}
# ################################################################################################################################
class _Subscribe(AdminService):
    """ Base class for services implementing pub/sub subscriptions.
    """
    def _get_sub_pattern_matched(
        self,
        topic_name:'str',
        ws_channel_id:'intnone',
        sql_ws_client_id:'intnone',
        security_id:'intnone',
        endpoint_id:'intnone',
    ) -> 'str':
        """ Returns the subscription pattern that permits the caller to subscribe to topic_name,
        raising Forbidden when no such pattern matches.
        """
        pubsub = self.server.worker_store.pubsub

        # A WebSockets channel must always come with its SQL client ID
        if ws_channel_id and (not sql_ws_client_id):
            raise BadRequest(self.cid, 'sql_ws_client_id must not be empty if ws_channel_id is given on input')

        # Confirm if this client may subscribe at all to the topic it chose -
        # by endpoint if we have one, otherwise by security definition or WSX channel.
        if endpoint_id:
            sub_pattern_matched = pubsub.is_allowed_sub_topic_by_endpoint_id(topic_name, endpoint_id)
        elif security_id:
            sub_pattern_matched = pubsub.is_allowed_sub_topic(topic_name, security_id=security_id)
        else:
            sub_pattern_matched = pubsub.is_allowed_sub_topic(topic_name, ws_channel_id=ws_channel_id)

        # A match means the caller may proceed; no match means no permission.
        if sub_pattern_matched:
            return cast_('str', sub_pattern_matched)
        else:
            raise Forbidden(self.cid)

    # Check if subscription is allowed and getting a pattern that would have matched is the same thing.
    _is_subscription_allowed = _get_sub_pattern_matched
# ################################################################################################################################
class SubscribeServiceImpl(_Subscribe):
    """ Lower-level service that actually handles pub/sub subscriptions. Each endpoint_type has its own subclass.
    """
    endpoint_type:'str'

    class SimpleIO(AdminSIO):
        input_required = 'topic_name'
        input_optional = drop_sio_elems(common_sub_data, 'is_internal', 'topic_name')
        output_optional = 'sub_key', 'queue_depth'
        default_value = None
        force_empty_keys = True

# ################################################################################################################################

    def _get_sub_ctx(self) -> 'SubCtx':
        """ Returns a new pub/sub config context specific to self.endpoint_type.
        """
        # Create output object
        ctx = ctx_class[self.endpoint_type](self.server.cluster_id, self.server.worker_store.pubsub)

        # Set all attributes that we were given on input
        for k, v in self.request.input.items():
            setattr(ctx, k, v)

        # If there is no server_id on input, check if we have its name.
        # If we do not have a name either, we use our own server as the delivery one.
        if not (server_id := self.request.input.server_id):

            # .. no server_id on input, perhaps we have a name ..
            # (Bug fix: this previously re-read `server_id`, which had just been found falsy,
            # so the lookup by name below could never run.)
            if server_name := self.request.input.server_name:

                # .. get a list of all servers we are aware of ..
                servers = self.invoke('zato.server.get-list', cluster_id=self.server.cluster_id)

                # .. skip root elements ..
                if 'zato_server_get_list_response' in servers:
                    servers = servers['zato_server_get_list_response']

                # .. find our server in the list ..
                for server in servers:
                    if server['name'] == server_name:
                        server_id = server['id']
                        break
                else:
                    # Bug fix: this message was previously not an f-string so the name was never interpolated
                    raise Exception(f'Server not found -> {server_name}')

            # .. no name, let's use our own server.
            else:
                server_id = self.server.id

        # .. if we are here, it means that we have server_id obtained in one way or another.
        ctx.server_id = server_id

        # Now we can compute endpoint ID
        ctx.set_endpoint_id()

        # Call hooks
        ctx.after_properties_set()

        # Return data
        return ctx

    def _handle_subscription(self, ctx:'SubCtx') -> 'None':
        # Each endpoint_type-specific subclass overrides this method
        raise NotImplementedError('Must be implemented by subclasses')

# ################################################################################################################################

    def handle(self) -> 'None':

        # Get basic pub/sub subscription context
        ctx = self._get_sub_ctx()

        # Confirm correctness of input data, including whether the caller can subscribe
        # to this topic and if the topic exists at all.
        ctx.sub_pattern_matched = self._get_sub_pattern_matched(
            ctx.topic_name, ctx.ws_channel_id, ctx.sql_ws_client_id, ctx.security_id, ctx.endpoint_id)

        try:
            topic = ctx.pubsub.get_topic_by_name(ctx.topic_name)
        except KeyError:
            raise NotFound(self.cid, 'No such topic `{}`'.format(ctx.topic_name))
        else:
            ctx.topic = topic

        # Inherit GD from topic if it is not set explicitly
        ctx.has_gd = ctx.has_gd if isinstance(ctx.has_gd, bool) else topic.has_gd

        # Ok, we can actually subscribe the caller now
        self._handle_subscription(ctx)

# ################################################################################################################################

    def _subscribe_impl(self, ctx:'SubCtx') -> 'None':
        """ Invoked by subclasses to subscribe callers using input pub/sub config context.
        """
        with self.lock('zato.pubsub.subscribe.%s' % (ctx.topic_name), timeout=90):

            # Is it a WebSockets client?
            is_wsx = bool(ctx.ws_channel_id)

            # These casts are needed for pylance
            web_socket = cast_('WebSocket', None)
            sql_ws_client_id = cast_('int', None)
            ws_channel_name = cast_('str', None)
            ws_pub_client_id = cast_('str', None)

            if is_wsx:
                web_socket = cast_('WebSocket', ctx.web_socket)
                sql_ws_client_id = cast_('int', ctx.sql_ws_client_id)
                ws_channel_name = cast_('str', ctx.ws_channel_name)
                ws_pub_client_id = cast_('str', ctx.ws_pub_client_id)

            # Endpoint on whose behalf the subscription will be made
            endpoint_id = cast_('int', ctx.endpoint_id)
            endpoint = self.pubsub.get_endpoint_by_id(endpoint_id)

            with closing(self.odb.session()) as session:
                with session.no_autoflush:

                    # Non-WebSocket clients cannot subscribe to the same topic multiple times,
                    # unless should_ignore_if_sub_exists is set to True (e.g. enmasse does it).
                    if not is_wsx:
                        if has_subscription(session, ctx.cluster_id, ctx.topic.id, ctx.endpoint_id):
                            if self.request.input.should_ignore_if_sub_exists:
                                # We do not raise an exception but we do not continue either.
                                return
                            else:
                                msg = f'Endpoint `{endpoint.name}` is already subscribed to topic `{ctx.topic.name}`'
                                raise PubSubSubscriptionExists(self.cid, msg)

                    ctx.creation_time = now = utcnow_as_ms()
                    sub_key = new_sub_key(self.endpoint_type, ctx.ext_client_id)

                    # Create a new subscription object and flush the session because the subscription's ID
                    # may be needed for the WSX subscription
                    ps_sub = add_subscription(session, ctx.cluster_id, sub_key, ctx)
                    session.flush()

                    # Common configuration for WSX and broker messages
                    sub_config = Bunch()
                    sub_config.topic_name = ctx.topic.name
                    sub_config.task_delivery_interval = ctx.topic.task_delivery_interval
                    sub_config.endpoint_name = endpoint.name
                    sub_config.endpoint_type = self.endpoint_type
                    sub_config.unsub_on_wsx_close = ctx.unsub_on_wsx_close
                    sub_config.ext_client_id = ctx.ext_client_id

                    for name in sub_broker_attrs: # type: ignore
                        sub_config[name] = getattr(ps_sub, name, None)

                    #
                    # At this point there may be several cases depending on whether there are already other subscriptions
                    # or messages in the topic.
                    #
                    # * If there are subscribers, then this method will not move any messages because the messages
                    #   will have been already moved to queues of other subscribers before we are called
                    #
                    # * If there are no subscribers but there are messages in the topic then this subscriber will become
                    #   the sole recipient of the messages (we don't have any intrinsic foreknowledge of when, if at all,
                    #   other subscribers can appear)
                    #
                    # * If there are no subscribers and no messages in the topic then this is a no-op
                    #
                    move_messages_to_sub_queue(session, ctx.cluster_id, ctx.topic.id, ctx.endpoint_id,
                        ctx.sub_pattern_matched, sub_key, now)

                    # Subscription's ID is available only now, after the session was flushed
                    sub_config.id = ps_sub.id

                    # Update current server's pub/sub config
                    self.pubsub.add_subscription(sub_config)

                    if is_wsx:

                        # This object persists across multiple WSX connections
                        _ = add_wsx_subscription(session, ctx.cluster_id, ctx.is_internal, sub_key,
                            ctx.ext_client_id, ctx.ws_channel_id, ps_sub.id)

                        # This object will be transient - dropped each time a WSX client disconnects
                        self.pubsub.add_wsx_client_pubsub_keys(
                            session,
                            sql_ws_client_id,
                            sub_key,
                            ws_channel_name,
                            ws_pub_client_id,
                            web_socket.get_peer_info_dict()
                        )

                        # Let the WebSocket connection object know that it should handle this particular sub_key
                        web_socket.pubsub_tool.add_sub_key(sub_key)

                    # Commit all changes
                    session.commit()

                    # Produce response
                    self.response.payload.sub_key = sub_key

                    if is_wsx:

                        # Let the pub/sub task know it can fetch any messages possibly enqueued for that subscriber,
                        # note that since this is a new subscription, it is certain that only GD messages may be available,
                        # never non-GD ones.
                        web_socket.pubsub_tool.enqueue_gd_messages_by_sub_key(sub_key)

                        gd_depth, non_gd_depth = web_socket.pubsub_tool.get_queue_depth(sub_key)
                        self.response.payload.queue_depth = gd_depth + non_gd_depth
                    else:

                        # TODO:
                        # This should be read from that client's delivery task instead of SQL so as to include
                        # non-GD messages too.

                        self.response.payload.queue_depth = get_queue_depth_by_sub_key(session, ctx.cluster_id, sub_key, now)

                # Notify workers of a new subscription
                sub_config.action = BROKER_MSG_PUBSUB.SUBSCRIPTION_CREATE.value

                # Append information about current server which will let all workers
                # know if they should create a subscription object (if they are different) or not.
                sub_config.server_receiving_subscription_id = self.server.id
                sub_config.server_receiving_subscription_pid = self.server.pid
                sub_config.is_api_call = True

                logger_pubsub.info('Subscription created id=`%s`; t=`%s`; sk=`%s`; patt=`%s`',
                    sub_config['id'], sub_config['topic_name'], sub_config['sub_key'], sub_config['sub_pattern_matched'])

                self.broker_client.publish(sub_config)
# ################################################################################################################################
class SubscribeWebSockets(SubscribeServiceImpl):
    """ Handles pub/sub subscriptions for WebSockets.
    """
    name = 'zato.pubsub.subscription.subscribe-websockets'
    endpoint_type = PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id

    def _handle_subscription(self, ctx:'SubCtxWebSockets') -> 'None':
        # WebSockets always deliver over their own connection, hence a fixed delivery method.
        ctx.delivery_method = PUBSUB.DELIVERY_METHOD.WEB_SOCKET.id
        self._subscribe_impl(ctx)
# ################################################################################################################################
class SubscribeREST(SubscribeServiceImpl):
    """ Implements pub/sub subscriptions for clients connecting over REST.
    """
    endpoint_type = PUBSUB.ENDPOINT_TYPE.REST.id

    def _handle_subscription(self, ctx:'SubCtx') -> 'None':
        # No REST-specific preparation is needed - hand over to the common implementation.
        self._subscribe_impl(ctx)
# ################################################################################################################################
class SubscribeService(SubscribeServiceImpl):
    """ Implements pub/sub subscriptions for endpoints that are Zato services.
    """
    endpoint_type = PUBSUB.ENDPOINT_TYPE.SERVICE.id

    def _handle_subscription(self, ctx:'SubCtx') -> 'None':
        # No service-specific preparation is needed - hand over to the common implementation.
        self._subscribe_impl(ctx)
class SubscribeSrv(SubscribeService):
    # Alias of SubscribeService under a shorter name - presumably kept so existing
    # references to this service name keep working; TODO confirm with callers.
    pass
# ################################################################################################################################
# ################################################################################################################################
class GetList(Service):
    """ Returns all topics that an endpoint is subscribed to.
    """
    input:'any_' = '-cluster_id', '-endpoint_id', AsIs('-endpoint_id_list'), '-endpoint_name', AsIs('-sql_session')

# ################################################################################################################################

    def _get_all_items(self, sql_session:'any_', cluster_id:'int') -> 'dictlist':
        """ Returns subscription details of all endpoints in the cluster.
        """
        # Our response to produce
        out:'dictlist' = []

        # .. get all subscriptions in the cluster ..
        items = pubsub_subscription_list_no_search(sql_session, cluster_id)

        # .. go through everything found ..
        for item in items:

            # .. append it for later use ..
            out.append({
                'endpoint_name': item.endpoint_name,
                'endpoint_type': item.endpoint_type,
                'topic_name': item.topic_name,
                'delivery_server': item.server_name,
                'delivery_method': item.delivery_method,
                'rest_connection': item.rest_connection,
                'rest_method': item.out_http_method,
            })

        return out

# ################################################################################################################################

    def _get_items_by_endpoint_id(self, sql_session:'any_', cluster_id:'int', endpoint_id:'int') -> 'dictlist':
        """ Returns names of the topics that a single endpoint is subscribed to.
        """
        # Our response to produce
        out:'dictlist' = []

        # .. get all subscriptions for that endpoint ..
        items = pubsub_subscription_list_by_endpoint_id_no_search(sql_session, cluster_id, endpoint_id)

        # .. go through everything found ..
        for item in items:

            # .. append it for later use ..
            out.append({'topic_name':item.topic_name})

        return out

# ################################################################################################################################

    def _get_items_by_endpoint_id_list(self, sql_session:'any_', cluster_id:'int', endpoint_id_list:'intlist') -> 'dictlist':
        """ Returns subscriptions of all the endpoints given on input by their IDs.
        """
        # Our response to produce
        out:'dictlist' = []

        # .. get all subscriptions for these endpoints ..
        items = pubsub_subscription_list_by_endpoint_id_list_no_search(sql_session, cluster_id, endpoint_id_list)

        # .. go through everything found ..
        for item in items:

            # .. append it for later use ..
            out.append({
                'endpoint_id': item.endpoint_id,
                'endpoint_name': item.endpoint_name,
                'topic_name': item.topic_name,
            })

        return out

# ################################################################################################################################

    def handle(self) -> 'None':

        # Our response to produce
        out = []

        # Local variables ..
        sql_session = self.request.input.sql_session
        cluster_id = self.request.input.cluster_id or self.server.cluster_id
        endpoint_id = self.request.input.endpoint_id
        endpoint_name = self.request.input.endpoint_name
        endpoint_id_list = self.request.input.endpoint_id_list

        # .. we can be invoked by endpoint ID, a list of IDs or with a name ..
        if endpoint_id or endpoint_id_list:
            # Explicitly do nothing here
            pass

        elif endpoint_name:
            endpoint = self.pubsub.get_endpoint_by_name(endpoint_name)
            endpoint_id = endpoint.id

        # .. connect to the database or reuse a connection we received on input ..
        if sql_session:
            is_new_sql_session = False
        else:
            is_new_sql_session = True
            sql_session = self.odb.session()

        # A plain try/finally is enough here - the previous `except Exception: raise`
        # clause was a no-op and has been removed.
        try:
            # .. get all subscriptions for that endpoint ..
            if endpoint_id:
                items = self._get_items_by_endpoint_id(sql_session, cluster_id, endpoint_id)

            # .. or for a list of endpoints
            elif endpoint_id_list:
                items = self._get_items_by_endpoint_id_list(sql_session, cluster_id, endpoint_id_list)

            # .. or for all of endpoints ..
            else:
                items = self._get_all_items(sql_session, cluster_id)

            # .. populate it for later use ..
            out[:] = items

        finally:
            # Close the session only if we opened it ourselves
            if is_new_sql_session:
                _ = sql_session.close()

        # .. and return everything to our caller.
        self.response.payload = out
# ################################################################################################################################
# ################################################################################################################################
class Create(_Subscribe):
    """ Creates a new pub/sub subscription by invoking a subscription service specific to input endpoint_type.

    Topics may be given as a multi-line string (topic_list_text), a JSON list (topic_list_json)
    or a single name (topic_name) - the first non-empty source, in that order, wins.
    The endpoint may be given either by ID or by name.
    """
    def handle(self) -> 'None':

        # This is a multi-line string of topic names - blank lines are filtered out
        # so that they do not turn into subscriptions to an empty topic name.
        topic_list_text = [
            elem.strip()
            for elem in (self.request.raw_request.pop('topic_list_text', '') or '').splitlines()
            if elem.strip()
        ]

        # This is a JSON list of topic names
        topic_list_json = self.request.raw_request.pop('topic_list_json', [])

        # This is a single topic, wrapped in a list for uniformity with the other two sources
        topic_name = self.request.raw_request.pop('topic_name', '').strip()
        if topic_name:
            topic_name = [topic_name]

        # With no topics on input, there is nothing for us to do
        if not (topic_list_text or topic_list_json or topic_name):
            return

        # The first non-empty source of topic names wins
        if topic_list_text:
            topic_list = topic_list_text
        elif topic_list_json:
            topic_list = topic_list_json
        else:
            topic_list = topic_name

        # Try to use an endpoint by its ID ..
        if not (endpoint_id := self.request.raw_request.get('endpoint_id') or 0):

            # .. we have no endpoint ID so we will try endpoint name ..
            if not (endpoint_name := self.request.raw_request.get('endpoint_name')):
                raise Exception(f'Either endpoint_id or endpoint_name should be given on input to {self.name}')

            # .. if we are here, we have an endpoint by its name and we turn it into an ID now ..
            endpoint = self.pubsub.get_endpoint_by_name(endpoint_name)
            endpoint_id = endpoint.id

        # Optionally, delete all the subscriptions for that endpoint
        # before any new ones (potentially the same ones) will be created.
        # This is a flag that enmasse may pass on.
        if self.request.raw_request.get('should_delete_all'):
            _ = self.invoke(DeleteAll, endpoint_id=endpoint_id)

        # If we have a REST connection by its name, we need to turn it into an ID
        if rest_connection := self.request.raw_request.get('rest_connection'):
            rest_connection_item = self.server.worker_store.get_outconn_rest(rest_connection)
            if rest_connection_item:
                rest_connection_item = rest_connection_item['config']
                out_rest_http_soap_id = rest_connection_item['id']
                self.request.raw_request['out_rest_http_soap_id'] = out_rest_http_soap_id
            else:
                msg = f'REST outgoing connection not found -> {rest_connection}'
                raise Exception(msg)

        # For all topics given on input, check it upfront if caller may subscribe to all of them
        check_input = [
            int(self.request.raw_request.get('ws_channel_id') or 0),
            int(self.request.raw_request.get('sql_ws_client_id') or 0),
            int(self.request.raw_request.get('security_id') or 0),
            int(endpoint_id),
        ]
        for topic_name in topic_list:
            try:
                # Assignment to sub_pattern_matched will need to be changed once
                # we support subscriptions to multiple topics at a time,
                # but for the time being, this is fine.
                self.request.raw_request['sub_pattern_matched'] = self._is_subscription_allowed(topic_name, *check_input)
            except Forbidden:
                self.logger.warning('Could not subscribe to `%r` using `%r`', topic_name, check_input)
                raise

        sub_service = 'zato.pubsub.subscription.subscribe-{}'.format(self.request.raw_request['endpoint_type'])
        sub_request = self.request.raw_request

        # Append the endpoint ID, either because we earlier received it on input,
        # or because we had to obtain it via endpoint_name.
        sub_request['endpoint_id'] = endpoint_id

        # Invoke subscription for each topic given on input. At this point we know we can subscribe to all of them.
        # Note that the response returned to the caller is the one from the last topic on the list.
        for topic_name in topic_list:
            sub_request['topic_name'] = topic_name
            response = self.invoke(sub_service, sub_request)

        self.response.payload = response
# ################################################################################################################################
class DeleteAll(AdminService):
    """ Removes every pub/sub subscription that belongs to a particular endpoint,
    which may be given either by its ID or by its name.
    """
    class SimpleIO(AdminService.SimpleIO):
        input = '-cluster_id', '-endpoint_id', '-endpoint_name'

    def handle(self) -> 'None':

        # Fall back to this server's cluster if none was given on input
        cluster_id = self.request.input.cluster_id or self.server.cluster_id

        # Resolve the endpoint's name into an ID unless the ID itself was provided
        endpoint_id = self.request.input.endpoint_id
        if not endpoint_id:
            endpoint_id = self.pubsub.get_endpoint_by_name(self.request.input.endpoint_name).id

        with closing(self.odb.session()) as session:

            # Collect the sub_keys of all the subscriptions of that endpoint ..
            subscriptions = list(pubsub_subscription_list_by_endpoint_id_no_search(session, cluster_id, endpoint_id))
            sub_key_list = [subscription.sub_key for subscription in subscriptions]

            # .. and, if there are any, delete them all in one go.
            if sub_key_list:
                self.invoke('zato.pubsub.endpoint.delete-endpoint-queue', {
                    'cluster_id': cluster_id,
                    'sub_key_list': sub_key_list,
                })
# ################################################################################################################################
class CreateWSXSubscriptionForCurrent(AdminService):
    """ A high-level, simplified, service for creating subscriptions for a WSX. Calls CreateWSXSubscription ultimately.
    """
    class SimpleIO:
        input_required = 'topic_name'
        output_optional = 'sub_key'

    def handle(self) -> 'None':
        # Subscribe the WebSocket behind the current connection to the input topic
        # and hand the resulting sub_key back to our caller.
        topic_name = self.request.input.topic_name
        sub_key = self.pubsub.subscribe(topic_name, use_current_wsx=True, service=self)
        self.response.payload.sub_key = sub_key
# ################################################################################################################################
class CreateWSXSubscription(AdminService):
    """ Low-level interface for creating a new pub/sub subscription for current WebSocket connection.

    Exactly one of topic_name or topic_name_list must be given on input. For a single topic,
    the subscription response is returned directly; for a list, per-topic details go to sub_data.
    """
    class SimpleIO:
        input_optional:'any_' = 'topic_name', List('topic_name_list'), Bool('wrap_one_msg_in_list'), Int('delivery_batch_size')
        output_optional = 'sub_key', 'current_depth', 'sub_data'
        response_elem = None
        force_empty_keys = True

    def handle(self) -> 'None':

        # Local aliases
        topic_name = self.request.input.topic_name
        topic_name_list = set(self.request.input.topic_name_list)

        # Details of the WSX message that invoked us
        async_msg = self.wsgi_environ['zato.request_ctx.async_msg']
        async_msg_wsgi_environ = async_msg.get('wsgi_environ', {})

        # By default, subscriptions are removed when the WSX connection closes
        unsub_on_wsx_close = async_msg_wsgi_environ.get('zato.request_ctx.pubsub.unsub_on_wsx_close', True)

        # This will exist if we are being invoked directly ..
        environ = async_msg.get('environ')

        # .. however, if there is a service on whose behalf we are invoked, the 'environ' key will be further nested.
        if not environ:
            _wsgi_environ = async_msg['wsgi_environ']
            _async_msg = _wsgi_environ['zato.request_ctx.async_msg']
            environ = _async_msg['environ']

        ws_channel_id = environ['ws_channel_config'].id

        # Make sure the WSX channel actually points to an endpoint. If it does not,
        # we cannot proceed, i.e. there is no such API client.
        endpoint_id = self.pubsub.get_endpoint_id_by_ws_channel_id(ws_channel_id)
        if not endpoint_id:
            self.logger.info('There is no pub/sub endpoint for WSX channel ID `%s`', ws_channel_id)
            environ['web_socket'].disconnect_client()

            # Channels listed in this environment variable are allowed to lack an endpoint
            if environ['ws_channel_config'].name in (os.environ.get('Zato_WSX_Missing_Endpoints_To_Ignore') or ''):
                return
            else:
                raise Forbidden(self.cid)

        # Either an exact topic name or a list thereof is needed ..
        if not (topic_name or topic_name_list):
            raise BadRequest(self.cid, 'Either topic_name or topic_name_list is required')

        # .. but we cannot accept both of them.
        elif topic_name and topic_name_list:
            raise BadRequest(self.cid, 'Cannot provide both topic_name and topic_name_list on input')

        subscribe_to = [topic_name] if topic_name else topic_name_list
        responses = {}

        for item in subscribe_to:
            ws_channel_config = environ['ws_channel_config'] # type: WSXConnectorConfig
            request = {
                'is_internal': False,
                'topic_name': item,
                'ws_channel_id': ws_channel_id,
                'ext_client_id': environ['ext_client_id'],
                'ws_pub_client_id': environ['pub_client_id'],
                'ws_channel_name': ws_channel_config.name,
                'sql_ws_client_id': environ['sql_ws_client_id'],
                'unsub_on_wsx_close': unsub_on_wsx_close,
                'web_socket': environ['web_socket'],
            }

            # Fall back to the default batch size if none was given on input.
            # Note that previously this was assigned twice in a row - the first assignment was dead code.
            request['delivery_batch_size'] = self.request.input.get('delivery_batch_size') or PUBSUB.DEFAULT.DELIVERY_BATCH_SIZE

            response = self.invoke('zato.pubsub.subscription.subscribe-websockets', request)
            response = response['response']
            responses[item] = response

        # There was only one topic on input ..
        if topic_name:
            self.response.payload = responses[topic_name]

        # .. or a list of topics on was given on input.
        else:
            out = []
            for key, value in responses.items(): # type: ignore
                out.append({
                    'topic_name': key,
                    'sub_key': value['sub_key'],
                    'current_depth': value['queue_depth'],
                })
            self.response.payload.sub_data = out
# ################################################################################################################################
class UpdateInteractionMetadata(AdminService):
    """ Updates last interaction metadata for input sub keys.

    Sets last_interaction_time/type/details on every PubSubSubscription row
    whose sub_key appears in the input list, in a single UPDATE statement.
    """
    class SimpleIO:
        input_required:'any_' = List('sub_key'), Opaque('last_interaction_time'), 'last_interaction_type', \
            'last_interaction_details'

    def handle(self) -> 'None':

        # Local aliases
        req = self.request.input

        # Normalize the input time to a float. Non-float input goes through datetime_to_ms,
        # whose millisecond result is divided by 1000, i.e. the column ultimately receives
        # seconds since the epoch. NOTE(review): the original comment claimed "milliseconds" -
        # presumably the database expects seconds; confirm against the schema.
        if not isinstance(req.last_interaction_time, float):
            last_interaction_time = datetime_to_ms(req.last_interaction_time) / 1000.0
        else:
            last_interaction_time = req.last_interaction_time

        with closing(self.odb.session()) as session:

            # Run the query - details are stored as UTF-8 encoded bytes.
            session.execute(
                update(PubSubSubscription).\
                values({
                    'last_interaction_time': last_interaction_time,
                    'last_interaction_type': req.last_interaction_type,
                    'last_interaction_details': req.last_interaction_details.encode('utf8'),
                }).\
                where(cast_('Column', PubSubSubscription.sub_key).in_(req.sub_key)) # type: ignore
            )

            # And commit it to the database
            session.commit()
# ################################################################################################################################
| 38,245
|
Python
|
.py
| 682
| 44.831378
| 130
| 0.543041
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,368
|
delivery.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/delivery.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.api import PUBSUB
from zato.common.broker_message import PUBSUB as BROKER_MSG_PUBSUB
from zato.common.exception import BadRequest
from zato.common.json_internal import dumps
from zato.common.pubsub import HandleNewMessageCtx
from zato.common.util.pubsub import is_service_subscription
from zato.server.pubsub.delivery.tool import PubSubTool
from zato.server.service import Int, Opaque
from zato.server.service.internal import AdminService, AdminSIO
# ################################################################################################################################
if 0:
from zato.common.pubsub import PubSubMessage
from zato.common.typing_ import any_, anydict, callable_, strcalldict, strdict
from zato.server.connection.http_soap.outgoing import RESTWrapper
from zato.server.pubsub.model import Subscription
PubSubMessage = PubSubMessage
Subscription = Subscription
# ################################################################################################################################
class NotifyPubSubMessage(AdminService):
    """ Notifies pubsub about new messages available. It is guaranteed that this service will be always invoked
    on the server where each sub_key from sub_key_list exists.
    """
    def handle(self):
        # Each sub_key is handed over to its own pubsub_tool individually,
        # even if several input sub_keys happen to map to the same tool.
        request = self.request.raw_request['request'] # type: anydict

        has_gd = request['has_gd']
        non_gd_msg_list = request['non_gd_msg_list']
        is_bg_call = request['is_bg_call']
        pub_time_max = request['pub_time_max']

        for sub_key in request['sub_key_list']:
            ctx = HandleNewMessageCtx(self.cid, has_gd, [sub_key], non_gd_msg_list, is_bg_call, pub_time_max)
            self.pubsub.pubsub_tool_by_sub_key[sub_key].handle_new_messages(ctx)
# ################################################################################################################################
class CreateDeliveryTask(AdminService):
    """ Starts a new delivery task for endpoints other than WebSockets (which are handled separately).
    """
    def handle(self):
        config = self.request.raw_request # type: anydict

        endpoint_type = config['endpoint_type']
        sub_key = config['sub_key']

        # Creates a pubsub_tool that will handle this subscription and registers it with pubsub
        pubsub_tool = PubSubTool(self.pubsub, self.server, endpoint_type)

        # Makes this sub_key known to pubsub but only if this is not a service subscription
        # because subscriptions of this sort are handled by the worker store directly in init_pubsub.
        if not is_service_subscription(config):
            pubsub_tool.add_sub_key(sub_key)

        # Notify all the workers so their in-RAM state is updated as well
        msg = {
            'action': BROKER_MSG_PUBSUB.SUB_KEY_SERVER_SET.value,
            'cluster_id': self.server.cluster_id,
            'server_name': self.server.name,
            'server_pid': self.server.pid,
            'sub_key': sub_key,
            'endpoint_type': endpoint_type,
            'task_delivery_interval': config['task_delivery_interval'],
            'source': 'delivery.CreateDeliveryTask',
            'source_server_name': self.server.name,
            'source_server_pid': self.server.pid,
        }
        self.broker_client.publish(msg)
# ################################################################################################################################
class DeliverMessage(AdminService):
    """ Callback service invoked by delivery tasks for each message or a list of messages that need to be delivered
    to a given endpoint.

    The actual transport is chosen from the module-level deliver_func dispatch table,
    keyed by the subscription's endpoint_type.
    """
    class SimpleIO(AdminSIO):
        # Both inputs are opaque objects - msg is a PubSubMessage (or a list of them)
        # and subscription is the Subscription the delivery is being made for.
        input_required:'any_' = (Opaque('msg'), Opaque('subscription'))

# ################################################################################################################################

    def handle(self) -> 'None':
        msg = self.request.input.msg # type: any_
        subscription = self.request.input.subscription # type: Subscription

        # Resolve both the per-endpoint-type implementation getter ..
        endpoint_impl_getter = self.pubsub.get_endpoint_impl_getter(subscription.config['endpoint_type']) # type: callable_

        # .. and the matching delivery function, then dispatch to it.
        # Note that the functions in deliver_func are unbound, hence self is passed explicitly.
        func = deliver_func[subscription.config['endpoint_type']] # type: callable_
        func(self, msg, subscription, endpoint_impl_getter)

# ################################################################################################################################

    def _get_data_from_message(self, msg:'any_') -> 'any_':
        """ Turns a message, or a list of messages, into serializable business data -
        preferring the pre-serialized form if one is available.
        """
        # A list of messages is given on input so we need to serialize each of them individually
        if isinstance(msg, list):
            out:'any_' = []
            for elem in msg: # type: ignore
                out.append(elem.serialized if elem.serialized else elem.to_external_dict())
            return out

        # A single message was given on input
        else:
            return msg.serialized if msg.serialized else msg.to_external_dict()

# ################################################################################################################################

    def _deliver_rest(self,
        msg:'list[PubSubMessage]',
        sub:'Subscription',
        impl_getter:'callable_',
    ) -> 'None':
        """ Delivers messages to a REST endpoint via the outgoing connection configured for the subscription.
        Raises ValueError if the subscription has no outgoing REST connection assigned.
        """
        # Local variables
        out_http_method = sub.config['out_http_method']
        out_http_soap_id = sub.config.get('out_http_soap_id')

        if not out_http_soap_id:
            raise ValueError('Missing out_http_soap_id for subscription `{}`'.format(sub))
        else:
            # Extract the actual data from the pub/sub message ..
            data = self._get_data_from_message(msg)

            # .. the outgoing connection's configuration ..
            rest_config:'strdict' = impl_getter(out_http_soap_id)

            # .. from which we can extract the actual wrapper ..
            conn:'RESTWrapper' = rest_config['conn']

            # .. make sure that we send JSON ..
            if not isinstance(data, str):
                data = dumps(data)

            # .. which now can be invoked.
            _ = conn.http_request(out_http_method, self.cid, data=data)

# ################################################################################################################################

    def _deliver_amqp(
        self,
        msg:'PubSubMessage',
        sub:'Subscription',
        _ignored_impl_getter # type: ignore
    ) -> 'None':
        """ Delivers messages to an AMQP endpoint - looks up the outgoing connection by its ID
        among all configured AMQP outconns. The impl_getter is ignored here.
        """
        # Ultimately we should use impl_getter to get the outconn
        for value in self.server.worker_store.worker_config.out_amqp.values(): # type: ignore
            if value['config']['id'] == sub.config['out_amqp_id']:

                data = self._get_data_from_message(msg)
                name:'str' = value['config']['name']

                # Exchange and routing key are optional and added only if configured
                kwargs = {}

                if sub.config['amqp_exchange']:
                    kwargs['exchange'] = sub.config['amqp_exchange']

                if sub.config['amqp_routing_key']:
                    kwargs['routing_key'] = sub.config['amqp_routing_key']

                self.outgoing.amqp.send(dumps(data), name, **kwargs)

                # We found our outconn and the message was sent, we can stop now
                break

# ################################################################################################################################

    def _deliver_wsx(self, msg, sub, _ignored_impl_getter) -> 'None': # type: ignore
        # WebSocket deliveries never go through this service - see the per-socket delivery logic instead.
        raise NotImplementedError('WSX deliveries should be handled by each socket\'s deliver_pubsub_msg')

# ################################################################################################################################

    def _deliver_srv(self, msg:'any_', sub:'Subscription', _ignored_impl_getter:'any_') -> 'None':
        """ Delivers messages by invoking a Zato service - either one named explicitly in the message's
        zato_ctx or the one behind the subscription's endpoint.
        """
        # Reusable
        is_list = isinstance(msg, list)

        #
        # We can have two cases.
        #
        # 1) The messages were published via self.pubsub.publish('service.name')
        # 2) The messages were published to a topic and one of its subscribers is a service
        #
        # Depending on which case it is, we will extract the actual service's name differently.
        #
        # We do not know upfront which case it will be so this needs to be extracted upfront.

        # Each message will be destinated for the same service so we can extract the target service's name
        # from the first message in list, assuming it is in a list at all.
        zato_ctx:'any_' = msg[0].zato_ctx if is_list else msg.zato_ctx

        #
        # Case 1) is where we can find the service name immediately.
        #
        # zato_ctx may be the literal string '{}' (an empty, serialized context) - only a real dict is consulted.
        target_service_name = zato_ctx != '{}' and zato_ctx.get('target_service_name')

        #
        # Case 2) is where we need to look up the service's name based on a given endpoint that points to the service.
        #
        if not target_service_name:
            endpoint = self.pubsub.get_endpoint_by_id(sub.endpoint_id)
            target_service_name = self.server.service_store.get_service_name_by_id(endpoint.service_id)

        # Invoke the target service, giving it on input everything that we have,
        # regardless of whether it is a list or not.
        self.invoke(target_service_name, msg)
# ################################################################################################################################
# We need to register it here because it refers to DeliverMessage's methods.
# The values are unbound methods - DeliverMessage.handle calls them as func(self, ...).
deliver_func:'strcalldict' = {
    PUBSUB.ENDPOINT_TYPE.REST.id: DeliverMessage._deliver_rest,
    PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id: DeliverMessage._deliver_wsx,
    PUBSUB.ENDPOINT_TYPE.SERVICE.id: DeliverMessage._deliver_srv,
}
# ################################################################################################################################
class GetServerPIDForSubKey(AdminService):
    """ Returns PID of a server process for input sub_key.
    """
    class SimpleIO(AdminSIO):
        input_required = ('sub_key',)
        output_optional:'any_' = (Int('server_pid'),)

# ################################################################################################################################

    def _raise_bad_request(self, sub_key:'str') -> 'None':
        # No delivery server exists for this sub_key - report it back as a client error
        raise BadRequest(self.cid, 'No such sub_key found `{}`'.format(sub_key))

# ################################################################################################################################

    def handle(self) -> 'None':
        sub_key = self.request.input.sub_key
        try:
            server = self.pubsub.get_delivery_server_by_sub_key(sub_key, needs_lock=False)
        except KeyError:
            self._raise_bad_request(sub_key)

        # The lookup may also return an empty result instead of raising KeyError
        if server:
            self.response.payload.server_pid = server.server_pid
        else:
            self._raise_bad_request(sub_key)
# ################################################################################################################################
| 11,248
|
Python
|
.py
| 194
| 49.536082
| 130
| 0.535201
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,369
|
sync.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/task/sync.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from datetime import datetime
from operator import itemgetter
# Zato
from zato.common.exception import BadRequest
from zato.common.json_internal import dumps
from zato.common.pubsub import all_dict_keys, pubsub_main_data
from zato.server.service import AsIs, Int, List
from zato.server.service.internal import AdminService, AdminSIO, GetListAdminSIO
# Attributes of PubSub for which only their lengths, not contents, are reported by GetServerList
len_keys = 'subscriptions_by_topic', 'subscriptions_by_sub_key', 'sub_key_servers', \
    'pubsub_tool_by_sub_key', 'pubsub_tools'
# ################################################################################################################################
class _TaskSyncGetServerListSIO(AdminSIO):
    """ SimpleIO definition shared by GetServerList and GetList - exposes the main PubSub attributes.
    """
    output_optional = pubsub_main_data
    skip_empty_keys = True
# ################################################################################################################################
class GetServerList(AdminService):
    """ Per-server implementation of TaskMainGetList.
    """
    SimpleIO = _TaskSyncGetServerListSIO

    def handle(self):

        pubsub = self.pubsub
        server = self.server

        # Scalar attributes and whole dictionaries are returned as they are ..
        out = {
            'cluster_id': server.cluster_id,
            'server_name': server.name,
            'server_pid': server.pid,
            'server_api_address': '{}:{}'.format(server.preferred_address, server.port),
            'msg_pub_counter': pubsub.msg_pub_counter,
            'has_meta_endpoint': pubsub.has_meta_endpoint,
            'endpoint_meta_store_frequency': pubsub.endpoint_meta_store_frequency,
            'endpoint_meta_data_len': pubsub.endpoint_meta_data_len,
            'endpoint_meta_max_history': pubsub.endpoint_meta_max_history,
            'data_prefix_len': pubsub.data_prefix_len,
            'data_prefix_short_len': pubsub.data_prefix_short_len,
            'endpoints': pubsub.endpoint_api.endpoints,
            'sec_id_to_endpoint_id': pubsub.endpoint_api.sec_id_to_endpoint_id,
            'ws_channel_id_to_endpoint_id': pubsub.endpoint_api.ws_channel_id_to_endpoint_id,
            'service_id_to_endpoint_id': pubsub.endpoint_api.service_id_to_endpoint_id,
            'topics': pubsub.topic_api.topics,
        }

        # .. whereas for container attributes only their lengths are reported.
        out.update({key: len(getattr(pubsub, key)) for key in len_keys})

        self.response.payload = out
# ################################################################################################################################
class GetDict(AdminService):
    """ Returns a list of dictionaries keyed by attributes of PubSub, i.e. the input dictionary's name
    must be an attribute of PubSub.
    """
    class SimpleIO(GetListAdminSIO):
        input_required = 'dict_name',
        output_repeated = True
        skip_empty_keys = True

    _keys_allowed = 'subscriptions_by_topic', 'subscriptions_by_sub_key', 'sub_key_servers', 'endpoints', 'topics', \
        'sec_id_to_endpoint_id', 'ws_channel_id_to_endpoint_id', 'service_id_to_endpoint_id', 'topic_name_to_id', \
        'pubsub_tool_by_sub_key', 'pubsub_tools', 'endpoint_msg_counter'

    def validate_input(self):
        # Reject dictionary names that are not known PubSub attributes
        dict_name = self.request.input.dict_name
        if dict_name not in self._keys_allowed:
            raise BadRequest(self.cid, 'Invalid value `{}`'.format(dict_name))

    def handle(self):
        # Look the dictionary up and let a subclass build the actual response
        pubsub_dict = getattr(self.pubsub, self.request.input.dict_name) # type: dict
        self._handle_attr_call(pubsub_dict)

    def _handle_attr_call(self, attr):
        # Subclasses decide whether keys or values of the dictionary are returned
        raise NotImplementedError()
# ################################################################################################################################
class GetDictKeys(GetDict):
    """ Returns keys from the input PubSub dictionary.
    """
    class SimpleIO(GetDict.SimpleIO):
        output_optional = 'key', Int('key_len'), List('id_list'), 'is_list'

    def _handle_attr_call(self, attr):
        # For each key, report how many objects it maps to, their IDs
        # and whether the underlying value was a list.
        out = []
        for key, value in attr.items():
            is_list = isinstance(value, list)
            value_as_list = value if is_list else ([value] if value else [])
            out.append({
                'key': key,
                'key_len': len(value_as_list),
                'id_list': sorted(elem.get_id() for elem in value_as_list),
                'is_list': is_list,
            })
        self.response.payload[:] = sorted(out, key=itemgetter('key'))
# ################################################################################################################################
class GetDictValues(GetDict):
    """ Returns values from the input PubSub dictionary.
    """
    class SimpleIO(GetDict.SimpleIO):
        input_required = GetDict.SimpleIO.input_required + ('key', List('sort_by'))
        output_optional = all_dict_keys

    def _handle_attr_call(self, attr):
        key = self.request.input.key

        # The key may be an integer that we received as a string,
        # so an integer lookup is attempted first.
        try:
            key = int(key)
        except ValueError:
            pass # That is fine, it was not an integer

        try:
            values = attr[key]
        except KeyError:
            raise KeyError('No such key `{}` ({}) among `{}`'.format(key, type(key), sorted(attr.keys())))

        if not isinstance(values, list):
            values = [values]

        out = [elem.to_dict() for elem in values]
        out.sort(key=itemgetter(*self.request.input.sort_by), reverse=True)

        # Datetime objects are not JSON-serializable so they are emitted in the ISO-8601 format
        for item in out:
            for item_key, item_value in item.items():
                if isinstance(item_value, datetime):
                    item[item_key] = item_value.isoformat()

        self.response.payload = dumps(out)
# ################################################################################################################################
class GetList(AdminService):
    """ Returns basic information about all the main PubSub objects of each server from the input cluster.
    """
    class SimpleIO(_TaskSyncGetServerListSIO, GetListAdminSIO):
        input_required = 'cluster_id'
        output_repeated = True

    def get_data(self):
        # Ask every server in the cluster for its own PubSub details
        return self.server.rpc.invoke_all(GetServerList.get_name(), timeout=10).data

    def handle(self):
        self.response.payload[:] = self.get_data()
# ################################################################################################################################
class _GetEventList(AdminService):
    """ Returns a list of events for a particular topic.

    Base class providing the SimpleIO definition shared by GetServerEventList and GetEventList.
    """
    class SimpleIO(GetListAdminSIO):
        # Events are held per server process, hence both server name and PID are required
        input_required = 'cluster_id', 'server_name', 'server_pid'
        input_optional = GetListAdminSIO.input_optional + ('topic_name',)
        output_required = AsIs('log_id'), AsIs('event_id'), 'name', 'timestamp'
        output_optional = 'ctx'
        output_repeated = True
        response_elem = None
# ################################################################################################################################
class GetServerEventList(_GetEventList):
    """ Returns a list of events for a particular topic. Must be invoked on the same server the data is to be returned from.
    """
    def handle(self):

        # PubSub's own events are always returned ..
        event_list = self.pubsub.get_event_list()

        # .. with topic-level events appended if a topic was given on input.
        topic_name = self.request.input.topic_name
        if topic_name:
            event_list.extend(self.pubsub.get_topic_event_list(topic_name))

        # Return the events newest-first, as long as there are any
        if event_list:
            event_list.sort(key=itemgetter('timestamp', 'log_id', 'event_id'), reverse=True)

        self.response.payload[:] = event_list
# ################################################################################################################################
class GetEventList(_GetEventList):
    """ Returns a list of events for a particular topic by invoking GetServerEventList
    on the server process indicated by input server_name and server_pid.
    """
    def handle(self):
        # Forward the request to the server process that actually holds the events
        server_name = self.request.input.server_name
        invoker = self.server.rpc.get_invoker_by_server_name(server_name)
        response = invoker.invoke(GetServerEventList.get_name(), self.request.input, pid=self.request.input.server_pid)
        self.response.payload[:] = response
# ################################################################################################################################
| 8,816
|
Python
|
.py
| 165
| 45.327273
| 130
| 0.550389
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,370
|
delivery_server.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/task/delivery_server.py
|
# -*- coding: utf-8 -*-
# This file will be removed in a future release of Zato.
| 82
|
Python
|
.py
| 2
| 39.5
| 56
| 0.670886
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,371
|
main.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/task/main.py
|
# -*- coding: utf-8 -*-
# This file will be removed in a future release of Zato.
| 82
|
Python
|
.py
| 2
| 39.5
| 56
| 0.670886
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,372
|
message.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/task/delivery/message.py
|
'''
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from operator import itemgetter
# Bunch
from bunch import Bunch
# Zato
from zato.common.util.time_ import datetime_from_ms
from zato.server.service import AsIs, Int
from zato.server.service.internal import AdminService, GetListAdminSIO
from zato.server.service.internal.pubsub.task.delivery import GetTaskSIO
# ################################################################################################################################
# Type checking
if 0:
from zato.server.pubsub.delivery.task import DeliveryTask
from zato.server.pubsub.delivery.tool import PubSubTool
DeliveryTask = DeliveryTask
Message = Message
PubSubTool = PubSubTool
# ################################################################################################################################
# ################################################################################################################################
class _GetMessageSIO:
output_required = (AsIs('msg_id'), 'published_by_id', Int('delivery_count'), 'recv_time')
output_optional = (AsIs('ext_client_id'), 'data_prefix_short', 'published_by_name')
response_elem = None
# ################################################################################################################################
# ################################################################################################################################
class GetServerDeliveryTaskMessageList(AdminService):
""" Returns all in-flight messages tasks from a particular delivery task, which must exist on current server.
"""
class SimpleIO(_GetMessageSIO):
input_required = (AsIs('python_id'),)
input_optional = (AsIs('msg_id'), 'needs_details')
output_repeated = True
def get_data(self):
out = []
msg_id = self.request.input.msg_id
needs_details = self.request.input.needs_details
# Get all pubsub tools ..
for ps_tool in self.pubsub.pubsub_tools: # type: PubSubTool
# Make sure nothing modifies any tool in the meantime
with ps_tool.lock:
# Get all tasks from current server ..
for sub_key, task in ps_tool.delivery_tasks.items(): # type: (str, DeliveryTask)
# Find the one task required on input ..
if task.python_id == self.request.input.python_id:
for msg in task.delivery_list: # type: Message
# If only a single message is to be returned, check it here ..
if msg_id and msg_id != msg.msg_id:
continue
# A message to be produced
item = Bunch()
item.recv_time = datetime_from_ms(msg.recv_time * 1000)
item.msg_id = msg.pub_msg_id
item.published_by_id = msg.published_by_id
item.published_by_name = self.pubsub.get_endpoint_by_id(msg.published_by_id).name
item.ext_client_id = msg.ext_client_id
item.data_prefix_short = msg.data[:self.pubsub.data_prefix_short_len]
item.delivery_count = msg.delivery_count
# If details are needed, add them too ..
if needs_details:
pass
out.append(item)
return out
def handle(self):
self.response.payload[:] = self.get_data()
# ################################################################################################################################
# ################################################################################################################################
class GetDeliveryTaskMessageList(AdminService):
    """ Returns all in-flight messages from a particular delivery task,
    delegating the work to the server process that actually owns the task.
    """
    name = 'pubsub.task.message.get-list2'

    class SimpleIO(GetListAdminSIO, _GetMessageSIO):
        input_optional = GetListAdminSIO.input_optional + (AsIs('python_id'),)
        input_required = 'cluster_id', 'server_name', 'server_pid'

    def handle(self):
        input = self.request.input

        # The request for the server-local service holding the task
        remote_request = {
            'cluster_id': input.cluster_id,
            'python_id': input.python_id,
        }

        # Find the server the task lives on ..
        remote_invoker = self.server.rpc.get_invoker_by_server_name(input.server_name)

        # .. invoke the correct PID on it and return its response as ours.
        remote_response = remote_invoker.invoke(
            GetServerDeliveryTaskMessageList.get_name(), remote_request, pid=input.server_pid)

        self.response.payload[:] = remote_response
# ################################################################################################################################
# ################################################################################################################################
'''
| 5,099
|
Python
|
.py
| 89
| 46.955056
| 130
| 0.469171
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,373
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/task/delivery/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from operator import itemgetter
# Zato
from zato.common.util.time_ import datetime_from_ms
from zato.server.service import AsIs, Int
from zato.server.service.internal import AdminService, GetListAdminSIO
# ################################################################################################################################
# Type-checking-only imports - the `if 0` guard keeps them out of the runtime
# import path while still making the names visible to static analysers.
if 0:
    from zato.common.typing_ import anylist, anytuple
    from zato.server.pubsub.delivery.task import DeliveryTask
    from zato.server.pubsub.delivery.tool import PubSubTool

    # Self-assignments silence "imported but unused" warnings
    DeliveryTask = DeliveryTask
    PubSubTool = PubSubTool
# ################################################################################################################################
class GetTaskSIO:
    """ SimpleIO elements shared by the delivery-task listing services in this module. """
    output_required = ('server_name', 'server_pid', 'sub_key', 'topic_id', 'topic_name', 'is_active',
        'endpoint_id', 'endpoint_name', 'py_object', AsIs('python_id'), Int('len_messages'), Int('len_history'), Int('len_batches'),
        Int('len_delivered')) # type: anytuple
    output_optional = 'last_sync', 'last_sync_sk', 'last_iter_run', AsIs('ext_client_id') # type: anytuple

    # Do not wrap response elements in any envelope
    output_elem = None
    response_elem = None
# ################################################################################################################################
# ################################################################################################################################
class GetServerDeliveryTaskList(AdminService):
    """ Returns all delivery tasks for a particular server process (must be invoked on the required one).
    """
    class SimpleIO(GetTaskSIO):
        output_repeated = True

    def get_data(self) -> 'anylist':
        """ Builds one output row per delivery task found across all pubsub tools of this process. """
        out = [] # type: anylist

        for ps_tool in self.pubsub.pubsub_tools: # type: PubSubTool

            # Hold the tool's lock so tasks are not modified while we read them
            with ps_tool.lock:
                for _ignored_sub_key, task in ps_tool.delivery_tasks.items(): # type: (str, DeliveryTask)

                    # Timestamps are stored in seconds - convert for display
                    last_sync = task.last_iter_run # ps_tool.last_gd_run
                    if last_sync:
                        last_sync = datetime_from_ms(last_sync * 1000)

                    # Only report tasks whose subscription still exists
                    if sub := self.pubsub.get_subscription_by_sub_key(task.sub_key):
                        endpoint_id = sub.endpoint_id
                        endpoint = self.pubsub.get_endpoint_by_id(endpoint_id)

                        out.append({
                            'server_name': ps_tool.server_name,
                            'server_pid': ps_tool.server_pid,
                            'endpoint_id': endpoint.id,
                            'endpoint_name': endpoint.name,
                            'py_object': task.py_object,
                            'python_id': task.python_id,
                            'sub_key': task.sub_key,
                            'topic_id': self.pubsub.get_topic_id_by_name(task.topic_name),
                            'topic_name': task.topic_name,
                            'is_active': task.keep_running,
                            'len_messages': len(task.delivery_list),

                            # NOTE(review): len_history reports the same value as len_messages
                            # (both use len(task.delivery_list)) - confirm whether a dedicated
                            # history container was intended here instead.
                            'len_history': len(task.delivery_list),

                            'last_sync': last_sync,
                            'last_iter_run': datetime_from_ms(task.last_iter_run * 1000),
                            'len_batches': task.len_batches,
                            'len_delivered': task.len_delivered,
                        })

        # Return the list of tasks sorted by sub_keys and their Python names
        return sorted(out, key=itemgetter('sub_key', 'py_object'))

    def handle(self):
        self.response.payload[:] = self.get_data()
# ################################################################################################################################
# ################################################################################################################################
class GetDeliveryTaskList(AdminService):
    """ Returns all delivery tasks for a particular server process, which may be a remote one.
    """
    class SimpleIO(GetListAdminSIO, GetTaskSIO):
        input_required = 'cluster_id', 'server_name', 'server_pid'

    def handle(self):
        input = self.request.input
        request = {'cluster_id': input.cluster_id}

        # Delegate to the exact server process that owns the tasks
        target = self.server.rpc.get_invoker_by_server_name(input.server_name)
        result = target.invoke(GetServerDeliveryTaskList.get_name(), request, pid=input.server_pid)

        self.response.payload[:] = result
# ################################################################################################################################
# ################################################################################################################################
class GetDeliveryTask(AdminService):
    """ Returns a particular delivery task by its Python object's ID.
    """
    class SimpleIO(GetTaskSIO):
        input_required = 'server_name', 'server_pid', AsIs('python_id')

    def handle(self):
        input = self.request.input

        request = {
            'cluster_id': self.server.cluster_id,
            'server_name': input.server_name,
            'server_pid': input.server_pid,
        }

        # Obtain the full task list from the server indicated on input ..
        invoker = self.server.rpc.get_invoker_by_server_name(input.server_name)
        task_list = invoker.invoke(GetDeliveryTaskList.get_name(), request)

        # .. and return the first task matching the Python ID, if any.
        match = next((elem for elem in task_list if elem['python_id'] == input.python_id), None)
        if match is not None:
            self.response.payload = match
# ################################################################################################################################
# ################################################################################################################################
| 5,935
|
Python
|
.py
| 99
| 48.757576
| 132
| 0.463406
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,374
|
server.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/pubsub/task/delivery/server.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
# Bunch
from bunch import bunchify
# Zato
from zato.common.odb.model import PubSubSubscription, Server, WebSocketClient, WebSocketClientPubSubKeys, WebSocketSubscription
from zato.common.util.time_ import datetime_from_ms
from zato.server.service import Int
from zato.server.service.internal import AdminService, GetListAdminSIO
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session
from zato.common.typing_ import any_, anylist, anyset, anytuple
# ################################################################################################################################
# ################################################################################################################################
# SimpleIO elements shared by the per-PID delivery summary services below
_summary_delivery_server_sio = ('tasks', 'tasks_running', 'tasks_stopped', 'sub_keys', 'topics',
    'messages', 'messages_gd', 'messages_non_gd', Int('msg_handler_counter'), 'last_gd_run', 'last_task_run') # type: anytuple
# ################################################################################################################################
# ################################################################################################################################
def delivery_server_list(session:'Session', cluster_id:'int') -> 'anytuple':
    """ Returns a list of all servers (without PIDs) that are known to be delivery ones.

    Each row is (server_id, server_name, sub_key). WSX (WebSocket) and non-WSX
    subscriptions require different joins, so two queries are built and unioned.
    """
    # WSX subscriptions first
    q_wsx = session.query(
        Server.id,
        Server.name,
        PubSubSubscription.sub_key
        ).\
        filter(PubSubSubscription.sub_key==WebSocketClientPubSubKeys.sub_key).\
        filter(WebSocketSubscription.sub_key==WebSocketClientPubSubKeys.sub_key).\
        filter(WebSocketClientPubSubKeys.client_id==WebSocketClient.id).\
        filter(WebSocketClient.server_id==Server.id).\
        filter(Server.cluster_id==cluster_id) # type: ignore

    # Non-WSX subscriptions now - these link directly from subscription to server
    q_non_wsx = session.query(
        Server.id,
        Server.name,
        PubSubSubscription.sub_key
    )

    q_non_wsx = q_non_wsx.filter(Server.id==PubSubSubscription.server_id) # type: ignore
    q_non_wsx = q_non_wsx.filter(Server.cluster_id==cluster_id) # type: ignore

    # Return a union of WSX and non-WSX related subscription servers
    return q_wsx.union(q_non_wsx).\
        all() # type: ignore
# ################################################################################################################################
class GetDetails(AdminService):
    """ Returns a summary of current activity for all delivery tasks on current PID (non-WSX clients only).
    """
    class SimpleIO:
        output_optional = _summary_delivery_server_sio
        response_elem = None

    def handle(self):

        # Aggregate counters across all pubsub tools of this PID
        total_tasks = 0
        tasks_running = 0
        tasks_stopped = 0

        total_messages = 0
        messages_gd = 0      # GD = guaranteed delivery
        messages_non_gd = 0

        total_sub_keys = 0
        topics_seen = set() # type: anyset

        max_last_gd_run = 0
        max_last_task_run = 0

        for item in self.pubsub.pubsub_tools:

            total_tasks += len(item.delivery_tasks)
            total_sub_keys += len(item.sub_keys)

            # last_gd_run is a mapping - take the newest value across its entries
            item_last_gd_run = item.last_gd_run
            item_last_gd_run_values = item_last_gd_run.values() if item_last_gd_run else [] # type: any_
            max_item_last_gd_run = max(item_last_gd_run_values) if item_last_gd_run_values else 0 # type: int
            max_last_gd_run = max(max_last_gd_run, max_item_last_gd_run)

            for task in item.get_delivery_tasks():

                max_last_task_run = max(max_last_task_run, task.last_iter_run)
                topics_seen.add(task.topic_name)

                if task.is_running():
                    tasks_running += 1
                else:
                    tasks_stopped += 1

                # Both GD and non-GD depths contribute to the grand total
                gd_depth, non_gd_depth = task.get_queue_depth()
                total_messages += gd_depth
                total_messages += non_gd_depth
                messages_gd += gd_depth
                messages_non_gd += non_gd_depth

        self.response.payload.tasks = total_tasks
        self.response.payload.tasks_running = tasks_running
        self.response.payload.tasks_stopped = tasks_stopped

        self.response.payload.messages = total_messages
        self.response.payload.messages_gd = messages_gd
        self.response.payload.messages_non_gd = messages_non_gd

        self.response.payload.topics = len(topics_seen)
        self.response.payload.sub_keys = total_sub_keys

        # Timestamps are in seconds - convert non-zero values to datetime objects
        if max_last_gd_run:
            max_last_gd_run = datetime_from_ms(max_last_gd_run * 1000)

        if max_last_task_run:
            max_last_task_run = datetime_from_ms(max_last_task_run * 1000)

        # Zero means "never ran" and is rendered as an empty string
        self.response.payload.last_gd_run = max_last_gd_run or ''
        self.response.payload.last_task_run = max_last_task_run or ''
# ################################################################################################################################
class GetList(AdminService):
    """ Returns all delivery servers defined for cluster.
    """
    class SimpleIO(GetListAdminSIO):
        input_required = ('cluster_id',)
        output_required = ('name', 'pid')
        output_optional = _summary_delivery_server_sio
        output_repeated = True
        output_elem = None

    def get_data(self) -> 'anylist':
        """ Builds one row per (server, PID) pair, with a pub/sub activity summary obtained over RPC. """

        # Response to produce
        out = [] # type: anylist

        # All PIDs of all servers
        server_pids = {}

        with closing(self.odb.session()) as session:

            # Iterate over all servers and their sub_keys as they are known in ODB
            for _ignored_server_id, server_name, sub_key in delivery_server_list(session, self.request.input.cluster_id):

                # All PIDs of current server
                pids = server_pids.setdefault(server_name, set()) # type: anyset

                # Add a PID found for that server
                if sk_server := self.pubsub.get_sub_key_server(sub_key):
                    pids.add(sk_server.server_pid)

        # We can now iterate over the PIDs found and append an output row for each one.
        for server_name, pids in server_pids.items():

            for pid in pids:

                # Ask each PID for its own activity summary over RPC
                invoker = self.server.rpc.get_invoker_by_server_name(server_name)
                pid_response = bunchify(invoker.invoke(GetDetails.get_name(), pid=pid))

                # A summary of each PID's current pub/sub activities
                pid_data = bunchify({
                    'name': server_name,
                    'pid': pid,
                    'tasks': pid_response.tasks,
                    'tasks_running': pid_response.tasks_running,
                    'tasks_stopped': pid_response.tasks_stopped,
                    'sub_keys': pid_response.sub_keys,
                    'topics': pid_response.topics,
                    'messages': pid_response.messages,
                    'messages_gd': pid_response.messages_gd,
                    'messages_non_gd': pid_response.messages_non_gd,
                    'msg_handler_counter': pid_response.get('msg_handler_counter'),
                    'last_gd_run': pid_response.last_gd_run,
                    'last_task_run': pid_response.last_task_run,
                })

                # OK, we can append data about this PID now
                out.append(pid_data)

        return out

    def handle(self):
        self.response.payload[:] = self.get_data()
# ################################################################################################################################
| 8,083
|
Python
|
.py
| 151
| 43.576159
| 130
| 0.527605
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,375
|
trends.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/stats/trends.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.service.internal import AdminService
class GetTrends(AdminService):
    """ Placeholder service - intentionally does nothing. """
    def handle(self):
        pass
| 283
|
Python
|
.py
| 10
| 25.7
| 64
| 0.724907
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,376
|
summary.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/stats/summary.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.service.internal import AdminService
# ################################################################################################################################
# ################################################################################################################################
# All classes below are no-op placeholders - they share a single empty handler
# via _Abstract and exist so that their service names remain registered.
class _Abstract:
    def handle(self):
        pass

class BaseSummarizingService(_Abstract, AdminService):
    pass

class CreateSummaryByDay(_Abstract, AdminService):
    pass

class CreateSummaryByWeek(_Abstract, AdminService):
    pass

class CreateSummaryByMonth(_Abstract, AdminService):
    pass

class CreateSummaryByYear(_Abstract, AdminService):
    pass

class GetSummaryBase(_Abstract, AdminService):
    pass

class GetSummaryByDay(_Abstract, AdminService):
    pass

class GetSummaryByWeek(_Abstract, AdminService):
    pass

class GetSummaryByMonth(_Abstract, AdminService):
    pass

class GetSummaryByYear(_Abstract, AdminService):
    pass

class GetSummaryByRange(_Abstract, AdminService):
    pass
# ################################################################################################################################
# ################################################################################################################################
| 1,461
|
Python
|
.py
| 36
| 37.555556
| 130
| 0.492898
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,377
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/stats/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.service.internal import AdminService
# ################################################################################################################################
# ################################################################################################################################
# All classes below are no-op placeholders - they share a single empty handler
# via _Abstract and exist so that their service names remain registered.
class _Abstract:
    def handle(self):
        pass

class Delete(_Abstract, AdminService):
    pass

class ProcessRawTimes(_Abstract, AdminService):
    pass

class AggregateByMinute(_Abstract, AdminService):
    pass

class AggregateByHour(_Abstract, AdminService):
    pass

class AggregateByDay(_Abstract, AdminService):
    pass

class AggregateByMonth(_Abstract, AdminService):
    pass

class StatsReturningService(_Abstract, AdminService):
    pass

class GetByService(_Abstract, AdminService):
    pass
# ################################################################################################################################
# ################################################################################################################################
| 1,255
|
Python
|
.py
| 30
| 38.9
| 130
| 0.437655
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,378
|
es.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/search/es.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import SEARCH
from zato.common.odb.model import ElasticSearch
from zato.common.odb.query import search_es_list
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta
# Module-level metadata consumed by the metaclasses from zato.server.service.meta
# (GetListMeta, CreateEditMeta, DeleteMeta) applied to the classes below.
elem = 'search_es'
model = ElasticSearch
label = 'an ElasticSearch connection'
get_list_docs = 'ElasticSearch connections'
broker_message = SEARCH
broker_message_prefix = 'ES_'
list_func = search_es_list
# ################################################################################################################################
# The services below are generated entirely by their metaclasses,
# driven by the module-level metadata above.
@add_metaclass(GetListMeta)
class GetList(AdminService):
    _filter_by = ElasticSearch.name,

@add_metaclass(CreateEditMeta)
class Create(AdminService):
    pass

@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    pass

@add_metaclass(DeleteMeta)
class Delete(AdminService):
    pass
# ################################################################################################################################
| 1,739
|
Python
|
.py
| 38
| 43.947368
| 130
| 0.492289
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,379
|
solr.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/search/solr.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Python 2/3 compatibility
from six import add_metaclass
# Zato
from zato.common.broker_message import SEARCH
from zato.common.odb.model import Solr
from zato.common.odb.query import search_solr_list
from zato.common.util.api import ping_solr
from zato.server.service.internal import AdminService
from zato.server.service.meta import CreateEditMeta, DeleteMeta, GetListMeta, PingMeta
# ################################################################################################################################
# Module-level metadata consumed by the metaclasses from zato.server.service.meta
# (GetListMeta, CreateEditMeta, DeleteMeta, PingMeta) applied to the classes below.
elem = 'search_solr'
model = Solr
label = 'a Solr connection'
get_list_docs = 'Solr connections'
broker_message = SEARCH
broker_message_prefix = 'SOLR_'
list_func = search_solr_list
# ################################################################################################################################
# The services below are generated entirely by their metaclasses,
# driven by the module-level metadata above.
@add_metaclass(GetListMeta)
class GetList(AdminService):
    _filter_by = Solr.name,

@add_metaclass(CreateEditMeta)
class Create(AdminService):
    pass

@add_metaclass(CreateEditMeta)
class Edit(AdminService):
    pass

@add_metaclass(DeleteMeta)
class Delete(AdminService):
    pass
# ################################################################################################################################
# Note: `__metaclass__ = PingMeta` was Python-2-only syntax - under Python 3 it is
# silently ignored and PingMeta would never be applied. Use six's add_metaclass
# decorator instead, consistently with GetList/Create/Edit/Delete above.
@add_metaclass(PingMeta)
class Ping(AdminService):
    """ Pings a Solr connection to check if it is alive.

    The actual request/response handling comes from PingMeta; this class only
    supplies the connection-specific ping callback.
    """
    def ping(self, instance):
        # instance is the Solr connection's configuration row
        ping_solr(instance)
# ################################################################################################################################
| 2,198
|
Python
|
.py
| 47
| 44.510638
| 130
| 0.443715
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,380
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/search/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,381
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/groups/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from json import dumps
from operator import itemgetter
# Zato
from zato.common.api import CONNECTION, Groups
from zato.common.broker_message import Groups as Broker_Message_Groups
from zato.common.odb.model import GenericObject as ModelGenericObject
from zato.server.service import AsIs, Service
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, strlist
# ################################################################################################################################
# ################################################################################################################################
ModelGenericObjectTable:'any_' = ModelGenericObject.__table__
# ################################################################################################################################
# ################################################################################################################################
class GetList(Service):
    """ Returns all groups matching the input criteria.
    """
    input:'any_' = 'group_type', '-needs_members', '-needs_short_members'

    def handle(self):

        input = self.request.input
        group_type = input.group_type

        # The groups themselves plus a per-group member count
        group_list = self.server.groups_manager.get_group_list(group_type)
        member_count = self.invoke(GetMemberCount, group_type=group_type)

        for group in group_list:

            group_id = group['id']
            count = member_count[group_id]
            group['member_count'] = count

            if input.needs_members:
                members = self.invoke(GetMemberList, group_type=group_type, group_id=group_id)

                # Optionally, reduce each member to its name only
                if input.needs_short_members:
                    members = [{'name': elem['name']} for elem in members]

                members.sort(key=itemgetter('name')) # type: ignore
                if members:
                    group['members'] = members

            # "1 member" vs. "0 members" / "2 members"
            suffix = '' if count == 1 else 's'
            group['description'] = f'{count} member{suffix}'

        self.response.payload = group_list
# ################################################################################################################################
# ################################################################################################################################
class Create(Service):
    """ Creates a new group.
    """
    input:'any_' = 'group_type', 'name', AsIs('-members')
    output:'any_' = 'id', 'name'

    def handle(self):

        group_type = self.request.input.group_type
        group_name = self.request.input.name

        # Create the group itself ..
        group_id = self.server.groups_manager.create_group(group_type, group_name)

        # .. attach its initial members ..
        self.invoke(
            EditMemberList,
            group_type=group_type,
            group_action=Groups.Membership_Action.Add,
            group_id=group_id,
            members=self.request.input.members,
        )

        # .. and return the new group's details.
        self.response.payload.id = group_id
        self.response.payload.name = group_name
# ################################################################################################################################
# ################################################################################################################################
class Edit(Service):
    """ Updates an existing group.

    Renames the group and, if a members list is given, diffs it against the
    current membership, adding and removing members as needed, then publishes
    the change to all server threads.
    """
    input:'any_' = 'id', 'group_type', 'name', AsIs('-members')
    output:'any_' = 'id', 'name'

    def handle(self):

        # Local variables
        input = self.request.input

        # All the new members of this group
        to_add:'strlist' = []

        # All the members that have to be removed from the group
        to_remove:'strlist' = []

        self.server.groups_manager.edit_group(input.id, input.group_type, input.name)

        if input.members:

            group_members = self.server.groups_manager.get_member_list(input.group_type, input.id)

            # Compare current vs. requested membership by name
            input_member_names = {item['name'] for item in input.members}
            group_member_names = {item['name'] for item in group_members}

            # Current members not present on input are to be removed ..
            for group_member_name in group_member_names:
                if not group_member_name in input_member_names:
                    to_remove.append(group_member_name) # type: ignore

            # .. and input members not in the group yet are to be added.
            for input_member_name in input_member_names:
                if not input_member_name in group_member_names:
                    to_add.append(input_member_name)

        #
        # Add all the new members to the group
        #
        if to_add:
            _ = self.invoke(
                EditMemberList,
                group_action=Groups.Membership_Action.Add,
                group_id=input.id,
                members=to_add,
            )

        #
        # Remove all the members that should not belong to the group
        #
        if to_remove:
            _ = self.invoke(
                EditMemberList,
                group_action=Groups.Membership_Action.Remove,
                group_id=input.id,
                members=to_remove,
            )

        self.response.payload.id = self.request.input.id
        self.response.payload.name = self.request.input.name

        # .. enrich the message that is to be published ..
        input.to_add = to_add
        input.to_remove = to_remove

        # .. now, let all the threads know about the update.
        input.action = Broker_Message_Groups.Edit.value
        self.broker_client.publish(input)
# ################################################################################################################################
# ################################################################################################################################
class Delete(Service):
    """ Deletes an existing group.

    Besides removing the group from the database, this also strips the group ID
    from the security_groups list of any REST channel still referencing it, then
    publishes the deletion to all server threads.
    """
    input:'any_' = 'id'

    def handle(self):

        # Local variables
        input = self.request.input
        group_id = int(input.id)

        # Delete this group from the database ..
        self.server.groups_manager.delete_group(group_id)

        # .. make sure the database configuration of channels using it is also updated ..
        to_update = []
        data = self.invoke('zato.http-soap.get-list', connection=CONNECTION.CHANNEL, paginate=False, skip_response_elem=True)

        for item in data:
            # Only channels that actually reference this group need an update
            if security_groups := item.get('security_groups'):
                if group_id in security_groups:
                    security_groups.remove(group_id)
                    item['security_groups'] = security_groups
                    to_update.append(item)

        for item in to_update: # type: ignore
            _= self.invoke('zato.http-soap.edit', item)

        # .. now, let all the threads know about the update.
        input.action = Broker_Message_Groups.Delete.value
        self.broker_client.publish(input)
# ################################################################################################################################
# ################################################################################################################################
class GetMemberList(Service):
    """ Returns current members of a group.
    """
    input:'any_' = 'group_type', 'group_id', '-should_serialize'

    def handle(self):

        input = self.request.input

        # Fetch the raw members and turn each one into a plain dict
        result = self.server.groups_manager.get_member_list(input.group_type, input.group_id)
        result = [member.to_dict() for member in result]

        # Optionally, return the whole list serialized to JSON
        if input.should_serialize:
            result = dumps(result)

        self.response.payload = result
# ################################################################################################################################
# ################################################################################################################################
class GetMemberCount(Service):
    """ Returns information about how many members are in each group.
    """
    input:'any_' = 'group_type', '-should_serialize'

    def handle(self):

        group_type = self.request.input.group_type
        should_serialize = self.request.input.should_serialize

        # Mapping of group -> its member count
        counts = self.server.groups_manager.get_member_count(group_type)

        # Optionally, return the mapping serialized to JSON
        self.response.payload = dumps(counts) if should_serialize else counts
# ################################################################################################################################
# ################################################################################################################################
class EditMemberList(Service):
    """ Adds members to or removes them from a group.
    """
    input:'any_' = 'group_action', 'group_id', AsIs('-member_id_list'), AsIs('-members')

    def _get_member_id_list_from_name_list(self, member_name_list:'any_') -> 'strlist':
        """ Maps security-definition names to member IDs of the form '<sec_type>-<sec_def_id>'. """

        # Our response to produce
        out:'strlist' = []

        # Make sure this is actually a list
        member_name_list = member_name_list if isinstance(member_name_list, list) else [member_name_list] # type: ignore

        # Get a list of all the security definitions possible, out of which we will be building our IDs.
        security_list = self.invoke('zato.security.get-list', skip_response_elem=True)

        for item in member_name_list:

            # Elements may be dicts carrying a 'name' key or plain names
            if isinstance(item, dict):
                item_name:'str' = item['name']
            else:
                item_name = item

            for security in security_list:
                if item_name == security['name']:
                    sec_type = security['sec_type']
                    sec_def_id = security['id']
                    member_name = f'{sec_type}-{sec_def_id}'
                    out.append(member_name)

        return out

# ################################################################################################################################

    def handle(self):

        # Local variables
        input = self.request.input

        # We need to have member IDs in further steps so if we have names, they have to be turned into IDs here.
        if not (member_id_list := input.get('member_id_list')):
            member_id_list = self._get_member_id_list_from_name_list(input.members)

        # Nothing to do if there are no members to add or remove
        if not member_id_list:
            return

        # Pick the groups-manager function matching the requested action
        if input.group_action == Groups.Membership_Action.Add:
            func = self.server.groups_manager.add_members_to_group
        else:
            func = self.server.groups_manager.remove_members_from_group

        func(input.group_id, member_id_list)

        # .. now, let all the threads know about the update.
        input.action = Broker_Message_Groups.Edit_Member_List.value
        self.broker_client.publish(input)
# ################################################################################################################################
# ################################################################################################################################
| 11,761
|
Python
|
.py
| 224
| 43.040179
| 130
| 0.462512
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,382
|
ctx.py
|
zatosource_zato/code/zato-server/src/zato/server/service/internal/groups/ctx.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.groups.ctx import SecurityGroupsCtxBuilder
from zato.server.service import Service
# ################################################################################################################################
# ################################################################################################################################
class BuildCtx(Service):
    """ A development-only helper that exercises SecurityGroupsCtxBuilder with sample input. """
    name = 'dev.groups.build-ctx'

    def handle(self):

        # Sample input to run the builder with
        channel_id = 85
        security_groups = [1, 3]

        # Build a security groups context for the channel above, discarding the result
        ctx_builder = SecurityGroupsCtxBuilder(self.server)
        _ = ctx_builder.build_ctx(channel_id, security_groups)
# ################################################################################################################################
| 914
|
Python
|
.py
| 18
| 47.055556
| 130
| 0.396843
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,383
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/service/reqresp/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from copy import deepcopy
# Bunch
from bunch import Bunch, bunchify
# lxml
from lxml.etree import _Element as EtreeElement
from lxml.objectify import ObjectifiedElement
# Zato
from zato.common.api import simple_types
from zato.common.marshal_.api import Model
from zato.common.json_internal import loads
from zato.common.typing_ import cast_
from zato.common.util.api import make_repr
from zato.common.util.http_ import get_form_data as util_get_form_data
# Zato - Cython
from zato.simpleio import ServiceInput
# ################################################################################################################################
# ################################################################################################################################
if 0:
# stdlib
from logging import Logger
# Arrow
from arrow import Arrow
# hl7apy
from hl7apy.core import Message as hl7apy_Message
# Kombu
from kombu.message import Message as KombuAMQPMessage
# Zato
from zato.common.kvdb.api import KVDB as KVDBAPI
from zato.common.odb.api import PoolStore
from zato.common.typing_ import any_, callable_, stranydict, strnone
from zato.hl7.mllp.server import ConnCtx as HL7ConnCtx
from zato.server.config import ConfigDict, ConfigStore
from zato.server.connection.email import EMailAPI
from zato.server.connection.ftp import FTPStore
from zato.server.connection.jms_wmq.outgoing import WMQFacade
from zato.server.connection.search import SearchAPI
from zato.server.connection.sms import SMSAPI
from zato.server.connection.vault import VaultConnAPI
from zato.server.connection.zmq_.outgoing import ZMQFacade
from zato.server.service import AMQPFacade, Service
# Zato - Cython
from zato.simpleio import CySimpleIO
AMQPFacade = AMQPFacade
Arrow = Arrow
ConfigDict = ConfigDict
ConfigStore = ConfigStore
CySimpleIO = CySimpleIO
EMailAPI = EMailAPI
FTPStore = FTPStore
hl7apy_Message = hl7apy_Message
HL7ConnCtx = HL7ConnCtx
KombuAMQPMessage = KombuAMQPMessage
KVDBAPI = KVDBAPI
Logger = Logger
PoolStore = PoolStore
SearchAPI = SearchAPI
Service = Service
SMSAPI = SMSAPI
VaultConnAPI = VaultConnAPI
WMQFacade = WMQFacade
ZMQFacade = ZMQFacade
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
NOT_GIVEN = 'ZATO_NOT_GIVEN'
# ################################################################################################################################
# ################################################################################################################################
direct_payload = simple_types + (EtreeElement, ObjectifiedElement)
# ################################################################################################################################
# ################################################################################################################################
class HTTPRequestData:
    """ Data regarding an HTTP request.
    """
    __slots__ = 'method', 'GET', 'POST', 'path', 'params', 'user_agent', '_wsgi_environ'

    def __init__(self, _Bunch=Bunch):
        self.method = None # type: str
        self.GET = _Bunch()
        self.POST = _Bunch()
        self.path = None # type: str
        self.params = _Bunch()
        self.user_agent = ''
        self._wsgi_environ = None # type: dict

    def init(self, wsgi_environ=None):
        """ Populates the object from a WSGI environ dict. May be called without one, in which case
        all the attributes keep their empty defaults.
        """
        # Normalize the input first - previously the lookups below read the raw argument,
        # which raised AttributeError when this method was invoked with its default of None.
        self._wsgi_environ = wsgi_environ or {}
        self.method = self._wsgi_environ.get('REQUEST_METHOD') # type: str
        self.GET.update(self._wsgi_environ.get('zato.http.GET', {})) # type: dict
        self.POST.update(self._wsgi_environ.get('zato.http.POST', {}))
        self.path = self._wsgi_environ.get('PATH_INFO') # type: str
        self.params.update(self._wsgi_environ.get('zato.http.path_params', {}))
        self.user_agent = self._wsgi_environ.get('HTTP_USER_AGENT')

    def get_form_data(self) -> 'stranydict':
        """ Returns form data extracted from the WSGI environment this request was initialized with. """
        return util_get_form_data(self._wsgi_environ)

    def __repr__(self):
        return make_repr(self)
# ################################################################################################################################
class AMQPRequestData:
    """ Metadata associated with a single AMQP request.
    """
    __slots__ = ('msg', 'ack', 'reject')

    def __init__(self, msg):
        # type: (KombuAMQPMessage)

        # The underlying Kombu message object
        self.msg = msg

        # Expose the message's own acknowledge / reject callables directly
        self.ack = msg.ack
        self.reject = msg.reject
# ################################################################################################################################
class IBMMQRequestData:
    """ Metadata for IBM MQ requests.
    """
    __slots__ = ('ctx', 'data', 'msg_id', 'correlation_id', 'timestamp', 'put_date', 'put_time', 'reply_to', 'mqmd')

    def __init__(self, ctx):
        # type: dict

        # Keep the whole context dict around ..
        self.ctx = ctx

        # .. and unpack its individual elements into same-named attributes for convenience.
        for name in ('data', 'msg_id', 'correlation_id', 'timestamp', 'put_date', 'put_time', 'reply_to', 'mqmd'):
            setattr(self, name, ctx[name])

# Backward compatibility
WebSphereMQRequestData = IBMMQRequestData
# ################################################################################################################################
class HL7RequestData:
    """ Details of an individual HL7 request.
    """
    __slots__ = 'connection', 'data',

    def __init__(self, connection, data):
        # type: (HL7ConnCtx, hl7apy_Message) -> None

        # The connection context the request arrived through
        self.connection = connection

        # The HL7 message itself
        self.data = data
# ################################################################################################################################
class Request:
    """ Wraps a service request and adds some useful meta-data.
    """
    # Raw request data, before any SimpleIO parsing
    text: 'any_'

    __slots__ = ('service', 'logger', 'payload', 'text', 'input', 'cid', 'data_format', 'transport',
        'encrypt_func', 'encrypt_secrets', 'bytes_to_str_encoding', '_wsgi_environ', 'channel_params',
        'merge_channel_params', 'http', 'amqp', 'wmq', 'ibm_mq', 'hl7', 'enforce_string_encoding')

    def __init__(
        self,
        service, # type: Service
        simple_io_config=None, # type: any_
        data_format=None, # type: strnone
        transport=None # type: strnone
    ) -> 'None':
        # Note: simple_io_config is accepted but not used in this method.
        self.service = service
        self.logger = cast_('Logger', service.logger)
        self.payload = ''
        self.text = ''
        self.input = None # type: any_
        self.cid = cast_('str', None)
        self.data_format = cast_('str', data_format)
        self.transport = cast_('str', transport)
        self.http = HTTPRequestData()
        self._wsgi_environ = cast_('stranydict', None)
        self.channel_params = cast_('stranydict', {})
        self.merge_channel_params = True
        self.amqp = cast_('AMQPRequestData', None)
        # Both names point to the same IBM MQ request data - self.ibm_mq is the preferred one.
        self.wmq = self.ibm_mq = cast_('IBMMQRequestData', None)
        self.hl7 = cast_('HL7RequestData', None)
        self.encrypt_func = None
        self.encrypt_secrets = True
        self.bytes_to_str_encoding = cast_('str', None)
        # NOTE(review): 'enforce_string_encoding' is listed in __slots__ but never assigned here -
        # reading it before an external assignment raises AttributeError; confirm whether intended.

# ################################################################################################################################

    def init(
        self,
        is_sio, # type: bool
        cid, # type: str
        sio, # type: CySimpleIO
        data_format, # type: str
        transport, # type: str
        wsgi_environ, # type: stranydict
        encrypt_func # type: callable_
    ) -> 'None':
        """ Initializes the object with an invocation-specific data.
        """
        self.input = ServiceInput()
        self.encrypt_func = encrypt_func

        if is_sio:

            # Parse the payload through SimpleIO, passing channel params in as extra data.
            parsed = sio.parse_input(self.payload or {}, data_format, extra=self.channel_params, service=self.service)

            # A dataclass-based model replaces self.input wholesale ..
            if isinstance(parsed, Model):
                self.input = parsed
            else:
                # .. otherwise, update self.input with whatever was parsed ..
                if isinstance(parsed, dict):
                    self.input.update(parsed)
                # .. and add any channel params that the parser did not already provide.
                for param, value in self.channel_params.items():
                    if param not in self.input:
                        self.input[param] = value

        # We merge channel params in if requested even if it's not SIO
        else:
            if self.merge_channel_params:
                self.input.update(self.channel_params)

# ################################################################################################################################

    @property
    def raw_request(self) -> 'any_':
        """ A backward-compatible alias for self.text. """
        return self.text

# ################################################################################################################################

    @raw_request.setter
    def raw_request(self, value:'any_') -> 'any_':
        self.text = value

# ################################################################################################################################

    def deepcopy(self):
        """ Returns a deep copy of self.
        """
        # NOTE(review): Request(None) invokes __init__ which reads service.logger on the None
        # service - this looks like it would raise AttributeError; confirm whether this method
        # is still in use and how it is expected to work.
        request = Request(None)
        request.logger = logging.getLogger(self.logger.name)

        # Copy every slot except the logger, which was recreated above.
        for name in Request.__slots__:
            if name == 'logger':
                continue
            setattr(request, name, deepcopy(getattr(self, name)))

        return request

# ################################################################################################################################

    def to_bunch(self):
        """ Returns a bunchified (converted into bunch.Bunch) version of self.raw_request,
        deep copied if it's a dict (or a subclass). Note that it makes sense to use this method
        only with dicts or JSON input.
        """
        # We have a dict
        if isinstance(self.raw_request, dict):
            return bunchify(deepcopy(self.raw_request))

        # Must be a JSON input, raises exception when attempting to load it if it's not
        return bunchify(loads(self.raw_request))

    # Backward-compatibility
    bunchified = to_bunch
# ################################################################################################################################
# ################################################################################################################################
class Outgoing:
    """ A container for various outgoing connections a service can access. This in fact is a thin wrapper around data
    fetched from the service's self.worker_store.
    """
    __slots__ = ('amqp', 'ftp', 'ibm_mq', 'jms_wmq', 'wmq', 'odoo', 'plain_http', 'rest', 'soap', 'sql', 'zmq', 'wsx', 'vault',
        'sms', 'sap', 'sftp', 'ldap', 'mongodb', 'def_kafka', 'hl7', 'redis')

    def __init__(self, amqp=None, ftp=None, jms_wmq=None, odoo=None, plain_http=None, soap=None, sql=None, zmq=None,
        wsx=None, vault=None, sms=None, sap=None, sftp=None, ldap=None, mongodb=None, def_kafka=None,
        redis=None, hl7=None):

        self.amqp = cast_('AMQPFacade', amqp)
        self.ftp = cast_('FTPStore', ftp)

        # One underlying IBM MQ facade under three names - self.ibm_mq is the preferred one,
        # the other two exist for backward compatibility with Zato 2.0.
        self.ibm_mq = self.wmq = self.jms_wmq = cast_('WMQFacade', jms_wmq)

        self.odoo = cast_('ConfigDict', odoo)

        # REST connections - self.plain_http is a backward-compatible alias
        self.rest = self.plain_http = cast_('ConfigDict', plain_http)

        self.soap = cast_('ConfigDict', soap)
        self.sql = cast_('PoolStore', sql)
        self.zmq = cast_('ZMQFacade', zmq)
        self.wsx = cast_('stranydict', wsx)
        self.vault = cast_('VaultConnAPI', vault)
        self.sms = cast_('SMSAPI', sms)
        self.sap = cast_('ConfigDict', sap)
        self.sftp = cast_('ConfigDict', sftp)
        self.ldap = cast_('stranydict', ldap)
        self.mongodb = cast_('stranydict', mongodb)

        # NOTE(review): the def_kafka parameter is ignored and the attribute is always None - confirm if intended.
        self.def_kafka = cast_('stranydict', None)

        self.redis = cast_('KVDBAPI', redis)
        self.hl7 = cast_('HL7API', hl7)
# ################################################################################################################################
# ################################################################################################################################
class AWS:
    """ A container for AWS-related connections, currently only S3. """
    __slots__ = 's3',

    # S3 connection definitions
    s3: 'ConfigDict'
# ################################################################################################################################
# ################################################################################################################################
class Cloud:
    """ A container for cloud-related connections a service can establish.
    """
    __slots__ = 'aws', 'confluence', 'dropbox', 'jira', 'salesforce', 'ms365'

    aws: 'AWS'
    confluence: 'stranydict'
    dropbox: 'stranydict'
    jira: 'stranydict'
    salesforce: 'stranydict'
    ms365: 'stranydict'

    def __init__(self) -> 'None':
        # Only AWS needs an explicit container object here - the remaining attributes
        # are assigned externally.
        self.aws = AWS()
# ################################################################################################################################
# ################################################################################################################################
class Definition:
    """ A container for connection definitions a service has access to.
    """
    __slots__ = 'kafka',

    # Kafka connection definitions
    kafka: 'stranydict'
# ################################################################################################################################
# ################################################################################################################################
class InstantMessaging:
    """ A container for Instant Messaging connections, e.g. Slack or Telegram.
    """
    __slots__ = 'slack', 'telegram'

    # Slack connections
    slack: 'stranydict'

    # Telegram connections
    telegram: 'stranydict'
# ################################################################################################################################
# ################################################################################################################################
class MLLP:
    """ A placeholder for MLLP-related functionality (currently empty). """
    pass
# ################################################################################################################################
# ################################################################################################################################
class HL7API:
    """ A container for HL7 connections a service can establish.
    """
    __slots__ = 'fhir', 'mllp'

    fhir: 'stranydict'
    mllp: 'stranydict'

    def __init__(self, fhir:'stranydict', mllp:'stranydict') -> None:
        # Store both connection containers as given on input
        self.mllp = mllp
        self.fhir = fhir
# ################################################################################################################################
# ################################################################################################################################
| 15,801
|
Python
|
.py
| 318
| 43.323899
| 130
| 0.443188
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,384
|
sync.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/sync.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=unused-import, unused-variable
# stdlib
import logging
from traceback import format_exc
# gevent
from gevent import sleep
from gevent.lock import RLock
# Zato
from zato.common.api import PUBSUB
from zato.common.exception import BadRequest
from zato.common.typing_ import any_, anydict, anylist, anyset, anytuple, callable_, dict_, dictlist, intsetdict, strlist, \
strdictdict, strset, strsetdict
from zato.common.util.api import spawn_greenlet
from zato.common.util.pubsub import make_short_msg_copy_from_dict
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
if 0:
from zato.server.pubsub import PubSub
from zato.server.pubsub.model import Endpoint
# ################################################################################################################################
logger = logging.getLogger('zato_pubsub.ps')
logger_zato = logging.getLogger('zato')
logger_overflow = logging.getLogger('zato_pubsub_overflow')
# ################################################################################################################################
hook_type_to_method = {
PUBSUB.HOOK_TYPE.BEFORE_PUBLISH: 'before_publish',
PUBSUB.HOOK_TYPE.BEFORE_DELIVERY: 'before_delivery',
PUBSUB.HOOK_TYPE.ON_OUTGOING_SOAP_INVOKE: 'on_outgoing_soap_invoke',
PUBSUB.HOOK_TYPE.ON_SUBSCRIBED: 'on_subscribed',
PUBSUB.HOOK_TYPE.ON_UNSUBSCRIBED: 'on_unsubscribed',
}
# ################################################################################################################################
_default_pri=PUBSUB.PRIORITY.DEFAULT
_pri_min=PUBSUB.PRIORITY.MIN
_pri_max=PUBSUB.PRIORITY.MAX
# ################################################################################################################################
_update_attrs = (
'data', 'size', 'expiration', 'priority', 'pub_correl_id', 'in_reply_to', 'mime_type', 'expiration', 'expiration_time'
)
# ################################################################################################################################
_default_expiration = PUBSUB.DEFAULT.EXPIRATION
default_sk_server_table_columns = 6, 15, 8, 6, 17, 80
# ################################################################################################################################
def get_priority(
    cid, # type: str
    input, # type: anydict
    _pri_min=_pri_min, # type: int
    _pri_max=_pri_max, # type: int
    _pri_def=_default_pri # type: int
) -> 'int':
    """ Get and validate message priority.
    """
    value = input.get('priority')

    # No priority given on input (or it was zero) -> use the default one.
    if not value:
        return _pri_def

    # A priority was given -> it must fall within the allowed range.
    if not (_pri_min <= value <= _pri_max):
        raise BadRequest(cid, 'Priority `{}` outside of allowed range {}-{}'.format(value, _pri_min, _pri_max))

    return value
# ################################################################################################################################
def get_expiration(
    cid, # type: str
    input, # type: anydict
    default_expiration=_default_expiration # type: int
) -> 'int':
    """ Get and validate message expiration.
    Returns (2 ** 31 - 1) * 1000 milliseconds (around 70 years) if expiration is not set explicitly.
    """
    value = input.get('expiration')

    # A negative expiration is never valid.
    if value is not None and value < 0:
        raise BadRequest(cid, 'Expiration `{}` must not be negative'.format(value))

    # Zero and None both fall back to the default expiration.
    return value or default_expiration
# ################################################################################################################################
class InRAMSync:
""" A backlog of messages kept in RAM for whom there are subscriptions - that is, they are known to have subscribers
and will be ultimately delivered to them. Stores a list of sub_keys and all messages that a sub_key points to.
It acts as a multi-key dict and keeps only a single copy of message for each sub_key.
"""
lock: 'RLock'
pubsub: 'PubSub'
msg_id_to_msg: 'strdictdict'
topic_id_msg_id: 'intsetdict'
sub_key_to_msg_id: 'strsetdict'
msg_id_to_sub_key: 'strsetdict'
def __init__(self, pubsub:'PubSub') -> 'None':
self.lock = RLock()
self.pubsub = pubsub
# Msg ID -> Message data - What is the actual contents of each message
self.msg_id_to_msg = {}
# Topic ID -> Msg ID set --- What messages are available for each topic (no matter sub_key)
self.topic_id_msg_id = {}
# Sub key -> Msg ID set --- What messages are available for a given subcriber
self.sub_key_to_msg_id = {}
# Msg ID -> Sub key set - What subscribers are interested in a given message
self.msg_id_to_sub_key = {}
# Start in background a cleanup task that deletes all expired and removed messages
_ = spawn_greenlet(self.run_cleanup_task)
# ################################################################################################################################
    def add_messages(
        self,
        cid, # type: str
        topic_id, # type: int
        topic_name, # type: str
        max_depth, # type: int
        sub_keys, # type: strlist
        messages, # type: dictlist
        _default_pri=_default_pri # type: int
    ) -> 'None':
        """ Adds all input messages to sub_keys for the topic.
        """
        with self.lock:

            # Local aliases
            msg_ids = [msg['pub_msg_id'] for msg in messages]
            len_messages = len(messages)
            topic_messages = self.topic_id_msg_id.setdefault(topic_id, set())

            # Try to append the messages for each of their subscribers ..
            for sub_key in sub_keys:

                # .. but first, make sure that storing these messages would not overflow the topic's depth,
                # if it could exceed the max depth, store the messages in log files only ..
                if len(topic_messages) + len_messages > max_depth:
                    self.log_messages_to_store(cid, topic_name, max_depth, sub_key, messages)

                    # .. skip this sub_key in such a case ..
                    continue

                # .. otherwise, we make it known that the sub_key is interested in this message ..
                sub_key_msg = self.sub_key_to_msg_id.setdefault(sub_key, set())
                sub_key_msg.update(msg_ids)

            # For each message given on input, store its actual contents ..
            for msg in messages:
                self.msg_id_to_msg[msg['pub_msg_id']] = msg

                # We received timestamps as strings whereas our recipients require floats
                # so we need to do the conversion here.
                msg['pub_time'] = float(msg['pub_time'])
                if msg.get('ext_pub_time'):
                    msg['ext_pub_time'] = float(msg['ext_pub_time'])

                # .. attach server metadata ..
                msg['server_name'] = self.pubsub.server.name
                msg['server_pid'] = self.pubsub.server.pid

                # .. set default priority if none was given ..
                if 'priority' not in msg:
                    msg['priority'] = _default_pri

                # .. add a reverse mapping, from message ID to sub_key ..
                # NOTE(review): sub_keys skipped above by the max-depth check are still added
                # to this reverse mapping - confirm whether that is intended.
                msg_sub_key = self.msg_id_to_sub_key.setdefault(msg['pub_msg_id'], set())
                msg_sub_key.update(sub_keys)

            # .. and add a reference to it to the topic.
            topic_messages.update(msg_ids)
# ################################################################################################################################
def update_msg(
self,
msg, # type: anydict
_update_attrs=_update_attrs, # type: anytuple
_warn='No such message in sync backlog `%s`' # type: str
) -> 'bool':
with self.lock:
_msg = self.msg_id_to_msg.get(msg['msg_id'])
if not _msg:
logger.warning(_warn, msg['msg_id'])
logger_zato.warning(_warn, msg['msg_id'])
return False # No such message
else:
for attr in _update_attrs:
_msg[attr] = msg[attr]
# Ok, found and updated
return True
# ################################################################################################################################
    def delete_msg_by_id(self, msg_id:'str') -> 'None':
        """ Deletes a message by its ID - a convenience wrapper around self.delete_messages.
        """
        self.delete_messages([msg_id])
# ################################################################################################################################
    def _delete_messages(self, msg_list:'strlist') -> 'None':
        """ Low-level implementation of self.delete_messages - must be called with self.lock held.
        """
        logger.info('Deleting non-GD messages `%s`', msg_list)

        for msg_id in list(msg_list):

            # Remove the direct mappings first, keeping track of whether they existed at all
            found_to_sub_key = self.msg_id_to_sub_key.pop(msg_id, None)
            found_to_msg = self.msg_id_to_msg.pop(msg_id, None)

            _has_topic_msg = False # Was the ID found for at least one topic
            _has_sk_msg = False # Ditto but for sub_keys

            # Remove the ID from every topic's message set ..
            for _topic_msg_set in self.topic_id_msg_id.values():
                try:
                    _ = _topic_msg_set.remove(msg_id)
                except KeyError:
                    pass # This is fine, msg_id did not belong to this topic
                else:
                    _has_topic_msg = True

            # .. and from every sub_key's message set.
            for _sk_msg_set in self.sub_key_to_msg_id.values():
                try:
                    _ = _sk_msg_set.remove(msg_id)
                except KeyError:
                    pass # This is fine, msg_id did not belong to this sub_key
                else:
                    _has_sk_msg = True

            # Finally, warn about every structure the ID was not found in.
            if not found_to_sub_key:
                logger.warning('Message not found (msg_id_to_sub_key) %s', msg_id)
                logger_zato.warning('Message not found (msg_id_to_sub_key) %s', msg_id)

            if not found_to_msg:
                logger.warning('Message not found (msg_id_to_msg) %s', msg_id)
                logger_zato.warning('Message not found (msg_id_to_msg) %s', msg_id)

            if not _has_topic_msg:
                logger.warning('Message not found (_has_topic_msg) %s', msg_id)
                logger_zato.warning('Message not found (_has_topic_msg) %s', msg_id)

            if not _has_sk_msg:
                logger.warning('Message not found (_has_sk_msg) %s', msg_id)
                logger_zato.warning('Message not found (_has_sk_msg) %s', msg_id)
# ################################################################################################################################
    def delete_messages(self, msg_list:'strlist') -> 'None':
        """ Deletes all messages from input msg_list - acquires self.lock first.
        """
        with self.lock:
            self._delete_messages(msg_list)
# ################################################################################################################################
def has_messages_by_sub_key(self, sub_key:'str') -> 'bool':
with self.lock:
msg_id_set = self.sub_key_to_msg_id.get(sub_key) or set()
return len(msg_id_set) > 0
# ################################################################################################################################
    def clear_topic(self, topic_id:'int') -> 'None':
        """ Deletes all in-RAM messages belonging to the input topic.
        """
        logger.info('Clearing topic `%s` (id:%s)', self.pubsub.get_topic_by_id(topic_id).name, topic_id)

        with self.lock:

            # Not all servers will have messages for the topic, hence .get
            messages = self.topic_id_msg_id.get(topic_id, set())

            if messages:
                messages = list(messages) # We need a copy so as not to change the input set during iteration later on
                self._delete_messages(messages)
            else:
                logger.info(
                    'Did not find any non-GD messages to delete for topic `%s`',
                    self.pubsub.get_topic_by_id(topic_id))
# ################################################################################################################################
    def get_delete_messages_by_sub_keys(
        self,
        topic_id, # type: int
        sub_keys, # type: strlist
        delete_msg=True, # type: bool
        delete_sub=False # type: bool
    ) -> 'dictlist':
        """ Low-level implementation of retrieve_messages_by_sub_keys which must be called with self.lock held.
        """
        # Forward declaration
        msg_id: 'str'

        # We cannot return expired messages
        now = utcnow_as_ms()

        # We cannot have duplicates on output
        msg_seen = set() # type: strset

        # Response to produce
        out = [] # type: dictlist

        # A list of messages that will be optionally deleted before they are returned
        to_delete_msg = set() # type: anyset

        # First, collect data for all sub_keys ..
        for sub_key in sub_keys:

            for msg_id in self.sub_key_to_msg_id.get(sub_key, []):

                # We already had this message marked for output
                if msg_id in msg_seen:
                    continue
                else:
                    # Mark as already seen
                    msg_seen.add(msg_id)

                    # Filter out expired messages
                    msg = self.msg_id_to_msg.get(msg_id)
                    if not msg:
                        logger.warning('Msg `%s` not found in self.msg_id_to_msg', msg_id)
                        continue
                    if now >= msg['expiration_time']:
                        continue
                    else:
                        out.append(self.msg_id_to_msg[msg_id])

                # Only reached for messages that were actually appended above -
                # both the not-found and the expired branches continue the loop.
                if delete_msg:
                    to_delete_msg.add(msg_id)

        # Delete all messages marked to be deleted ..
        for msg_id in to_delete_msg:

            # .. first, direct mappings ..
            _ = self.msg_id_to_msg.pop(msg_id, None)
            logger.info('Deleting msg from mapping dict `%s`, before:`%s`', msg_id, self.msg_id_to_msg)

            # .. now, remove the message from topic ..
            self.topic_id_msg_id[topic_id].remove(msg_id)
            logger.info('Deleting msg from mapping topic `%s`, after:`%s`', msg_id, self.topic_id_msg_id)

            # .. now, find the message for each sub_key ..
            for sub_key in sub_keys:
                sub_key_to_msg_id = self.sub_key_to_msg_id.get(sub_key)

                # We need this if statement because it is possible that a client is subscribed to a topic
                # but it will not receive a particular message. This is possible if the message is a response
                # to a previous request and the latter used reply_to_sk, in which case only that one sub_key pointed to
                # by reply_to_sk will get the response, which ultimately means that self.sub_key_to_msg_id
                # will not have this response for current sub_key.
                if sub_key_to_msg_id:

                    # .. delete the message itself - but we need to catch KeyError because
                    # to_delete_msg holds all messages to be deleted and we do not know
                    # if this particular message belonged to this particular sub_key or not.
                    try:
                        sub_key_to_msg_id.remove(msg_id)
                    except KeyError:
                        pass # OK, message was not found for this sub_key

                    # .. now delete the sub_key either because we are explicitly told to (e.g. during unsubscribe)
                    if delete_sub:# or (not sub_key_to_msg_id):
                        del self.sub_key_to_msg_id[sub_key]

        return out
# ################################################################################################################################
    def retrieve_messages_by_sub_keys(self, topic_id:'int', sub_keys:'strlist') -> 'dictlist':
        """ Retrieves and returns all messages matching input - messages are deleted from RAM.
        """
        with self.lock:
            return self.get_delete_messages_by_sub_keys(topic_id, sub_keys)
# ################################################################################################################################
    def get_messages_by_topic_id(
        self,
        topic_id, # type: int
        needs_short_copy, # type: bool
        query='' # type: str
    ) -> 'anylist':
        """ Returns messages for topic by its ID, optionally with pagination and filtering by input query.
        """
        # Forward declaration
        msg_id: 'str'

        with self.lock:
            msg_id_list = self.topic_id_msg_id.get(topic_id, [])

            # No messages for this topic at all
            if not msg_id_list:
                return []

            # A list of messages to be returned - we actually need to build a whole list instead of using
            # generators because the underlying container is an unsorted set and we need a sorted result on output.
            msg_list = [] # type: dictlist

            for msg_id in msg_id_list:
                msg = self.msg_id_to_msg[msg_id]

                # Note that filtering by query only inspects the first data_prefix_len characters of each message.
                if query:
                    if query not in msg['data'][:self.pubsub.data_prefix_len]:
                        continue

                # Return either a shortened copy or the message as-is, depending on what the caller needs.
                if needs_short_copy:
                    out_msg = make_short_msg_copy_from_dict(msg, self.pubsub.data_prefix_len, self.pubsub.data_prefix_short_len)
                else:
                    out_msg = msg

                msg_list.append(out_msg)

            return msg_list
# ################################################################################################################################
    def get_message_by_id(self, msg_id:'str') -> 'anydict':
        """ Returns a message by its ID - raises KeyError if it is not found.
        """
        with self.lock:
            return self.msg_id_to_msg[msg_id]
# ################################################################################################################################
    def unsubscribe(
        self,
        topic_id, # type: int
        topic_name, # type: str
        sub_keys, # type: strlist
        pattern='Removing subscription info for `%s` from topic `%s`' # type: str
    ) -> 'None':
        """ Unsubscribes all the sub_keys from the input topic.
        """
        # Forward declarations
        msg_id: 'str'
        sub_key: 'str'

        # Always acquire a lock for this kind of operation
        with self.lock:

            # For each sub_key ..
            for sub_key in sub_keys:

                # .. get all messages waiting for this subscriber, assuming there are any at all ..
                msg_ids = self.sub_key_to_msg_id.pop(sub_key, [])

                # .. for each message found we need to check if it is needed by any other subscriber,
                # and if it's not, then we delete all the references to this message. Otherwise, we leave it
                # as is, because there is at least one other subscriber waiting for it.
                for msg_id in msg_ids:

                    # Get all subscribers interested in this message ..
                    # NOTE(review): a direct [msg_id] lookup - raises KeyError if the reverse
                    # mapping is missing for this message; confirm this cannot happen here.
                    current_subs = self.msg_id_to_sub_key[msg_id]
                    current_subs.remove(sub_key)

                    # .. if the set is empty, it means that there are no subscribers left for that message,
                    # in which case we may delete references to this message from other look-up structures.
                    if not current_subs:
                        del self.msg_id_to_msg[msg_id]
                        topic_msg = self.topic_id_msg_id[topic_id]
                        topic_msg.remove(msg_id)

        logger.info(pattern, sub_keys, topic_name)
        logger_zato.info(pattern, sub_keys, topic_name)
# ################################################################################################################################
def run_cleanup_task(self, _utcnow:'callable_'=utcnow_as_ms, _sleep:'callable_'=sleep) -> 'None':
    """ A background task waking up periodically to remove all expired and retrieved messages from backlog.
    Runs forever - any exception is logged and the loop continues after a short pause.
    """
    while True:
        try:
            with self.lock:

                # Cache of endpoint_id -> Endpoint so each publisher is looked up at most once
                publishers = {} # type: dict_[int, Endpoint]

                # Collected first so as not to modify any dicts during iteration
                expired_msg = [] # type: anylist

                # Calling it once will suffice
                now = _utcnow()

                for msg in self.msg_id_to_msg.values():
                    if now >= msg['expiration_time']:

                        # It's possible that there will be many expired messages all sent by the same publisher
                        # so there is no need to query self.pubsub for each message.
                        if msg['published_by_id'] not in publishers:
                            publishers[msg['published_by_id']] = self.pubsub.get_endpoint_by_id(msg['published_by_id'])

                        # We can be sure that it is always found
                        publisher = publishers[msg['published_by_id']] # type: Endpoint

                        # Log the message to make sure the expiration event is always logged ..
                        logger_zato.info('Found an expired msg:`%s`, topic:`%s`, publisher:`%s`, pub_time:`%s`, exp:`%s`',
                            msg['pub_msg_id'], msg['topic_name'], publisher.name, msg['pub_time'], msg['expiration'])

                        # .. and append it to the list of messages to be deleted.
                        expired_msg.append((msg['pub_msg_id'], msg['topic_id']))

                # For logging what was done
                len_expired = len(expired_msg)

                # Iterate over all the expired messages found and delete them from in-RAM structures
                for msg_id, topic_id in expired_msg:

                    # Get all sub_keys waiting for these messages and delete the message from each one,
                    # but note that there may be possibly no subscribers at all if the message was published
                    # to a topic without any subscribers.
                    for sub_key in self.msg_id_to_sub_key.pop(msg_id):
                        self.sub_key_to_msg_id[sub_key].remove(msg_id)

                    # Remove all references to the message from topic
                    self.topic_id_msg_id[topic_id].remove(msg_id)

                    # And finally, remove the message's contents
                    del self.msg_id_to_msg[msg_id]

                suffix = 's' if (len_expired == 0 or len_expired > 1) else ''
                len_messages = len(self.msg_id_to_msg)

                if len_expired or len_messages:
                    # Fixed: this line previously logged the entire self.msg_id_to_msg dict
                    # in place of the computed len_messages, and it used eager %-formatting;
                    # now the remaining count is reported with lazy logger arguments.
                    logger.info('In-RAM. Deleted %s pub/sub message%s. Left:%s', len_expired, suffix, len_messages)

            # Sleep for a moment before checking again but don't do it with self.lock held.
            _sleep(2)

        except Exception:
            e = format_exc()
            log_msg = 'Could not remove messages from in-RAM backlog, e:`%s`'
            logger.warning(log_msg, e)
            logger_zato.warning(log_msg, e)
            _sleep(0.1)
# ################################################################################################################################
def log_messages_to_store(
    self,
    cid, # type: str
    topic_name, # type: str
    max_depth, # type: int
    sub_key, # type: str
    messages # type: any_
) -> 'None':
    """ Invoked when a topic's in-RAM delivery queue is full - records the overflowing
    messages in the overflow logger so they end up on disk instead of disappearing.
    """
    # The same warning goes to both the pub/sub log and the main one,
    # just to make sure it will be easily found.
    warn_msg = 'Reached max in-RAM delivery depth of %r for topic `%r` (cid:%r). Extra messages will be stored in logs.'
    logger.warning(warn_msg, max_depth, topic_name, cid)
    logger_zato.warning(warn_msg, max_depth, topic_name, cid)

    # Store messages in logger - by default will go to disk
    logger_overflow.info('CID:%s, topic:`%s`, sub_key:%s, messages:%s', cid, topic_name, sub_key, messages)
# ################################################################################################################################
def get_topic_depth(self, topic_id:'int') -> 'int':
    """ Returns how many messages are currently kept in RAM for the given topic. """
    with self.lock:
        message_ids = self.topic_id_msg_id.get(topic_id, set())
        return len(message_ids)
# ################################################################################################################################
# ################################################################################################################################
| 25,552
|
Python
|
.py
| 454
| 44.180617
| 130
| 0.491403
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,385
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=unused-import, redefined-builtin, unused-variable
# stdlib
import logging
import os
from contextlib import closing
from io import StringIO
from operator import attrgetter
from traceback import format_exc
# gevent
from gevent.lock import RLock
# Texttable
from texttable import Texttable
# Zato
from zato.common.api import PUBSUB
from zato.common.broker_message import PUBSUB as BROKER_MSG_PUBSUB
from zato.common.odb.model import WebSocketClientPubSubKeys
from zato.common.odb.query.pubsub.queue import set_to_delete
from zato.common.typing_ import cast_, dict_, optional
from zato.common.util.api import as_bool, spawn_greenlet, wait_for_dict_key, wait_for_dict_key_by_get_func
from zato.common.util.time_ import datetime_from_ms, utcnow_as_ms
from zato.server.pubsub.core.endpoint import EndpointAPI
from zato.server.pubsub.core.trigger import NotifyPubSubTasksTrigger
from zato.server.pubsub.core.hook import HookAPI
from zato.server.pubsub.core.pubapi import PubAPI
from zato.server.pubsub.core.sql import SQLAPI
from zato.server.pubsub.core.topic import TopicAPI
from zato.server.pubsub.model import inttopicdict, strsubdict, strtopicdict, Subscription, SubKeyServer
from zato.server.pubsub.publisher import Publisher
from zato.server.pubsub.sync import InRAMSync
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anydict, anylist, anytuple, callable_, callnone, dictlist, intdict, \
intlist, intnone, list_, stranydict, strnone, strstrdict, strlist, strlistdict, \
strlistempty, strtuple, type_
from zato.distlock import Lock
from zato.server.base.parallel import ParallelServer
from zato.server.pubsub.model import Endpoint, subnone, sublist, Topic, topiclist
from zato.server.pubsub.delivery.task import msgiter
from zato.server.pubsub.delivery.tool import PubSubTool
from zato.server.service import Service
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger('zato_pubsub.ps')
logger_zato = logging.getLogger('zato')
logger_overflow = logging.getLogger('zato_pubsub_overflow')
# ################################################################################################################################
# ################################################################################################################################
_ps_default = PUBSUB.DEFAULT
_end_srv_id = PUBSUB.ENDPOINT_TYPE.SERVICE.id
# ################################################################################################################################
# ################################################################################################################################
default_sk_server_table_columns = 6, 15, 8, 6, 17, 80
default_sub_pattern_matched = '(No sub pattern)'
# ################################################################################################################################
# ################################################################################################################################
class PubSub:
endpoint_api: 'EndpointAPI'
notify_pub_sub_tasks_trigger: 'NotifyPubSubTasksTrigger'
def __init__(
    self,
    cluster_id, # type: int
    server, # type: ParallelServer
    broker_client=None, # type: any_
    *,
    sync_max_iters=None, # type: intnone
    spawn_trigger_notify=True # type: bool
) -> 'None':
    """ Builds the per-server pub/sub facade. The sub-APIs constructed below
    (publisher, hooks, topics, SQL, public API, sync trigger) reference each other,
    so the order of initialization matters.
    """
    self.cluster_id = cluster_id
    self.server = server
    self.broker_client = broker_client
    self.sync_max_iters = sync_max_iters
    self.lock = RLock()

    # Column widths used when formatting the sub_key server table for logs
    self.sk_server_table_columns = self.server.fs_server_config.pubsub.get('sk_server_table_columns') or \
        default_sk_server_table_columns # type: anytuple

    # This is a pub/sub tool for delivery of Zato services within this server
    self.service_pubsub_tool = None # type: optional[PubSubTool]

    # Whether and how often to store topic metadata
    self.has_meta_topic = self.server.fs_server_config.pubsub_meta_topic.enabled
    self.topic_meta_store_frequency = self.server.fs_server_config.pubsub_meta_topic.store_frequency

    # Whether to log the inability to find a delivery server (regular and WSX variants)
    self.log_if_deliv_server_not_found = \
        self.server.fs_server_config.pubsub.log_if_deliv_server_not_found # type: bool
    self.log_if_wsx_deliv_server_not_found = \
        self.server.fs_server_config.pubsub.log_if_wsx_deliv_server_not_found # type: bool

    # Manages Endpoint objects
    self.endpoint_api = EndpointAPI()

    # Topic name -> List of Subscription objects
    self.subscriptions_by_topic = {} # type: dict_[str, sublist]

    # Sub key -> Subscription object (exposed read-only via the subscriptions_by_sub_key property)
    self._subscriptions_by_sub_key = {} # type: strsubdict

    # Sub key -> SubKeyServer server/PID handling it
    self.sub_key_servers = {} # type: dict_[str, SubKeyServer]

    # Sub key -> PubSubTool object
    self.pubsub_tool_by_sub_key = {} # type: dict_[str, PubSubTool]

    # A list of PubSubTool objects, each containing delivery tasks
    self.pubsub_tools = [] # type: list_[PubSubTool]

    # A backlog of messages that have at least one subscription, i.e. this is what delivery servers use.
    self.sync_backlog = InRAMSync(self)

    # How many messages have been published through this server, regardless of which topic they were for
    self.msg_pub_counter = 0

    # How many messages a given endpoint published, topic_id -> its message counter.
    self.endpoint_msg_counter = {} # type: intdict

    # Endpoint publication metadata - whether and how often to store it, plus size limits
    self.has_meta_endpoint = cast_('bool', self.server.fs_server_config.pubsub_meta_endpoint_pub.enabled)
    self.endpoint_meta_store_frequency = self.server.fs_server_config.pubsub_meta_endpoint_pub.store_frequency # type: int
    self.endpoint_meta_data_len = self.server.fs_server_config.pubsub_meta_endpoint_pub.data_len # type:int
    self.endpoint_meta_max_history = self.server.fs_server_config.pubsub_meta_endpoint_pub.max_history # type:int

    # How many bytes to use for look up purposes when conducting message searches
    self.data_prefix_len = self.server.fs_server_config.pubsub.data_prefix_len # type: int
    self.data_prefix_short_len = self.server.fs_server_config.pubsub.data_prefix_short_len # type: int

    # Creates SQL sessions
    self.new_session_func = self.server.odb.session

    # A low level implementation that publishes messages to SQL
    self.impl_publisher = Publisher(
        pubsub = self,
        server = self.server,
        marshal_api = self.server.marshal_api,
        service_invoke_func = self.invoke_service,
        new_session_func = self.new_session_func
    )

    # Manages hooks
    self.hook_api = HookAPI(
        lock = self.lock,
        server = self.server,
        invoke_service_func = self.invoke_service,
    )

    # Manages topics
    self.topic_api = TopicAPI(
        hook_api = self.hook_api,
        server_name = self.server.name,
        server_pid = self.server.pid,
        topic_meta_store_frequency = self.topic_meta_store_frequency,
        subscriptions_by_topic = self.subscriptions_by_topic,
        is_allowed_sub_topic_by_endpoint_id_func = self.is_allowed_sub_topic_by_endpoint_id,
    )

    # Provides access to SQL queries
    self.sql_api = SQLAPI(self.cluster_id, self.new_session_func)

    # Low-level implementation of the public pub/sub API
    self.pubapi = PubAPI(
        pubsub = self,
        cluster_id = self.server.cluster_id,
        service_store = self.server.service_store,
        topic_api = self.topic_api,
        endpoint_api = self.endpoint_api,
    )

    # This will trigger synchronization between in-RAM state and delivery tasks
    self.notify_pub_sub_tasks_trigger = NotifyPubSubTasksTrigger(
        lock = self.lock,
        topics = self.topic_api.get_topics(),
        sync_max_iters = self.sync_max_iters,
        invoke_service_func = self.invoke_service,
        set_sync_has_msg_func = self._set_sync_has_msg,
        get_subscriptions_by_topic_func = self.get_subscriptions_by_topic,
        get_delivery_server_by_sub_key_func = self.get_delivery_server_by_sub_key,
        sync_backlog_get_delete_messages_by_sub_keys_func = self.sync_backlog.get_delete_messages_by_sub_keys
    )

    # The trigger loop runs in its own greenlet unless a test asked us not to spawn it
    if spawn_trigger_notify:
        _ = spawn_greenlet(self.notify_pub_sub_tasks_trigger.run)
# ################################################################################################################################
def stop(self) -> 'None':
""" Stops all pub/sub tools, which in turn stops all the delivery tasks.
"""
for item in self.pubsub_tools:
try:
item.stop()
except Exception:
logger.info('Ignoring exception in PubSub.stop -> %s', format_exc())
# ################################################################################################################################
@property
def subscriptions_by_sub_key(self) -> 'strsubdict':
    # Read-only access to the sub_key -> Subscription mapping; the underlying
    # dict is still mutable, the property merely hides the private attribute.
    return self._subscriptions_by_sub_key
# ################################################################################################################################
def incr_pubsub_msg_counter(self, endpoint_id:'int') -> 'None':
with self.lock:
# Update the overall counter
self.msg_pub_counter += 1
# Update the per-endpoint counter too
if endpoint_id in self.endpoint_msg_counter:
self.endpoint_msg_counter[endpoint_id] += 1
else:
self.endpoint_msg_counter[endpoint_id] = 0
# ################################################################################################################################
def needs_endpoint_meta_update(self, endpoint_id:'int') -> 'bool':
    # True when this endpoint's publication counter is a multiple of the configured
    # store frequency. The counter starts at 0 for a new endpoint, so the first
    # publication triggers an update too.
    # NOTE(review): raises KeyError for an endpoint that has never published -
    # confirm callers only invoke this after incr_pubsub_msg_counter.
    with self.lock:
        return self.endpoint_msg_counter[endpoint_id] % self.endpoint_meta_store_frequency == 0
# ################################################################################################################################
def get_subscriptions_by_topic(self, topic_name:'str', require_backlog_messages:'bool'=False) -> 'sublist':
with self.lock:
subs = self.subscriptions_by_topic.get(topic_name, [])
subs = subs[:]
if require_backlog_messages:
out = [] # type: anylist
for item in subs:
if self.sync_backlog.has_messages_by_sub_key(item.sub_key):
out.append(item)
return out
else:
return subs
# ################################################################################################################################
def get_all_subscriptions(self) -> 'strsubdict':
    """ Low-level method to return all the subscriptions by sub_key,
    must be called with self.lock held.
    """
    # Returns the live dict rather than a copy - hence the lock requirement above.
    return self.subscriptions_by_sub_key

# ################################################################################################################################

def _wait_for_sub_key(self, sub_key:'str') -> 'None':
    """ Blocks until the given sub_key appears among known subscriptions, up to 180 seconds. """
    # Log what we are about to do
    logger.info('Waiting for sub_key -> %s', sub_key)

    # Make sure the key is there.
    # NOTE(review): the behavior after the 180s timeout depends on wait_for_dict_key -
    # confirm whether it raises or returns quietly.
    wait_for_dict_key(self.subscriptions_by_sub_key, sub_key, timeout=180)
# ################################################################################################################################
def _get_subscription_by_sub_key(self, sub_key:'str') -> 'Subscription':
""" Low-level implementation of self.get_subscription_by_sub_key, must be called with self.lock held.
"""
# Make sure the key is there ..
# self._wait_for_sub_key(sub_key)
# .. get the subscription ..
sub = self.subscriptions_by_sub_key.get(sub_key)
# .. and return it to the caller if it exists ..
if sub:
return sub
# .. otherwise, raise an error ..
else:
msg = 'No such subscription `{}` among `{}`'.format(sub_key, sorted(self.subscriptions_by_sub_key))
logger.info(msg)
raise KeyError(msg)
# ################################################################################################################################
def get_subscription_by_sub_key(self, sub_key:'str') -> 'subnone':
with self.lock:
try:
return self._get_subscription_by_sub_key(sub_key)
except KeyError:
return None
# ################################################################################################################################
def get_subscription_by_endpoint_id(
    self,
    endpoint_id, # type: int
    topic_name, # type: str
    needs_error=True, # type: bool
) -> 'subnone':
    """ Returns the first subscription found for the endpoint. If none exists,
    either raises KeyError or merely logs, depending on needs_error.
    """
    # NOTE(review): topic_name is used only in the error message below - the match
    # is on endpoint_id alone, so a subscription of this endpoint to *any* topic
    # is returned. Confirm whether the topic should participate in the match.
    with self.lock:
        for sub in self.get_all_subscriptions().values():
            if sub.endpoint_id == endpoint_id:
                return sub
        else:
            msg = f'No sub to topic `{topic_name}` for endpoint_id `{endpoint_id}`'
            if needs_error:
                raise KeyError(msg)
            else:
                logger.info(msg)
# ################################################################################################################################
def get_subscription_by_id(self, sub_id:'int') -> 'subnone':
with self.lock:
for sub in self.subscriptions_by_sub_key.values():
if sub.id == sub_id:
return sub
# ################################################################################################################################
def get_subscription_by_ext_client_id(self, ext_client_id:'str') -> 'subnone':
with self.lock:
for sub in self.subscriptions_by_sub_key.values():
if sub.ext_client_id == ext_client_id:
return sub
# ################################################################################################################################
def _write_log_sub_data(self, sub:'Subscription', out:'StringIO') -> 'None':
items = sorted(sub.to_dict().items())
_ = out.write('\n')
for key, value in items:
_ = out.write(' - {} {}'.format(key, value))
if key == 'creation_time':
_ = out.write('\n - creation_time_utc {}'.format(datetime_from_ms(value)))
_ = out.write('\n')
# ################################################################################################################################
def _log_subscriptions_dict(self, attr_name:'str', prefix:'str', title:'str') -> 'None':
    """ Renders one of the subscription-holding dict attributes (looked up by name)
    into a human-readable form and writes it to the main Zato logger.
    """
    out = StringIO()
    _ = out.write('\n')

    # The attribute is resolved dynamically - it maps sub_key (or topic name) either
    # to a single Subscription or to a collection that may contain Subscription objects.
    attr = getattr(self, attr_name) # type: anydict

    for sub_key, sub_data in sorted(attr.items()):
        sub_key = cast_('str', sub_key)
        _ = out.write('* {}\n'.format(sub_key))

        if isinstance(sub_data, Subscription):
            self._write_log_sub_data(sub_data, out)
        else:
            # A collection - dump each element, expanding Subscription objects in full
            sorted_sub_data = sorted(sub_data)
            for item in sorted_sub_data:
                if isinstance(item, Subscription):
                    self._write_log_sub_data(item, out)
                else:
                    item = cast_('any_', item)
                    _ = out.write(' - {}'.format(item))
                    _ = out.write('\n')

        # Blank line between entries for readability
        _ = out.write('\n')

    logger_zato.info('\n === %s (%s) ===\n %s', prefix, title, out.getvalue())
    out.close()
# ################################################################################################################################
def log_subscriptions_by_sub_key(self, title:'str', prefix:'str'='PubSub.subscriptions_by_sub_key') -> 'None':
    # Thread-safe dump of the sub_key -> Subscription mapping to the log
    with self.lock:
        self._log_subscriptions_dict('subscriptions_by_sub_key', prefix, title)

# ################################################################################################################################

def log_subscriptions_by_topic_name(self, title:'str', prefix:'str'='PubSub.subscriptions_by_topic') -> 'None':
    # Thread-safe dump of the topic name -> subscription list mapping to the log
    with self.lock:
        self._log_subscriptions_dict('subscriptions_by_topic', prefix, title)

# ################################################################################################################################

def has_sub_key(self, sub_key:'str') -> 'bool':
    # True if a subscription exists for the given sub_key
    with self.lock:
        return sub_key in self.subscriptions_by_sub_key

# ################################################################################################################################

def has_messages_in_backlog(self, sub_key:'str') -> 'bool':
    # True if any in-RAM messages are awaiting delivery for the given sub_key
    with self.lock:
        return self.sync_backlog.has_messages_by_sub_key(sub_key)
# ################################################################################################################################
def _len_subscribers(self, topic_name:'str') -> 'int':
    """ Low-level implementation of self.len_subscribers, must be called with self.lock held.
    """
    # Direct indexing - an unknown topic name raises KeyError here.
    return len(self.subscriptions_by_topic[topic_name])

# ################################################################################################################################

def len_subscribers(self, topic_name:'str') -> 'int':
    """ Returns the amount of subscribers for a given topic.
    """
    with self.lock:
        return self._len_subscribers(topic_name)

# ################################################################################################################################

def has_subscribers(self, topic_name:'str') -> 'bool':
    """ Returns True if input topic has at least one subscriber.
    """
    with self.lock:
        return self._len_subscribers(topic_name) > 0

# ################################################################################################################################

def has_topic_by_name(self, topic_name:'str') -> 'bool':
    # True if a topic with this name is known to the topic API
    with self.lock:
        return self.topic_api.has_topic_by_name(topic_name)

# ################################################################################################################################

def has_topic_by_id(self, topic_id:'int') -> 'bool':
    # True if a topic with this numeric ID is known to the topic API
    with self.lock:
        return self.topic_api.has_topic_by_id(topic_id)
# ################################################################################################################################
def get_endpoint_by_id(self, endpoint_id:'int') -> 'Endpoint':
    # Thread-safe lookup of an endpoint by its numeric ID
    with self.lock:
        return self.endpoint_api.get_by_id(endpoint_id)

# ################################################################################################################################

def get_endpoint_by_name(self, endpoint_name:'str') -> 'Endpoint':
    # Thread-safe lookup of an endpoint by its name
    with self.lock:
        return self.endpoint_api.get_by_name(endpoint_name)

# ################################################################################################################################

def get_endpoint_by_ws_channel_id(self, ws_channel_id:'int') -> 'Endpoint':
    # Thread-safe lookup of an endpoint by its WebSocket channel ID
    with self.lock:
        return self.endpoint_api.get_by_ws_channel_id(ws_channel_id)

# ################################################################################################################################

def get_endpoint_id_by_sec_id(self, sec_id:'int') -> 'int':
    # Maps a security definition ID to its endpoint's ID
    with self.lock:
        return self.endpoint_api.get_id_by_sec_id(sec_id)

# ################################################################################################################################

def get_endpoint_id_by_ws_channel_id(self, ws_channel_id:'int') -> 'intnone':
    # Maps a WebSocket channel ID to its endpoint's ID, or None per the endpoint API
    with self.lock:
        return self.endpoint_api.get_id_by_ws_channel_id(ws_channel_id)

# ################################################################################################################################

def get_endpoint_id_by_service_id(self, service_id:'int') -> 'int':
    # Maps a service ID to its endpoint's ID
    with self.lock:
        return self.endpoint_api.get_id_by_service_id(service_id)

# ################################################################################################################################

def create_endpoint(self, config:'anydict') -> 'None':
    # Thread-safe creation of a new endpoint from its configuration dict
    with self.lock:
        self.endpoint_api.create(config)
# ################################################################################################################################
def delete_endpoint(self, endpoint_id:'int') -> 'None':
with self.lock:
# First, delete the endpoint object ..
self.endpoint_api.delete(endpoint_id)
# .. a list of of sub_keys for this endpoint ..
sk_list = []
# .. find all the sub_keys this endpoint held ..
for sub in self.subscriptions_by_sub_key.values():
if sub.endpoint_id == endpoint_id:
sk_list.append(sub.sub_key)
# .. delete all references to the sub_keys found ..
for sub_key in sk_list: # type: ignore
# .. first, stop the delivery tasks ..
_ = self._delete_subscription_by_sub_key(sub_key, ignore_missing=True)
# ################################################################################################################################
def edit_endpoint(self, config:'stranydict') -> 'None':
    # An edit is implemented as delete-and-recreate, both steps under a single lock
    with self.lock:
        self.endpoint_api.delete(config['id'])
        self.endpoint_api.create(config)

# ################################################################################################################################

def wait_for_endpoint(self, endpoint_name:'str', timeout:'int'=600) -> 'bool':
    # Blocks until the endpoint exists (or the timeout elapses) - note that no lock
    # is taken here, the polling helper simply calls the getter repeatedly.
    return wait_for_dict_key_by_get_func(self.endpoint_api.get_by_name, endpoint_name, timeout, interval=0.5)

# ################################################################################################################################

def get_endpoint_impl_getter(self, endpoint_type:'str') -> 'callable_':
    # Returns the per-endpoint-type function used to look up endpoint implementations
    with self.lock:
        return self.endpoint_api.get_impl_getter(endpoint_type)

# ################################################################################################################################

def set_endpoint_impl_getter(self, endpoint_type:'str', impl_getter:'callable_') -> 'None':
    # Registers the per-endpoint-type function used to look up endpoint implementations
    with self.lock:
        return self.endpoint_api.set_impl_getter(endpoint_type, impl_getter)
# ################################################################################################################################
def get_topic_id_by_name(self, topic_name:'str') -> 'int':
    # Thread-safe mapping of a topic name to its numeric ID
    with self.lock:
        return self.topic_api.get_topic_id_by_name(topic_name)

# ################################################################################################################################

def get_non_gd_topic_depth(self, topic_name:'str') -> 'int':
    """ Returns the depth of non-GD (in-RAM) messages for a given topic by its name.
    """
    with self.lock:
        topic_id = self.topic_api.get_topic_id_by_name(topic_name)
        return self.sync_backlog.get_topic_depth(topic_id)

# ################################################################################################################################

def get_topic_by_name(self, topic_name:'str') -> 'Topic':
    # Thread-safe variant of get_topic_by_name_no_lock
    with self.lock:
        return self.get_topic_by_name_no_lock(topic_name)

# ################################################################################################################################

def get_topic_by_name_no_lock(self, topic_name:'str') -> 'Topic':
    # No locking here - for callers that already hold self.lock
    return self.topic_api.get_topic_by_name(topic_name)

# ################################################################################################################################

def get_topic_by_id(self, topic_id:'int') -> 'Topic':
    # Thread-safe lookup of a topic by its numeric ID
    with self.lock:
        return self.topic_api.get_topic_by_id(topic_id)
# ################################################################################################################################
def get_topic_name_by_sub_key(self, sub_key:'str') -> 'str':
    # Name of the topic the given sub_key is subscribed to - raises KeyError if unknown
    with self.lock:
        return self._get_subscription_by_sub_key(sub_key).topic_name

# ################################################################################################################################

def get_target_service_name_by_topic_id(self, topic_id:'int') -> 'strnone':
    # The optional service that messages published to this topic are delivered to,
    # or None if the topic has no such service configured.
    with self.lock:
        topic = self.topic_api.get_topic_by_id(topic_id)
        return topic.config.get('target_service_name')

# ################################################################################################################################

def get_sub_key_to_topic_name_dict(self, sub_key_list:'strlist') -> 'strstrdict':
    # Maps each input sub_key to the name of its topic, all under a single lock
    out = {} # type: strstrdict
    with self.lock:
        for sub_key in sub_key_list:
            out[sub_key] = self._get_subscription_by_sub_key(sub_key).topic_name

    return out

# ################################################################################################################################

def _get_topic_by_sub_key(self, sub_key:'str') -> 'Topic':
    # Low-level lookup - must be called with self.lock held
    sub = self._get_subscription_by_sub_key(sub_key)
    return self.topic_api.get_topic_by_name(sub.topic_name)

# ################################################################################################################################

def get_topic_by_sub_key(self, sub_key:'str') -> 'Topic':
    # Thread-safe variant of _get_topic_by_sub_key
    with self.lock:
        return self._get_topic_by_sub_key(sub_key)

# ################################################################################################################################

def get_topic_list_by_sub_key_list(self, sk_list:'strlist') -> 'strtopicdict':
    # Maps each input sub_key to its Topic object, all under a single lock
    out = cast_('strtopicdict', {})
    with self.lock:
        for sub_key in sk_list:
            out[sub_key] = self._get_topic_by_sub_key(sub_key)

    return out
# ################################################################################################################################
def edit_subscription(self, config:'stranydict') -> 'None':
    """ Updates an existing subscription's configuration in place and, when a delivery
    task exists for it on this server, pushes the new configuration to that task too.
    Raises KeyError if the sub_key is unknown.
    """
    # Make sure we are the only ones updating the configuration now ..
    with self.lock:

        # Reusable ..
        sub_key = config['sub_key']

        # .. such a subscription should exist ..
        sub = self._get_subscription_by_sub_key(config['sub_key'])

        # .. update the whole config of the subscription object in place ..
        for key, value in config.items():
            sub.config[key] = value

        # .. now, try obtain the PubSub tool responsible for this subscription ..
        # .. and trigger an update of the underlying delivery task's configuration as well, ..
        # .. note, however, that there may be no such ps_tool when we edit a WebSockets-based subscription ..
        # .. and the WebSocket client is not currently connected.
        if ps_tool := self._get_pubsub_tool_by_sub_key(sub_key):
            ps_tool.trigger_update_task_sub_config(sub_key)
# ################################################################################################################################
def _add_subscription(self, config:'stranydict') -> 'None':
    """ Low-level implementation of self.add_subscription.
    Must be called with self.lock held.
    """
    sub = Subscription(config)

    # Register under the topic name, creating the per-topic list on first use ..
    self.subscriptions_by_topic.setdefault(config['topic_name'], []).append(sub)

    logger_zato.info('Added sub `%s` -> `%s`', config['sub_key'], config['topic_name'])

    # .. and register the same object under its sub_key as well.
    self.subscriptions_by_sub_key[config['sub_key']] = sub
# ################################################################################################################################
def add_subscription(self, config:'stranydict') -> 'None':
    """ Creates a Subscription object and an associated mapping of the subscription to input topic.
    """
    with self.lock:

        # Creates a subscription ..
        self._add_subscription(config)

        # .. triggers a relevant hook, if any is configured.
        hook = self.get_on_subscribed_hook(config['sub_key'])
        if hook:
            _ = self.invoke_on_subscribed_hook(hook, config['topic_id'], config['sub_key'])
# ################################################################################################################################
def _delete_subscription_from_subscriptions_by_topic(self, sub:'Subscription') -> 'None':
# This is a list of all the subscriptions related to a given topic,
# it may be potentially empty if we are trying to delete subscriptions
# for a topic that has just been deleted ..
sk_list = self.get_subscriptions_by_topic(sub.topic_name)
# .. try to remove the subscription object from each list ..
try:
sk_list.remove(sub)
except ValueError:
# .. it is fine, this list did not contain the sub object.
pass
# ################################################################################################################################
def clear_task(self, sub_key:'str') -> 'None':
    """ Clears the delivery task for the given sub_key, provided one exists on this server. """
    with self.lock:
        # The ps_tool may be missing, e.g. when the sub_key points to a WebSocket
        # that is not connected at the moment - then there is nothing to clear.
        ps_tool = self._get_pubsub_tool_by_sub_key(sub_key)
        if ps_tool:
            ps_tool.clear_task(sub_key)
# ################################################################################################################################
def _delete_subscription_by_sub_key(
    self,
    sub_key, # type: str
    ignore_missing, # type: bool
    _invalid=object() # type: any_
) -> 'subnone':
    """ Deletes a subscription from the list of subscriptions. By default, it is not an error to call
    the method with an invalid sub_key. Must be invoked with self.lock held.
    Returns the deleted Subscription, or None when there was none and ignore_missing is True.
    """
    sub = self.subscriptions_by_sub_key.get(sub_key, _invalid) # type: Subscription

    #
    # There is no such subscription and we may either log it or raise an exception ..
    #
    if sub is _invalid:

        # If this is on, we only log information about the event ..
        if ignore_missing:
            logger.info('Could not find sub_key to delete `%s`', sub_key)
            # Fixed: previously the private _invalid sentinel itself was returned here,
            # contradicting the declared 'subnone' return type.
            return None

        # .. otherwise, we raise an entire exception.
        else:
            # Fixed: the message used to be passed as ('fmt', sub_key) - a tuple that
            # was never interpolated - instead of a formatted string.
            raise KeyError('No such sub_key `{}`'.format(sub_key))

    #
    # If we are here, it means that the subscription is valid
    #
    else:

        # Now, delete the subscription
        _ = self.subscriptions_by_sub_key.pop(sub_key, _invalid)

        # Delete the subscription's sk_server first because it depends on the subscription
        # for sk_server table formatting.
        self.delete_sub_key_server(sub_key, sub_pattern_matched=sub.sub_pattern_matched)

        # Stop and remove the task for this sub_key ..
        if ps_tool := self._get_pubsub_tool_by_sub_key(sub_key):
            ps_tool.delete_by_sub_key(sub_key)

        # Remove the subscription from its topic's list of subscriptions
        self._delete_subscription_from_subscriptions_by_topic(sub)

        # Remove the mapping from the now-removed sub_key to its ps_tool
        self._delete_pubsub_tool_by_sub_key(sub_key)

        # Log what we have done ..
        logger.info('Deleted subscription object `%s` (%s)', sub.sub_key, sub.topic_name)

        return sub
# ################################################################################################################################
def create_subscription_object(self, config:'stranydict') -> 'None':
    """ Low-level implementation of self.subscribe.

    NOTE(review): the docstring used to say this must be called with self.lock held, yet the method
    acquires self.lock itself - presumably self.lock is reentrant; confirm.
    """
    with self.lock:

        # It's possible that we already have this subscription - this may happen if we are the server that originally
        # handled the request to create the subscription and we are now called again through
        # on_broker_msg_PUBSUB_SUBSCRIPTION_CREATE. In such a case, we can just ignore it.
        if not self.has_sub_key(config['sub_key']):
            self._add_subscription(config)

            # Is this a WebSockets-based subscription?
            is_wsx = config['endpoint_type'] == PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id

            # .. we do not start dedicated tasks for WebSockets - they are all dynamic without a fixed server ..
            if is_wsx:
                pass

            # .. for other endpoint types, we create and start a delivery task here ..
            else:

                # We have a matching server..
                if config['cluster_id'] == self.cluster_id and config['server_id'] == self.server.id:

                    # .. but make sure only the first worker of this server will start delivery tasks, not all of them.
                    if self.server.is_first_worker:

                        # Store in shared RAM information that our process handles this key
                        if self.server.has_posix_ipc:
                            self.server.server_startup_ipc.set_pubsub_pid(self.server.pid)

                        config['server_pid'] = self.server.pid
                        config['server_name'] = self.server.name

                        # Starts the delivery task and notifies other servers that we are the one
                        # to handle deliveries for this particular sub_key.
                        _ = self.invoke_service('zato.pubsub.delivery.create-delivery-task', config)

                    # We are not the first worker of this server and the first one must have already stored
                    # in RAM the mapping of sub_key -> server_pid, so we can safely read it here to add
                    # a subscription server.
                    else:
                        if self.server.has_posix_ipc:
                            config['server_pid'] = self.server.server_startup_ipc.get_pubsub_pid()
                        config['server_name'] = self.server.name
                        self.set_sub_key_server(config)
# ################################################################################################################################
def create_topic_object(self, config:'anydict') -> 'None':
    """ Thread-safe creation of an in-RAM topic object out of its configuration dict.
    """
    with self.lock:
        api = self.topic_api
        api.create_topic_object(config)
# ################################################################################################################################
def create_topic_for_service(self, service_name:'str', topic_name:'str') -> 'None':
    """ Creates an internal topic whose messages will be targeted at the named service.
    """
    # Internal topics for services always carry the target service's name in their config.
    self.create_topic(topic_name, is_internal=True, target_service_name=service_name)
    logger.info('Created topic `%s` for service `%s`', topic_name, service_name)
# ################################################################################################################################
def wait_for_topic(self, topic_name:'str', timeout:'int'=600) -> 'bool':
    """ Blocks until the named topic exists, returning True if it appeared within timeout seconds.
    """
    get_func = self.topic_api.get_topic_by_name
    return wait_for_dict_key_by_get_func(get_func, topic_name, timeout, interval=0.01)
# ################################################################################################################################
def delete_topic(self, topic_id:'int') -> 'None':
    """ Deletes a topic along with every subscription attached to it.
    """
    with self.lock:
        topic_name = self.topic_api.get_topic_by_id(topic_id).name

        # Removing the topic hands back the subscriptions it had - delete each of them too,
        # tolerating keys that are already gone.
        for sub in self.topic_api.delete_topic(topic_id, topic_name): # type: Subscription
            _ = self._delete_subscription_by_sub_key(sub.sub_key, ignore_missing=True)
# ################################################################################################################################
def edit_topic(self, del_name:'str', config:'anydict') -> 'None':
    """ Updates a topic by recreating it under its new configuration while carrying
    over the subscriptions previously attached under the old name.
    """
    with self.lock:
        # Detach existing subscriptions from the old name ..
        existing_subs = self.subscriptions_by_topic.pop(del_name, [])

        # .. replace the topic object itself ..
        _ = self.topic_api.delete_topic(config['id'], del_name)
        self.topic_api.create_topic_object(config)

        # .. and re-attach the subscriptions under the new name.
        self.subscriptions_by_topic[config['name']] = existing_subs
# ################################################################################################################################
def set_config_for_service_subscription(
    self,
    sub_key, # type: str
    _endpoint_type=_end_srv_id # type: str
) -> 'None':
    """ Registers sub_key with the service-delivery pubsub tool and records the current
    server process as the one handling deliveries for this subscription.
    """
    # Attach the key to the service-level delivery tool, or warn if the tool is absent.
    if self.service_pubsub_tool:
        self.service_pubsub_tool.add_sub_key(sub_key)
    else:
        msg = 'No self.service_pubsub_tool to add sub key to (%s)'
        logger.warning(msg, sub_key)
        logger_zato.warning(msg, sub_key)

    # Point this sub_key at the current server and process.
    sk_config = {
        'sub_key': sub_key,
        'cluster_id': self.server.cluster_id,
        'server_name': self.server.name,
        'server_pid': self.server.pid,
        'endpoint_type': _endpoint_type,
    }
    self.set_sub_key_server(sk_config)
# ################################################################################################################################
def is_allowed_pub_topic(self, name:'str', security_id:'int'=0, ws_channel_id:'int'=0) -> 'str | bool':
    """ Tells whether the given security definition or WSX channel may publish to the named topic.
    """
    # The endpoint API owns the pattern-matching logic - delegate to it.
    api = self.endpoint_api
    return api.is_allowed_pub_topic(name=name, security_id=security_id, ws_channel_id=ws_channel_id)
# ################################################################################################################################
def is_allowed_pub_topic_by_endpoint_id(self, name:'str', endpoint_id:'int') -> 'str | bool':
    """ Tells whether the endpoint pointed to by endpoint_id may publish to the named topic.
    """
    api = self.endpoint_api
    return api.is_allowed_pub_topic_by_endpoint_id(name=name, endpoint_id=endpoint_id)
# ################################################################################################################################
def is_allowed_sub_topic(self, name:'str', security_id:'int'=0, ws_channel_id:'int'=0) -> 'str | bool':
    """ Tells whether the given security definition or WSX channel may subscribe to the named topic.
    """
    api = self.endpoint_api
    return api.is_allowed_sub_topic(name=name, security_id=security_id, ws_channel_id=ws_channel_id)
# ################################################################################################################################
def is_allowed_sub_topic_by_endpoint_id(self, name:'str', endpoint_id:'int') -> 'str | bool':
    """ Tells whether the endpoint pointed to by endpoint_id may subscribe to the named topic.
    """
    api = self.endpoint_api
    return api.is_allowed_sub_topic_by_endpoint_id(name=name, endpoint_id=endpoint_id)
# ################################################################################################################################
def get_topics(self) -> 'inttopicdict':
    """ Returns all topics in existence, keyed by topic ID.
    """
    with self.lock:
        out = self.topic_api.get_topics()
    return out
# ################################################################################################################################
def get_sub_topics_for_endpoint(self, endpoint_id:'int') -> 'topiclist':
    """ Returns all topics to which endpoint_id can subscribe.
    """
    with self.lock:
        out = self.topic_api.get_sub_topics_for_endpoint(endpoint_id)
    return out
# ################################################################################################################################
def _is_subscribed_to(self, endpoint_id:'int', topic_name:'str') -> 'bool':
""" Low-level implementation of self.is_subscribed_to.
"""
for sub in self.subscriptions_by_topic.get(topic_name, []):
if sub.endpoint_id == endpoint_id:
return True
else:
return False
# ################################################################################################################################
def is_subscribed_to(self, endpoint_id:'int', topic_name:'str') -> 'bool':
    """ Returns True if the endpoint is subscribed to the named topic.
    """
    with self.lock:
        result = self._is_subscribed_to(endpoint_id, topic_name)
    return result
# ################################################################################################################################
def _delete_pubsub_tool_by_sub_key(self, sub_key:'str') -> 'None':
_ = self.pubsub_tool_by_sub_key.pop(sub_key, None)
# ################################################################################################################################
def _get_pubsub_tool_by_sub_key(self, sub_key:'str') -> 'PubSubTool | None':
return self.pubsub_tool_by_sub_key.get(sub_key)
# ################################################################################################################################
def get_pubsub_tool_by_sub_key(self, sub_key:'str') -> 'PubSubTool | None':
    """ Thread-safe lookup of the PubSubTool handling sub_key.
    """
    with self.lock:
        tool = self._get_pubsub_tool_by_sub_key(sub_key)
    return tool
# ################################################################################################################################
def add_wsx_client_pubsub_keys(
    self,
    session, # type: any_
    sql_ws_client_id, # type: int
    sub_key, # type: str
    channel_name, # type: str
    pub_client_id, # type: str
    wsx_info # type: anydict
) -> 'None':
    """ Adds to SQL information that a given WSX client handles messages for sub_key.
    This information is transient - it will be dropped each time a WSX client disconnects.

    Only session.add is called here - committing is presumably the caller's responsibility; confirm.
    """
    # Update state in SQL
    ws_sub_key = WebSocketClientPubSubKeys()
    ws_sub_key.client_id = sql_ws_client_id
    ws_sub_key.sub_key = sub_key
    ws_sub_key.cluster_id = self.cluster_id
    session.add(ws_sub_key)

    # Update in-RAM state of workers by broadcasting the sub_key -> server mapping via the broker
    self.broker_client.publish({
        'action': BROKER_MSG_PUBSUB.SUB_KEY_SERVER_SET.value,
        'cluster_id': self.cluster_id,
        'server_name': self.server.name,
        'server_pid': self.server.pid,
        'sub_key': sub_key,
        'channel_name': channel_name,
        'pub_client_id': pub_client_id,
        'endpoint_type': PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id,
        'wsx_info': wsx_info,
        'source': 'pubsub.PubSub',
        'source_server_name': self.server.name,
        'source_server_pid': self.server.pid,
    })
# ################################################################################################################################
def format_sk_servers(self, default:'str'='---', sub_pattern_matched:'str'=default_sub_pattern_matched) -> 'str':
    """ Renders a text table of all known sub_key servers, one row per sub_key,
    sorted newest-first by creation time. Used for logging only.
    """

    # Prepare the table
    len_columns = len(self.sk_server_table_columns)

    table = Texttable()
    _ = table.set_cols_width(self.sk_server_table_columns)
    _ = table.set_cols_dtype(['t'] * len_columns)
    _ = table.set_cols_align(['c'] * len_columns)
    _ = table.set_cols_valign(['m'] * len_columns)

    # Add headers
    rows = [['#', 'created', 'name', 'pid', 'channel_name', 'sub_key']] # type: anylist

    servers = list(self.sub_key_servers.values())
    servers.sort(key=attrgetter('creation_time', 'channel_name', 'sub_key'), reverse=True)

    for idx, item in enumerate(servers, 1):

        # Let the basic information contain both the sub_key and the pattern matched during subscription.
        # Note that the sub_pattern_matched parameter is deliberately reused (rebound) across iterations
        # as a fallback when a subscription cannot be found.
        sub = self.get_subscription_by_sub_key(item.sub_key)
        if sub:
            sub_pattern_matched = sub.sub_pattern_matched
        else:
            sub_pattern_matched = sub_pattern_matched or default_sub_pattern_matched
        basic_info = f'{item.sub_key} -> {sub_pattern_matched}'
        sub_key_info = [basic_info]

        # For WSX connections, append extra per-connection details to the sub_key column.
        if item.wsx_info:
            for name in ('swc', 'name', 'pub_client_id', 'peer_fqdn', 'forwarded_for_fqdn'):

                # NOTE(review): name is always a str literal here so this bytes branch looks unreachable - confirm.
                if isinstance(name, bytes):
                    name = name.decode('utf8')

                value = item.wsx_info[name]
                if isinstance(value, bytes):
                    value = value.decode('utf8')
                if isinstance(value, str):
                    value = value.strip()
                sub_key_info.append('{}: {}'.format(name, value))

        rows.append([
            idx,
            item.creation_time,
            item.server_name,
            item.server_pid,
            item.channel_name or default,
            '\n'.join(sub_key_info),
        ])

    # Add all rows to the table
    _ = table.add_rows(rows)

    # And return already formatted output
    return cast_('str', table.draw())
# ################################################################################################################################
def _set_sub_key_server(
    self,
    config, # type: stranydict
    *,
    ignore_missing_sub_key, # type: bool
    _endpoint_type=PUBSUB.ENDPOINT_TYPE # type: type_[PUBSUB.ENDPOINT_TYPE]
) -> 'None':
    """ Low-level implementation of self.set_sub_key_server - must be called with self.lock held.
    Maps config['sub_key'] to a new SubKeyServer object and logs the assignment.
    """
    try:
        # Try to see if we have such a subscription ..
        sub = self._get_subscription_by_sub_key(config['sub_key'])
    except KeyError:
        # .. if we do not, it may be because it was already deleted
        # before we have been invoked and this may be potentially ignored.
        if not ignore_missing_sub_key:
            raise
    else:
        sub_key:'str' = config['sub_key']
        sk_server = SubKeyServer(config)
        self.sub_key_servers[sub_key] = sk_server

        # Enrich the input config with details derived from the subscription ..
        config['endpoint_id'] = sub.endpoint_id

        # NOTE(review): this stores whatever endpoint_api.get_by_id returns, which from its name
        # looks like an endpoint object rather than its name - confirm intent.
        config['endpoint_name'] = self.endpoint_api.get_by_id(sub.endpoint_id)

        endpoint_type = config['endpoint_type']

        # Flags used below for logging - is this a WebSockets- or a service-based subscription?
        config['wsx'] = int(endpoint_type == _endpoint_type.WEB_SOCKETS.id)
        config['srv'] = int(endpoint_type == _endpoint_type.SERVICE.id)

        server_pid:'str' = config['server_pid']
        server_name:'str' = config['server_name']

        is_wsx:'int' = config['wsx']
        is_service:'int' = config['srv']

        pid_info:'str' = ' ' if config['server_pid'] else ' (no PID) '

        # This is basic information that we always log ..
        msg = f'Set sk_server{pid_info}for sub_key `{sub_key}` (w/s:{is_wsx}/{is_service}) - {server_name}:{server_pid}'
        msg += f', len-sks:{len(self.sub_key_servers)}'

        # .. optionally, we log the full connection table too ..
        if as_bool(os.environ.get(PUBSUB.Env.Log_Table)):
            sks_table = self.format_sk_servers()
            msg += f', current sk_servers:\n{sks_table}'

        logger.info(msg)
        logger_zato.info(msg)
# ################################################################################################################################
def set_sub_key_server(self, config:'anydict') -> 'None':
    """ Thread-safe wrapper around _set_sub_key_server; a missing sub_key is tolerated.
    """
    with self.lock:
        self._set_sub_key_server(config, ignore_missing_sub_key=True)
# ################################################################################################################################
def _get_sub_key_server(self, sub_key:'str', default:'any_'=None) -> 'sksnone': # type: ignore[valid-type]
return self.sub_key_servers.get(sub_key, default)
# ################################################################################################################################
def get_sub_key_server(self, sub_key:'str', default:'any_'=None) -> 'sksnone': # type: ignore[valid-type]
    """ Thread-safe lookup of the SubKeyServer for sub_key.
    """
    with self.lock:
        out = self._get_sub_key_server(sub_key, default)
    return out
# ################################################################################################################################
def get_delivery_server_by_sub_key(self, sub_key:'str', needs_lock:'bool'=True) -> 'sksnone': # type: ignore[valid-type]
    """ Returns the server entry that runs the delivery task for sub_key, optionally skipping the lock
    when the caller already holds it.
    """
    if not needs_lock:
        return self._get_sub_key_server(sub_key)
    with self.lock:
        return self._get_sub_key_server(sub_key)
# ################################################################################################################################
def _delete_sub_key_server(self, sub_key:'str', sub_pattern_matched:'str'='') -> 'None':
    """ Low-level deletion of a sub_key -> server mapping; must be called with self.lock held.
    Unknown keys are only logged, not treated as errors.
    """
    sub_key_server = self.sub_key_servers.get(sub_key)

    # Nothing to delete - just note the fact in the log and leave.
    if not sub_key_server:
        logger.info('Could not find sub_key `%s` while deleting sub_key server, current `%s` `%s`',
            sub_key, self.server.name, self.server.pid)
        return

    msg = 'Deleting sk_server for sub_key `%s`, was `%s:%s`'
    logger.info(msg, sub_key, sub_key_server.server_name, sub_key_server.server_pid)
    logger_zato.info(msg, sub_key, sub_key_server.server_name, sub_key_server.server_pid)

    _ = self.sub_key_servers.pop(sub_key, None)

    # Optionally, log the whole table after the deletion.
    if as_bool(os.environ.get(PUBSUB.Env.Log_Table)):
        sks_table = self.format_sk_servers(sub_pattern_matched=sub_pattern_matched)
        msg_sks = 'Current sk_servers after deletion of `%s`:\n%s'
        logger.info(msg_sks, sub_key, sks_table)
        logger_zato.info(msg_sks, sub_key, sks_table)
# ################################################################################################################################
def delete_sub_key_server(self, sub_key:'str', sub_pattern_matched:'str'='') -> 'None':
    """ Thread-safe deletion of a sub_key -> server mapping.
    """
    with self.lock:
        self._delete_sub_key_server(sub_key, sub_pattern_matched)
# ################################################################################################################################
def remove_ws_sub_key_server(self, config:'stranydict') -> 'None':
    """ Called after a WSX client disconnects - provides a list of sub_keys that it handled
    which we must remove from our config because without this client they are no longer usable (until the client reconnects).
    """
    with self.lock:
        for sub_key in config['sub_key_list']:
            _ = self.sub_key_servers.pop(sub_key, None)

            # ->> Compare this loop with the .pop call above
            # NOTE(review): after the .pop above no remaining entry should be keyed by sub_key,
            # so this loop only matters if an entry was stored under a key different from its own
            # .sub_key attribute - confirm whether it is still needed.
            for server_info in self.sub_key_servers.values():
                if server_info.sub_key == sub_key:
                    del self.sub_key_servers[sub_key]
                    break
# ################################################################################################################################
def get_server_pid_for_sub_key(self, server_name:'str', sub_key:'str') -> 'intnone':
    """ Invokes a named server on current cluster and asks it for PID of one its processes that handles sub_key.
    Returns that PID or None if the information could not be obtained.
    """
    try:
        invoker = self.server.rpc.get_invoker_by_server_name(server_name)
        response = invoker.invoke('zato.pubsub.delivery.get-server-pid-for-sub-key', {
            'sub_key': sub_key,
        }) # type: anydict
    except Exception:
        # Any invocation error means the PID cannot be learned - log it and fall through,
        # which returns None implicitly.
        msg = 'Could not invoke server `%s` to get PID for sub_key `%s`, e:`%s`'
        exc_formatted = format_exc()
        logger.warning(msg, server_name, sub_key, exc_formatted)
        logger_zato.warning(msg, server_name, sub_key, exc_formatted)
    else:
        # Unwrapped only on success - a KeyError raised here would propagate to the caller.
        return response['response']['server_pid']
# ################################################################################################################################
def add_missing_server_for_sub_key(
    self,
    sub_key, # type: str
    is_wsx, # type: bool
    _wsx=PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id # type: str
) -> 'None':
    """ Adds to self.sub_key_servers information from ODB about which server handles input sub_key.
    Must be called with self.lock held.
    """
    # Ask the SQL layer which server, if any, owns the delivery task for this key.
    data = self.sql_api.get_delivery_server_for_sub_key(sub_key, is_wsx)

    if not data:
        if self.log_if_deliv_server_not_found:
            # WSX misses may be silenced separately via their own flag.
            if is_wsx and (not self.log_if_wsx_deliv_server_not_found):
                return
            msg = 'Could not find a delivery server in ODB for sub_key `%s` (wsx:%s)'
            logger.info(msg, sub_key, is_wsx)
    else:

        # For WSX subscriptions the endpoint type is always the WebSockets one,
        # regardless of what the ODB row says.
        endpoint_type = _wsx if is_wsx else data.endpoint_type

        # This is common config that we already know is valid but on top of it
        # we will try to the server found and ask about PID that handles messages for sub_key.
        config = {
            'sub_key': sub_key,
            'cluster_id': data.cluster_id,
            'server_name': data.server_name,
            'endpoint_type': endpoint_type,
        } # type: anydict

        # Guaranteed to either set PID or None
        config['server_pid'] = self.get_server_pid_for_sub_key(data.server_name, sub_key)

        # OK, set up the server with what we found above
        self._set_sub_key_server(config, ignore_missing_sub_key=False)
# ################################################################################################################################
def get_task_servers_by_sub_keys(self, sub_key_data:'dictlist') -> 'anytuple':
    """ Returns a two-element tuple: a dictionary keyed by (server_name, server_pid, pub_client_id,
    channel_name, endpoint_type) tuples whose values are lists of sub_keys handled by that server,
    and a list of sub_keys for which no currently running server could be found.
    """
    with self.lock:
        found = {} # type: anydict
        not_found = [] # type: anylist

        for elem in sub_key_data:

            sub_key = elem['sub_key']
            is_wsx = elem['is_wsx']

            # If we do not have a server for this sub_key we first attempt to find
            # if there is an already running server that handles it but we do not know it yet.
            # It may happen if our server is down, another server (for this sub_key) boots up
            # and notifies other servers about its existence and the fact that we handle this sub_key
            # but we are still down so we never receive this message. In this case we attempt to look up
            # the target server in ODB and then invoke it to get the PID of worker process that handles
            # sub_key, populating self.sub_key_servers as we go.
            if not sub_key in self.sub_key_servers:
                self.add_missing_server_for_sub_key(sub_key, is_wsx)

            # At this point, if there is any information about this sub_key at all,
            # no matter if its server is running or not, info will not be None.
            info = self.sub_key_servers.get(sub_key)

            # We report that a server is found only if we know the server itself and its concrete PID,
            # which means that the server is currently running. Checking for server alone is not enough
            # because we may have read this information from self.add_missing_server_for_sub_key
            # and yet, self.get_server_pid_for_sub_key may have returned no information implying
            # that the server, even if found in ODB in principle, is still currently not running.
            if info and info.server_pid:
                _key = (info.server_name, info.server_pid, info.pub_client_id, info.channel_name, info.endpoint_type)
                _info = found.setdefault(_key, [])
                _info.append(sub_key)
            else:
                not_found.append(sub_key)

        return found, not_found
# ################################################################################################################################
def get_sql_messages_by_sub_key(self, *args:'any_', **kwargs:'any_') -> 'anytuple':
    """ Returns all SQL messages queued up for all keys from sub_key_list.
    """
    sql_api = self.sql_api
    return sql_api.get_sql_messages_by_sub_key(*args, **kwargs)
# ################################################################################################################################
def get_initial_sql_msg_ids_by_sub_key(self, *args:'any_', **kwargs:'any_') -> 'anytuple':
    """ Returns IDs of the initial batch of SQL messages for the given sub_keys (delegates to the SQL API).
    """
    sql_api = self.sql_api
    return sql_api.get_initial_sql_msg_ids_by_sub_key(*args, **kwargs)
# ################################################################################################################################
def get_sql_messages_by_msg_id_list(self, *args:'any_', **kwargs:'any_') -> 'anytuple':
    """ Returns SQL messages matching the given list of message IDs (delegates to the SQL API).
    """
    sql_api = self.sql_api
    return sql_api.get_sql_messages_by_msg_id_list(*args, **kwargs)
# ################################################################################################################################
def confirm_pubsub_msg_delivered(self, *args:'any_', **kwargs:'any_') -> 'None':
    """ Sets in SQL delivery status of a given message to True.
    """
    sql_api = self.sql_api
    sql_api.confirm_pubsub_msg_delivered(*args, **kwargs)
# ################################################################################################################################
def store_in_ram(
    self,
    cid, # type: str
    topic_id, # type: int
    topic_name, # type: str
    sub_keys, # type: strlist
    non_gd_msg_list, # type: dictlist
    error_source='', # type: str
    _logger=logger # type: logging.Logger
) -> 'None':
    """ Stores in RAM up to input non-GD messages for each sub_key. A backlog queue for each sub_key
    cannot be longer than topic's max_depth_non_gd and overflowed messages are not kept in RAM.
    They are not lost altogether though, because, if enabled by topic's use_overflow_log, all such messages
    go to disk (or to another location that logger_overflown is configured to use).
    """
    _logger.info('Storing in RAM. CID:`%r`, topic ID:`%r`, name:`%r`, sub_keys:`%r`, ngd-list:`%r`, e:`%s`',
        cid, topic_id, topic_name, sub_keys, [elem['pub_msg_id'] for elem in non_gd_msg_list], error_source)

    with self.lock:

        # Store the non-GD messages in backlog ..
        topic = self.topic_api.get_topic_by_id(topic_id)
        self.sync_backlog.add_messages(cid, topic_id, topic_name, topic.max_depth_non_gd, sub_keys, non_gd_msg_list)

        # .. and set a flag to signal that there are some available.
        # NOTE(review): the positional flags (False, True) presumably mean is_gd=False, value=True -
        # confirm against _set_sync_has_msg's signature.
        self._set_sync_has_msg(topic_id, False, True, 'PubSub.store_in_ram ({})'.format(error_source))
# ################################################################################################################################
def unsubscribe(self, topic_sub_keys:'strlistdict') -> 'None':
    """ Removes subscriptions for all input sub_keys. Input topic_sub_keys is a dictionary keyed by topic_name,
    and each value is a list of sub_keys, possibly one-element long.
    """
    with self.lock:
        for topic_name, sub_keys in topic_sub_keys.items():

            # We receive topic_names on input but in-RAM backlog requires topic IDs.
            topic_id = self.topic_api.get_topic_id_by_name(topic_name)

            # Delete subscriptions, and any related messages, from RAM
            self.sync_backlog.unsubscribe(topic_id, topic_name, sub_keys)

            # Delete subscription metadata from local pubsub, note that we use .get
            # instead of deleting directly because this dictionary will be empty
            # right after a server starts but before any client for that topic (such as WSX) connects to it.
            subscriptions_by_topic = self.subscriptions_by_topic.get(topic_name, [])

            # Iterate over a shallow copy so removing items does not disturb the iteration.
            for sub in subscriptions_by_topic[:]:
                if sub.sub_key in sub_keys:
                    subscriptions_by_topic.remove(sub)

            for sub_key in sub_keys:

                # Remove mappings between sub_keys and sub objects but keep the subscription object around
                # because an unsubscribe hook may need it.
                deleted_sub = self._delete_subscription_by_sub_key(sub_key, ignore_missing=True)

                # Find and stop all delivery tasks if we are the server that handles them
                sub_key_server = self.sub_key_servers.get(sub_key)
                if sub_key_server:

                    _cluster_id = sub_key_server.cluster_id
                    _server_name = sub_key_server.server_name
                    _server_pid = sub_key_server.server_pid

                    cluster_id = self.server.cluster_id
                    server_name = self.server.name
                    server_pid = self.server.pid

                    # If we are the server that handles this particular sub_key ..
                    if _cluster_id == cluster_id and _server_name == server_name and _server_pid == server_pid:

                        # .. then find the pubsub_tool that actually does it ..
                        for pubsub_tool in self.pubsub_tools:
                            if pubsub_tool.handles_sub_key(sub_key):

                                # .. stop the delivery task ..
                                pubsub_tool.remove_sub_key(sub_key)

                                # and remove the mapping of sub_key -> pubsub_tool ..
                                del self.pubsub_tool_by_sub_key[sub_key]

                                # .. and invoke the unsubscription hook, if any is given.
                                hook = self.get_on_unsubscribed_hook(sub=deleted_sub)
                                if hook:
                                    self.invoke_on_unsubscribed_hook(hook, topic_id, deleted_sub)

                                # No need to iterate further, there can be only one task for each sub_key
                                break
# ################################################################################################################################
def register_pubsub_tool(self, pubsub_tool:'PubSubTool') -> 'None':
    """ Registers a new pubsub_tool for this server, i.e. a new delivery task container.
    """
    tools = self.pubsub_tools
    tools.append(pubsub_tool)
# ################################################################################################################################
def set_pubsub_tool_for_sub_key(self, sub_key:'str', pubsub_tool:'PubSubTool') -> 'None':
    """ Adds a mapping between a sub_key and pubsub_tool handling its messages.
    """
    mapping = self.pubsub_tool_by_sub_key
    mapping[sub_key] = pubsub_tool
# ################################################################################################################################
def migrate_delivery_server(self, msg:'anydict') -> 'None':
    """ Migrates the delivery task for sub_key to a new server given by ID on input,
    including all current in-RAM messages. This method must be invoked in the same worker process that runs
    delivery task for sub_key.
    """
    request = {
        'sub_key': msg['sub_key'],
        'old_delivery_server_id': msg['old_delivery_server_id'],
        'new_delivery_server_name': msg['new_delivery_server_name'],
        'endpoint_type': msg['endpoint_type'],
    }
    _ = self.invoke_service('zato.pubsub.migrate.migrate-delivery-server', request)
# ################################################################################################################################
def get_before_delivery_hook(self, sub_key:'str') -> 'callnone':
    """ Returns a hook for messages to be invoked right before they are about to be delivered
    or None if such a hook is not defined for sub_key's topic.
    """
    with self.lock:
        sub = self.get_subscription_by_sub_key(sub_key)
        if not sub:
            return None
        topic = self.topic_api.get_topic_by_name(sub.topic_name)
        return topic.before_delivery_hook_service_invoker
# ################################################################################################################################
def get_on_subscribed_hook(self, sub_key:'str') -> 'callnone':
    """ Returns a hook triggered when a new subscription is made to a particular topic,
    or None if the subscription or hook does not exist.
    """
    with self.lock:
        sub = self.get_subscription_by_sub_key(sub_key)
        if not sub:
            return None
        topic = self.topic_api.get_topic_by_name(sub.topic_name)
        return topic.on_subscribed_service_invoker
# ################################################################################################################################
def get_on_unsubscribed_hook(self, sub_key:'str'='', sub:'subnone'=None) -> 'callnone':
    """ Returns a hook triggered when a client unsubscribes from a topic, or None if no hook exists.
    The subscription may be given directly or looked up by sub_key.
    """
    with self.lock:
        if not sub:
            sub = self.get_subscription_by_sub_key(sub_key)
        if not sub:
            return None
        topic = self.topic_api.get_topic_by_name(sub.topic_name)
        return topic.on_unsubscribed_service_invoker
# ################################################################################################################################
def get_on_outgoing_soap_invoke_hook(self, sub_key:'str') -> 'callnone':
    """ Returns a hook that sends outgoing SOAP Suds connections-based messages or None if there is no such hook
    for sub_key's topic.
    """
    with self.lock:
        sub = self.get_subscription_by_sub_key(sub_key)
        if not sub:
            return None
        topic = self.topic_api.get_topic_by_name(sub.topic_name)
        return topic.on_outgoing_soap_invoke_invoker
# ################################################################################################################################
def invoke_before_delivery_hook(
    self,
    hook, # type: callable_
    topic_id, # type: int
    sub_key, # type: str
    batch, # type: msgiter
    messages, # type: anydict
    actions=tuple(PUBSUB.HOOK_ACTION()), # type: strtuple
    _deliver=PUBSUB.HOOK_ACTION.DELIVER # type: str
) -> 'None':
    """ Invokes a hook service for each message from a batch of messages possibly to be delivered and arranges
    each one to a specific key in messages dict.

    Raises ValueError if the hook returns an action that is not one of the expected ones.
    """
    # The topic is the same for every message in the batch, so look it up once
    # instead of once per message as was done previously.
    topic = self.topic_api.get_topic_by_id(topic_id)

    for msg in batch:
        response = hook(topic, msg)

        # Missing hook_action means the default action, i.e. deliver the message.
        hook_action = response.get('hook_action', _deliver) # type: str

        if hook_action not in actions:
            raise ValueError('Invalid action returned `{}` for msg `{}`'.format(hook_action, msg))
        else:
            messages[hook_action].append(msg)
# ################################################################################################################################
def invoke_on_outgoing_soap_invoke_hook(self, batch:'anylist', sub:'Subscription', http_soap:'any_') -> 'None':
    """ Delivers a batch of messages through the topic's on_outgoing_soap_invoke hook,
    raising an error if the hook service does not implement the expected method.
    """
    hook = self.get_on_outgoing_soap_invoke_hook(sub.sub_key)

    # NOTE(review): other methods in this class resolve topics via self.topic_api.get_topic_by_id -
    # confirm that self.get_topic_by_id is also defined on this class.
    topic = self.get_topic_by_id(sub.config['topic_id'])
    if hook:
        hook(topic, batch, http_soap=http_soap)
    else:
        # We know that this service exists, it just does not implement the expected method
        service_info = self.server.service_store.get_service_info_by_id(topic.config['hook_service_id'])
        service_class = service_info['service_class'] # type: Service
        service_name = service_class.get_name()
        raise Exception('Hook service `{}` does not implement `on_outgoing_soap_invoke` method'.format(service_name))
# ################################################################################################################################
def _invoke_on_sub_unsub_hook(
self,
hook, # type: callable_
topic_id, # type: int
sub_key='', # type: str
sub=None # type: subnone
) -> 'any_':
sub = sub if sub else self._get_subscription_by_sub_key(sub_key)
topic = self.topic_api.get_topic_by_id(topic_id)
return hook(topic=topic, sub=sub)
# ################################################################################################################################
def invoke_on_subscribed_hook(self, hook:'callable_', topic_id:'int', sub_key:'str') -> 'any_':
    """ Runs the on-subscribed hook for the topic pointed to by topic_id; the subscription is found by sub_key.
    """
    return self._invoke_on_sub_unsub_hook(hook, topic_id, sub_key, sub=None)
# ################################################################################################################################
def invoke_on_unsubscribed_hook(self, hook:'callable_', topic_id:'int', sub:'subnone') -> 'any_':
    """ Runs the on-unsubscribed hook for the topic pointed to by topic_id with an already-resolved subscription.
    """
    return self._invoke_on_sub_unsub_hook(hook, topic_id, sub_key='', sub=sub)
# ################################################################################################################################
def on_broker_msg_HOT_DEPLOY_CREATE_SERVICE(self, services_deployed:'intlist') -> 'None':
    """ Invoked after a package with one or more services is hot-deployed. Goes over all topics
    and updates hooks that any of these services possibly implements.
    """
    with self.lock:
        for topic in self.topic_api.get_topics().values():
            # Refresh the hook only for topics whose hook service was just redeployed.
            if topic.config.get('hook_service_id') in services_deployed:
                self.hook_api.set_topic_config_hook_data(topic.config)
                topic.set_hooks()
# ################################################################################################################################
def deliver_pubsub_msg(self, sub_key:'str', msg:'msgiter') -> 'any_':
""" A callback method invoked by pub/sub delivery tasks for one or more message that is to be delivered.
"""
return self.invoke_service('zato.pubsub.delivery.deliver-message', {
'msg':msg,
'subscription':self.get_subscription_by_sub_key(sub_key)
})
# ################################################################################################################################
    def set_to_delete(self, sub_key:'str', msg_list:'strlistempty') -> 'None':
        """ Marks all input messages as ready to be deleted.

        sub_key  - subscription key whose queue the messages belong to.
        msg_list - IDs of the messages to mark; may be empty.
        """
        logger.info('Deleting messages set to be deleted `%s`', msg_list)
        # Delegates to the SQL-level helper of the same name; the current time is
        # passed along so the operation can be timestamped. The session is always
        # closed afterwards thanks to contextlib.closing.
        with closing(self.new_session_func()) as session:
            set_to_delete(session, self.cluster_id, sub_key, msg_list, utcnow_as_ms())
# ################################################################################################################################
def topic_lock(self, topic_name:'str') -> 'Lock':
return self.server.zato_lock_manager('zato.pubsub.publish.%s' % topic_name)
# ################################################################################################################################
def invoke_service(self, name:'str', msg:'any_', *args:'any_', **kwargs:'any_') -> 'any_':
return self.server.invoke(name, msg, *args, **kwargs)
# ################################################################################################################################
    def after_gd_sync_error(self,
        topic_id,     # type: int
        source,       # type: str
        pub_time_max, # type: float
        _float_str=PUBSUB.FLOAT_STRING_CONVERT # type: str
    ) -> 'None':
        """ Invoked by the after-publish service in case there was an error with letting
        a delivery task know about GD messages it was to handle. Resets the topic's
        sync_has_gd_msg flag to True to make sure the notification will be resent
        in the main loop's next iteration.

        topic_id     - ID of the topic whose sync failed.
        source       - human-readable description of the caller, used in logging.
        pub_time_max - publication time of the newest message involved in the failed sync.
        """
        # Get the topic object
        topic = self.topic_api.get_topic_by_id(topic_id) # type: Topic

        # Store information about what we are about to do
        logger.info('Will resubmit GD messages after sync error; topic:`%s`, src:`%s`', topic.name, source)

        with self.lock:

            # We need to use the correct value of pub_time_max - since we are resyncing
            # a failed message for a delivery task, it is possible that in the meantime
            # another message was published to the topic so in case topic's gd_pub_time_max
            # is bigger than our pub_time_max, the value from topic takes precedence.
            topic_gd_pub_time_max = topic.gd_pub_time_max

            if topic_gd_pub_time_max > pub_time_max:
                logger.warning('Choosing topic\'s gd_pub_time_max:`%s` over `%s`',
                    topic_gd_pub_time_max, _float_str.format(pub_time_max))
                new_pub_time_max = topic_gd_pub_time_max
            else:
                new_pub_time_max = pub_time_max

            # Re-arm the sync flag (is_gd=True, value=True) so the main loop re-notifies tasks
            self._set_sync_has_msg(topic_id, True, True, source, new_pub_time_max)
# ################################################################################################################################
def _set_sync_has_msg(self,
topic_id, # type: int
is_gd, # type: bool
value, # type: bool
source, # type: str
gd_pub_time_max=0.0 # type: float
) -> 'None':
""" Updates a given topic's flags indicating that a message has been published since the last sync.
Must be called with self.lock held.
"""
topic = self.topic_api.get_topic_by_id(topic_id) # type: Topic
if is_gd:
topic.sync_has_gd_msg = value
topic.gd_pub_time_max = gd_pub_time_max
else:
topic.sync_has_non_gd_msg = value
# ################################################################################################################################
def set_sync_has_msg(
self,
*,
topic_id, # type: int
is_gd, # type: bool
value, # type: bool
source, # type: str
gd_pub_time_max # type: float
) -> 'None':
with self.lock:
self._set_sync_has_msg(topic_id, is_gd, value, source, gd_pub_time_max)
# ################################################################################################################################
def get_default_internal_pubsub_endpoint_id(self) -> 'int':
return self.server.get_default_internal_pubsub_endpoint_id()
# ################################################################################################################################
# ################################################################################################################################
# Public API methods
# ################################################################################################################################
# ################################################################################################################################
def publish(
self,
name:'any_',
*args:'any_',
**kwargs:'any_'
) -> 'any_':
""" Publishes a new message to input name, which may point either to a topic or service.
POST /zato/pubsub/topic/{topic_name}
"""
return self.pubapi.publish(name, *args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
def get_messages(
self,
topic_name, # type: str
sub_key, # type: str
/,
needs_details=False, # type: bool
needs_msg_id=False, # type: bool
) -> 'anylist':
""" Returns messages from a subscriber's queue, deleting them from the queue in progress.
POST /zato/pubsub/topic/{topic_name}?sub_key=...
"""
return self.pubapi.get_messages(topic_name, sub_key, needs_details, needs_msg_id)
# ################################################################################################################################
# ################################################################################################################################
def read_messages(
self,
topic_name, # type: str
sub_key, # type: str
has_gd, # type: bool
*args, # type: any_
**kwargs # type: any_
) -> 'any_':
""" Looks up messages in subscriber's queue by input criteria without deleting them from the queue.
"""
return self.pubapi.read_messages(topic_name, sub_key, has_gd, *args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
def read_message(
self,
topic_name, # type: str
msg_id, # type: str
has_gd, # type: bool
*args, # type: any_
**kwargs # type: any_
) -> 'any_':
""" Returns details of a particular message without deleting it from the subscriber's queue.
"""
return self.pubapi.read_message(topic_name, msg_id, has_gd, *args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
def delete_message(
self,
sub_key, # type: str
msg_id, # type: str
has_gd, # type: bool
*args, # type: anytuple
**kwargs, # type: any_
) -> 'any_':
""" Deletes a message from a subscriber's queue.
DELETE /zato/pubsub/msg/{msg_id}
"""
return self.pubapi.delete_message(sub_key, msg_id, has_gd, *args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
def subscribe(
self,
topic_name, # type: str
**kwargs # type: any_
) -> 'str':
return self.pubapi.subscribe(topic_name, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
def resume_wsx_subscription(
self,
sub_key, # type: str
service, # type: Service
) -> 'None':
""" Invoked by WSX clients that want to resume deliveries of their messages after they reconnect.
"""
return self.pubapi.resume_wsx_subscription(sub_key, service)
# ################################################################################################################################
# ################################################################################################################################
    def create_topic(self,
        name,                    # type: str
        has_gd=False,            # type: bool
        accept_on_no_sub=True,   # type: bool
        is_active=True,          # type: bool
        is_internal=False,       # type: bool
        is_api_sub_allowed=True, # type: bool
        hook_service_id=None,    # type: intnone
        target_service_name=None, # type: strnone
        task_sync_interval=_ps_default.TASK_SYNC_INTERVAL,         # type: int
        task_delivery_interval=_ps_default.TASK_DELIVERY_INTERVAL, # type: int
        depth_check_freq=_ps_default.DEPTH_CHECK_FREQ,             # type: int
        max_depth_gd=_ps_default.TOPIC_MAX_DEPTH_GD,               # type: int
        max_depth_non_gd=_ps_default.TOPIC_MAX_DEPTH_NON_GD,       # type: int
        pub_buffer_size_gd=_ps_default.PUB_BUFFER_SIZE_GD,         # type: int
    ) -> 'None':
        """ Creates a new topic, delegating to the public pub/sub API.
        All parameters other than name take their defaults from server-wide pub/sub defaults.
        """
        self.pubapi.create_topic(
            name = name,
            has_gd = has_gd,
            accept_on_no_sub = accept_on_no_sub,
            is_active = is_active,
            is_internal = is_internal,
            is_api_sub_allowed = is_api_sub_allowed,
            hook_service_id = hook_service_id,
            target_service_name = target_service_name,
            task_sync_interval = task_sync_interval,
            task_delivery_interval = task_delivery_interval,
            depth_check_freq = depth_check_freq,
            max_depth_gd = max_depth_gd,
            max_depth_non_gd = max_depth_non_gd,
            pub_buffer_size_gd = pub_buffer_size_gd
        )
# ################################################################################################################################
# ################################################################################################################################
# Type alias - an optional SubKeyServer, i.e. either an instance or None.
sksnone = optional[SubKeyServer]
# ################################################################################################################################
# ################################################################################################################################
| 83,730
|
Python
|
.py
| 1,361
| 51.187362
| 130
| 0.463957
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,386
|
model.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/model.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=unused-import, redefined-builtin, unused-variable
# stdlib
import logging
from dataclasses import dataclass, field as dc_field
from datetime import datetime
# globre
from globre import compile as globre_compile
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems
# Zato
from zato.common.api import PUBSUB
from zato.common.exception import BadRequest
from zato.common.pubsub import dict_keys
from zato.common.typing_ import any_, anydict, anylist, callable_, cast_, dict_, intnone, list_, optional, strlist, strtuple
from zato.common.util.api import make_repr
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
if 0:
from zato.server.pubsub.delivery.message import msgnone
# ################################################################################################################################
logger = logging.getLogger('zato_pubsub.ps')
logger_zato = logging.getLogger('zato')
logger_overflow = logging.getLogger('zato_pubsub_overflow')
# ################################################################################################################################
# Maps each pub/sub hook type to the name of the method implementing it in a hook service.
hook_type_to_method = {
    PUBSUB.HOOK_TYPE.BEFORE_PUBLISH: 'before_publish',
    PUBSUB.HOOK_TYPE.BEFORE_DELIVERY: 'before_delivery',
    PUBSUB.HOOK_TYPE.ON_OUTGOING_SOAP_INVOKE: 'on_outgoing_soap_invoke',
    PUBSUB.HOOK_TYPE.ON_SUBSCRIBED: 'on_subscribed',
    PUBSUB.HOOK_TYPE.ON_UNSUBSCRIBED: 'on_unsubscribed',
}

# ################################################################################################################################

# Sentinel distinguishing "attribute missing" from legitimate falsy attribute values in ToDictBase.to_dict.
_does_not_exist = object()

# ################################################################################################################################

# Default message expiration time.
_default_expiration = PUBSUB.DEFAULT.EXPIRATION

# Column widths used when formatting sub-key server tables.
default_sk_server_table_columns = 6, 15, 8, 6, 17, 80

# ################################################################################################################################

# Shortcuts to message priority limits and the default priority.
_PRIORITY=PUBSUB.PRIORITY
_pri_min=_PRIORITY.MIN
_pri_max=_PRIORITY.MAX
_pri_def=_PRIORITY.DEFAULT
# ################################################################################################################################
def get_priority(
    cid,               # type: str
    input,             # type: anydict
    _pri_min=_pri_min, # type: int
    _pri_max=_pri_max, # type: int
    _pri_def=_pri_def  # type: int
) -> 'int':
    """ Get and validate message priority.

    Returns the default priority if input carries none (or a falsy one);
    raises BadRequest if the given priority is outside the allowed range.
    """
    priority = input.get('priority')
    if not priority:
        return _pri_def
    if not (_pri_min <= priority <= _pri_max):
        raise BadRequest(cid, 'Priority `{}` outside of allowed range {}-{}'.format(priority, _pri_min, _pri_max))
    return priority
# ################################################################################################################################
def get_expiration(cid:'str', input:'anydict', default_expiration:'int'=_default_expiration) -> 'int':
    """ Get and validate message expiration.
    Returns (2 ** 31 - 1) * 1000 milliseconds (around 70 years) if expiration is not set explicitly.
    """
    expiration = input.get('expiration') # type: intnone

    # Negative values are rejected outright ..
    if expiration is not None:
        if expiration < 0:
            raise BadRequest(cid, 'Expiration `{}` must not be negative'.format(expiration))

    # .. while a missing or zero expiration falls back to the default one.
    if expiration:
        return expiration
    return default_expiration
# ################################################################################################################################
# ################################################################################################################################
class EventType:
    """ String identifiers of internal pub/sub events.
    """

    class Topic:
        """ Events related to individual topics.
        """
        set_hooks = 'set_hooks'
        incr_topic_msg_counter = 'incr_topic_msg_counter'
        update_task_sync_time_before = 'update_task_sync_time_before'
        update_task_sync_time_after = 'update_task_sync_time_after'
        needs_task_sync_before = 'needs_task_sync_before'
        needs_task_sync_after = 'needs_task_sync_after'

    class PubSub:
        """ Events related to the main pub/sub object and its sync loop.
        """
        loop_topic_id_dict = 'loop_topic_id_dict'
        loop_sub_keys = 'loop_sub_keys'
        loop_before_has_msg = 'loop_before_has_msg'
        loop_has_msg = 'loop_has_msg'
        loop_before_sync = 'loop_before_sync'
        _set_sync_has_msg = '_set_sync_has_msg'
        about_to_subscribe = 'about_to_subscribe'
        about_to_access_sub_sk = 'about_to_access_sub_sk'
        in_subscribe_impl = 'in_subscribe_impl'
# ################################################################################################################################
# ################################################################################################################################
class ToDictBase:
    """ Base class for objects that can serialize themselves to a dict
    based on a fixed list of key names.
    """

    # Names of attributes/config keys that to_dict serializes - set by subclasses
    _to_dict_keys:'tuple'

    # Raw configuration dict the object was built from
    config:'anydict'

    def to_dict(self) -> 'anydict':
        """ Returns a dict with one entry per name in _to_dict_keys, preferring
        the instance attribute of that name and falling back to self.config
        when the attribute does not exist.
        """
        out = {} # type: anydict
        for name in self._to_dict_keys:
            name = cast_('str', name)
            # The sentinel lets us distinguish "attribute missing" from falsy values
            value = getattr(self, name, _does_not_exist) # type: any_
            if value is _does_not_exist:
                value = self.config[name]
            out[name] = value
        return out
# ################################################################################################################################
# ################################################################################################################################
class Endpoint(ToDictBase):
    """ A publisher/subscriber in pub/sub workflows.
    """
    _to_dict_keys = dict_keys.endpoint

    # Raw configuration this endpoint was built from
    config: 'anydict'

    id: 'int'
    name: 'str'
    endpoint_type: 'str'
    role: 'str'
    is_active: 'bool'
    is_internal: 'bool'

    # Multi-line pub=/sub= pattern definitions, exactly as entered in configuration
    topic_patterns: 'str'

    # Compiled [pattern_line, matcher] pairs derived from topic_patterns
    pub_topic_patterns: 'strlist'
    sub_topic_patterns: 'strlist'

    pub_topics: 'anydict'
    sub_topics: 'anydict'

    def __init__(self, config:'anydict') -> 'None':
        self.config = config
        self.id = config['id']
        self.name = config['name']
        self.endpoint_type = config['endpoint_type']
        self.role = config['role']
        self.is_active = config['is_active']
        self.is_internal = config['is_internal']
        self.service_id = config['service_id']
        # Patterns may be absent from the configuration altogether
        self.topic_patterns = config.get('topic_patterns', '')
        self.pub_topic_patterns = []
        self.sub_topic_patterns = []
        self.pub_topics = {}
        self.sub_topics = {}
        # Compile the pub=/sub= patterns right away
        self.set_up_patterns()

# ################################################################################################################################

    def __repr__(self) -> 'str':
        return make_repr(self)

# ################################################################################################################################

    def get_id(self) -> 'str':
        """ Returns a composite, human-readable identifier of this endpoint.
        """
        return '{};{};{}'.format(self.id, self.endpoint_type, self.name)

# ################################################################################################################################

    def to_dict(self, _replace:'strtuple'=('pub_topic_patterns', 'sub_topic_patterns')) -> 'anydict':
        """ Serializes the endpoint to a dict. Compiled pattern matchers are replaced
        with their string form so the result can be serialized.
        """
        out = super(Endpoint, self).to_dict()
        for key, value in out.items():
            if key in _replace:
                if value:
                    # Each elem is a [pattern_line, compiled_matcher] pair - stringify the matcher
                    out[key] = sorted([(elem[0], str(elem[1])) for elem in value])
        return out

# ################################################################################################################################

    def set_up_patterns(self) -> 'None':
        """ Parses self.topic_patterns into compiled matchers, splitting them into
        publication and subscription ones. Lines without a pub=/sub= prefix are
        ignored with a warning.
        """
        data = {
            'topic': self.topic_patterns,
        }

        # is_pub, is_topic -> target set
        targets = {
            (True, True): self.pub_topic_patterns,
            (False, True): self.sub_topic_patterns,
        } # type: anydict

        for key, config in iteritems(data):
            is_topic = key == 'topic' # type: bool

            for line in (config or '').splitlines():
                line = line.strip()
                if line.startswith('pub=') or line.startswith('sub='):
                    is_pub = line.startswith('pub=') # type: bool

                    # Strip the pub=/sub= prefix and compile the remainder with globre
                    matcher = line[line.find('=')+1:]
                    matcher = globre_compile(matcher)

                    source = (is_pub, is_topic)
                    target = targets[source] # type: anylist
                    target.append([line, matcher])

                else:
                    msg = 'Ignoring invalid %s pattern `%s` for `%s` (role:%s) (reason: no pub=/sub= prefix found)'
                    logger.warning(msg, key, line, self.name, self.role)
# ################################################################################################################################
# ################################################################################################################################
class Topic(ToDictBase):
    """ An individual topic in pub/sub workflows.
    """
    _to_dict_keys = dict_keys.topic

    config: 'anydict'
    id: 'int'
    name: 'str'
    is_active: 'bool'
    is_internal: 'bool'
    has_gd: 'bool'             # Does the topic use guaranteed-delivery (GD) messages
    server_name: 'str'         # Name of the server this Topic object lives on
    server_pid: 'int'          # PID of the server process this Topic object lives on
    max_depth_gd: 'int'
    max_depth_non_gd: 'int'
    last_synced: 'float'       # When subscribers were last notified (local, per-server value)
    gd_pub_time_max: 'float'   # Publication time of the newest GD message published locally
    sync_has_gd_msg: 'bool'
    sync_has_non_gd_msg: 'bool'
    depth_check_freq: 'int'
    pub_buffer_size_gd: 'int'
    msg_pub_counter: 'int'
    msg_pub_counter_gd: 'int'
    msg_pub_counter_non_gd: 'int'
    task_sync_interval: 'float'
    meta_store_frequency: 'int'
    task_delivery_interval: 'int'
    limit_retention: 'int'
    limit_message_expiry: 'int'
    limit_sub_inactivity: 'int'

    def __init__(self, config:'anydict', server_name:'str', server_pid:'int') -> 'None':
        self.config = config
        self.server_name = server_name
        self.server_pid = server_pid
        self.id = config['id']
        self.name = config['name']
        self.is_active = config['is_active']
        self.is_internal = config['is_internal']
        self.max_depth_gd = config['max_depth_gd']
        self.max_depth_non_gd = config['max_depth_non_gd']
        self.has_gd = config['has_gd']
        self.depth_check_freq = config['depth_check_freq']
        self.pub_buffer_size_gd = config['pub_buffer_size_gd']
        self.task_delivery_interval = config['task_delivery_interval']
        self.meta_store_frequency = config['meta_store_frequency']
        # Limits may be missing or falsy in older configuration - fall back to server-wide defaults
        self.limit_retention = config.get('limit_retention') or PUBSUB.DEFAULT.LimitTopicRetention
        self.limit_message_expiry = config.get('limit_message_expiry') or PUBSUB.DEFAULT.LimitMessageExpiry
        self.limit_sub_inactivity = config.get('limit_sub_inactivity') or PUBSUB.DEFAULT.LimitSubInactivity
        self.set_hooks()

        # For now, task sync interval is the same for GD and non-GD messages
        # so we can arbitrarily pick the former to serve for both types of messages.
        self.task_sync_interval = config['task_sync_interval'] / 1000.0

        # How many messages have been published to this topic from current server,
        # i.e. this is not a global counter.
        self.msg_pub_counter = 0
        self.msg_pub_counter_gd = 0
        self.msg_pub_counter_non_gd = 0

        # When were subscribers last notified about messages from current server,
        # again, this is not a global counter.
        self.last_synced = utcnow_as_ms()

        # Flags to indicate if there has been a GD or non-GD message published for this topic
        # since the last time self.last_synced has been updated. They are changed through PubSub
        # with a lock for this topic held.
        self.sync_has_gd_msg = False
        self.sync_has_non_gd_msg = False

        # The last time a GD message was published to this topic
        self.gd_pub_time_max = 0.0 # type: float

# ################################################################################################################################

    def get_id(self) -> 'str':
        """ Returns a composite, human-readable identifier of this topic.
        """
        return '{};{}'.format(self.name, self.id)

# ################################################################################################################################

    def set_hooks(self) -> 'None':
        """ Re-reads hook service invokers from the topic's config dict.
        The invokers themselves are attached to the config elsewhere.
        """
        self.on_subscribed_service_invoker = self.config.get('on_subscribed_service_invoker')
        self.on_unsubscribed_service_invoker = self.config.get('on_unsubscribed_service_invoker')
        self.before_publish_hook_service_invoker = self.config.get('before_publish_hook_service_invoker')
        self.before_delivery_hook_service_invoker = self.config.get('before_delivery_hook_service_invoker')
        self.on_outgoing_soap_invoke_invoker = self.config.get('on_outgoing_soap_invoke_invoker')

# ################################################################################################################################

    def incr_topic_msg_counter(self, has_gd:'bool', has_non_gd:'bool') -> 'None':
        """ Increases counter of messages published to this topic from current server.
        """
        self.msg_pub_counter += 1
        if has_gd:
            self.msg_pub_counter_gd += 1
        if has_non_gd:
            self.msg_pub_counter_non_gd += 1

# ################################################################################################################################

    def update_task_sync_time(self, _utcnow_as_ms:'callable_'=utcnow_as_ms) -> 'None':
        """ Updates the timestamp of the last synchronization with delivery tasks.
        """
        self.last_synced = _utcnow_as_ms()

# ################################################################################################################################

    def needs_task_sync(self, _utcnow_as_ms:'callable_'=utcnow_as_ms) -> 'bool':
        """ Returns True if enough time has passed since the last sync with delivery tasks.
        """
        now = _utcnow_as_ms()
        needs_sync = now - self.last_synced >= self.task_sync_interval
        return needs_sync

# ################################################################################################################################

    def needs_depth_check(self) -> 'bool':
        # True once per depth_check_freq GD publications
        return self.msg_pub_counter_gd % self.depth_check_freq == 0

# ################################################################################################################################

    def needs_meta_update(self) -> 'bool':
        # True once per meta_store_frequency publications
        return self.msg_pub_counter % self.meta_store_frequency == 0
# ################################################################################################################################
# ################################################################################################################################
class Subscription(ToDictBase):
    """ Describes an existing subscription object.
    Note that, for WSX clients, it may exist even if the WebSocket is not currently connected.
    """
    _to_dict_keys = dict_keys.subscription

    config: 'anydict'
    id: 'int'
    creation_time: 'float'          # Stored internally multiplied by 1000 (input presumably in seconds)
    sub_key: 'str'
    endpoint_id: 'int'
    topic_id: 'int'
    topic_name: 'str'
    sub_pattern_matched: 'str'
    task_delivery_interval: 'int'
    unsub_on_wsx_close: 'bool'
    ext_client_id: 'str'

    def __init__(self, config:'anydict') -> 'None':
        self.config = config
        self.id = config['id']
        # Scale the incoming creation_time by 1000
        self.creation_time = config['creation_time'] * 1000.0
        self.sub_key = config['sub_key']
        self.endpoint_id = config['endpoint_id']
        self.topic_id = config['topic_id']
        self.topic_name = config['topic_name']
        self.sub_pattern_matched = config['sub_pattern_matched']
        self.task_delivery_interval = config['task_delivery_interval']
        self.unsub_on_wsx_close = config.get('unsub_on_wsx_close', PUBSUB.DEFAULT.UnsubOnWSXClose)
        self.ext_client_id = config['ext_client_id']

        # Object ws_channel_id is an ID of a WSX channel this subscription potentially belongs to,
        # otherwise it is None.
        self.is_wsx = bool(self.config['ws_channel_id'])

# ################################################################################################################################

    def __getitem__(self, key:'str') -> 'any_':
        # Dict-like access delegates to attribute access
        return getattr(self, key)

# ################################################################################################################################

    def __lt__(self, other:'Subscription') -> 'bool':
        # Subscriptions sort by their sub_key
        return self.sub_key < other.sub_key

# ################################################################################################################################

    def __repr__(self) -> 'str':
        return make_repr(self)

# ################################################################################################################################

    def get_id(self) -> 'str':
        return self.sub_key
# ################################################################################################################################
# ################################################################################################################################
class HookCtx:
    """ Carries contextual information given to pub/sub hook services on invocation.
    """
    __slots__ = ('hook_type', 'msg', 'topic', 'sub', 'http_soap', 'outconn_name')

    msg: 'msgnone'
    sub: 'subnone' # type: ignore[valid-type]
    topic: 'topicnone' # type: ignore[valid-type]
    hook_type: 'str'
    http_soap: 'anydict'
    outconn_name: 'str'

    def __init__(
        self,
        hook_type,  # type: str
        topic=None, # type: topicnone # type: ignore[valid-type]
        msg=None,   # type: msgnone
        **kwargs    # type: any_
    ) -> 'None':
        self.hook_type = hook_type
        self.msg = msg
        self.topic = cast_(Topic, topic)
        # Optional elements, taken from kwargs if given
        self.sub = kwargs.get('sub')
        self.http_soap = kwargs.get('http_soap', {})
        # Name of the outgoing connection, if one was given on input via http_soap
        self.outconn_name = self.http_soap.get('config', {}).get('name', '')
# ################################################################################################################################
# ################################################################################################################################
class SubKeyServer(ToDictBase):
    """ Holds information about which server has subscribers to an individual sub_key.
    """
    _to_dict_keys = dict_keys.sks

    config: 'anydict'
    sub_key: 'str'
    cluster_id: 'int'
    server_name: 'str'
    server_pid: 'int'
    endpoint_type: 'str'
    creation_time: 'datetime'   # When this object was created, naive UTC

    # Attributes below are only for WebSockets
    channel_name: 'str'
    pub_client_id: 'str'
    ext_client_id: 'str'
    wsx_info: 'anydict'

    def __init__(self, config:'anydict', _utcnow:'callable_'=datetime.utcnow) -> 'None':
        self.config = config
        self.sub_key = config['sub_key']
        self.cluster_id = config['cluster_id']
        self.server_name = config['server_name']
        self.server_pid = config['server_pid']
        self.endpoint_type = config['endpoint_type']

        # Attributes below are only for WebSockets - they may be absent for other endpoint types
        self.channel_name = config.get('channel_name', '')
        self.pub_client_id = config.get('pub_client_id', '')
        self.ext_client_id = config.get('ext_client_id', '')
        self.wsx_info = config.get('wsx_info', {})

        # When this object was created
        self.creation_time = _utcnow()

# ################################################################################################################################

    def __repr__(self) -> 'str':
        return make_repr(self)

# ################################################################################################################################

    def get_id(self) -> 'str':
        """ Returns a composite identifier of this server/PID/sub_key combination.
        """
        return '{};{};{}'.format(self.server_name, self.server_pid, self.sub_key)
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=True)
class DeliveryResultCtx:
    """ Describes the outcome of a delivery attempt made by a delivery task.
    """
    delivery_iter: int = 0  # Which delivery iteration this result belongs to
    is_ok: bool = False     # Did the delivery succeed
    status_code: int = 0
    reason_code: int = 0
    exception_list: list_[Exception] = dc_field(default_factory=list)  # Exceptions caught during delivery
# ################################################################################################################################
# ################################################################################################################################
# Convenience type aliases built around the model classes above.
subnone = optional[Subscription]
sublist = list_[Subscription]
strsubdict = dict_[str, Subscription]
topicnone = optional[Topic]
topiclist = list_[Topic]
strtopicdict = dict_[str, Topic]
inttopicdict = dict_[int, Topic]
# ################################################################################################################################
# ################################################################################################################################
| 21,471
|
Python
|
.py
| 406
| 46.325123
| 130
| 0.450315
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,387
|
publisher.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/publisher.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from dataclasses import dataclass
from logging import DEBUG, getLogger
from operator import itemgetter
from traceback import format_exc
# ciso8601 / dateparser
try:
from ciso8601 import parse_datetime_as_naive
except ImportError:
from dateparser.parser import parse as parse_datetime_as_naive # type: ignore
# gevent
from gevent import spawn
# typing-extensions
from typing_extensions import TypeAlias
# Zato
from zato.common.api import PUBSUB, ZATO_NONE
from zato.common.exception import Forbidden, NotFound, ServiceUnavailable
from zato.common.json_ import dumps as json_dumps
from zato.common.marshal_.api import Model
from zato.common.odb.query.pubsub.publish import sql_publish_with_retry
from zato.common.odb.query.pubsub.topic import get_gd_depth_topic
from zato.common.pubsub import new_msg_id, PubSubMessage
from zato.common.typing_ import any_, anydict, anydictnone, anylistnone, anynone, boolnone, cast_, dict_field, intnone, \
list_field, strlistnone, strnone
from zato.common.util.pubsub import get_expiration, get_priority
from zato.common.util.sql import set_instance_opaque_attrs
from zato.common.util.time_ import datetime_from_ms, datetime_to_ms, utcnow_as_ms
# ################################################################################################################################
# ################################################################################################################################
# The imports below are needed only by static type checkers - the "if 0" guard
# keeps them from executing at runtime; the re-assignments make the names
# appear used to linters.
if 0:
    from zato.common.marshal_.api import MarshalAPI
    from zato.common.typing_ import anylist, callable_, dictlist, strlist, tuple_
    from zato.server.base.parallel import ParallelServer
    from zato.server.pubsub import PubSub, Topic
    from zato.server.pubsub.model import sublist
    from zato.server.service import Service

    dictlist = dictlist
    strlist = strlist
    sublist = sublist
    Service = Service
# ################################################################################################################################
# ################################################################################################################################
# Module-level loggers - a general server log, a pub/sub-specific one and a pub/sub audit trail
logger = getLogger('zato')
logger_pubsub = getLogger('zato_pubsub.srv')
logger_audit = getLogger('zato_pubsub_audit')

# Computed once at import time so hot paths can skip building debug log arguments
has_logger_pubsub_debug = logger_pubsub.isEnabledFor(DEBUG)

# ################################################################################################################################
# ################################################################################################################################

# The result of a publication - a single message ID, a list of message IDs,
# or None if the messages were dropped (e.g. no subscribers and the topic is configured to drop).
PublicationResult:TypeAlias = 'str | strlist | None'

# ################################################################################################################################
# ################################################################################################################################

# String form of the "initialized" delivery status, as stored in SQL
_initialized = str(PUBSUB.DELIVERY_STATUS.INITIALIZED)

# Key templates for in-RAM metadata about the most recent publication per topic / per endpoint
_meta_topic_key = PUBSUB.REDIS.META_TOPIC_LAST_KEY
_meta_endpoint_key = PUBSUB.REDIS.META_ENDPOINT_PUB_KEY

# Optional message attributes copied into per-topic metadata only when actually set
_meta_topic_optional = ('pub_correl_id', 'ext_client_id', 'in_reply_to')

# Sort key for an endpoint's publication history (used with reverse=True -> newest first)
_meta_sort_key = itemgetter('pub_time', 'ext_pub_time')

# Log message templates used in several places below
_log_turning_gd_msg = 'Turning message `%s` into a GD one ({})'
_inserting_gd_msg = 'Inserting GD messages for topic `%s` `%s` published by `%s` (ext:%s) (cid:%s)'
# ################################################################################################################################
# ################################################################################################################################
class _GetMessage:
    """ Constants reused while turning publication requests into messages. """

    # Hook action indicating that a message should be skipped (not published)
    _skip = PUBSUB.HOOK_ACTION.SKIP

    # The default message priority - messages with this priority store None in SQL to save space
    _default_pri = PUBSUB.PRIORITY.DEFAULT

    # Names of the opaque attributes that may be copied onto a message
    _opaque_only = PUBSUB.DEFAULT.SK_OPAQUE

    # Format string used to serialise floats as strings without rounding
    _float_str = PUBSUB.FLOAT_STRING_CONVERT

    # MIME type marking data that Zato itself serialised to JSON
    _zato_mime_type = PUBSUB.MIMEType.Zato
# ################################################################################################################################
# ################################################################################################################################
class PubCtx:
    """ A container for information describing a single publication.
    """
    def __init__(
        self,
        *,
        cid: 'str',
        cluster_id: 'int',
        pubsub: 'PubSub',
        topic: 'Topic',
        endpoint_id: 'int',
        endpoint_name: 'str',
        subscriptions_by_topic: 'sublist',
        msg_id_list: 'anylist',
        gd_msg_list: 'anylist',
        non_gd_msg_list: 'anylist',
        pub_pattern_matched: 'str',
        ext_client_id: 'str',
        is_first_run: 'bool',
        now: 'float',
        is_wsx: 'bool',
        service_invoke_func: 'callable_',
        new_session_func: 'callable_',
    ) -> 'None':

        # Refuse to build a context that carries no messages at all.
        if not gd_msg_list and not non_gd_msg_list:
            raise ValueError('At least one of gd_msg_list or non_gd_msg_list must be provided')

        # Identifiers and routing information
        self.cid = cid
        self.cluster_id = cluster_id
        self.pubsub = pubsub
        self.topic = topic
        self.endpoint_id = endpoint_id
        self.endpoint_name = endpoint_name
        self.subscriptions_by_topic = subscriptions_by_topic

        # Messages carried by this publication
        self.msg_id_list = msg_id_list
        self.gd_msg_list = gd_msg_list
        self.non_gd_msg_list = non_gd_msg_list

        # Request metadata
        self.pub_pattern_matched = pub_pattern_matched
        self.ext_client_id = ext_client_id
        self.is_first_run = is_first_run
        self.now = now
        self.is_wsx = is_wsx

        # Callables used while publishing
        self.service_invoke_func = service_invoke_func
        self.new_session_func = new_session_func

        # Topic depth as established during this publication - updated later on
        self.current_depth = 0

        # The most recently given message, for later use by our callers -
        # the GD list takes precedence if it is non-empty.
        self.last_msg = (self.gd_msg_list or self.non_gd_msg_list)[-1]
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class PubRequest(Model):
    """ Describes a single incoming publication request - either given by a caller directly
    or built out of one element of an input data_list.
    """

    # Correlation ID of the request
    cid: str

    # Name of the topic to publish to
    topic_name: str = ''

    # Publication pattern that allowed this request (filled in during processing)
    pub_pattern_matched: str = ''

    # Message identifiers - msg_id may be provided by the caller, otherwise a new one is generated
    msg_id: strnone = ''
    correl_id: strnone = None
    in_reply_to: strnone = None

    # Whether the message should be guaranteed-delivery (None = use the topic's default)
    has_gd: boolnone = None

    # Either a single business payload or a list of per-message dicts, each of which
    # is itself turned into a PubRequest
    data: anynone = None
    data_list: anylistnone = None

    # Message attributes with their server-wide defaults
    priority: intnone = PUBSUB.PRIORITY.DEFAULT
    expiration: intnone = PUBSUB.DEFAULT.EXPIRATION
    mime_type: strnone = PUBSUB.DEFAULT.MIME_TYPE

    # Optional information about the external client publishing the message
    ext_client_id: strnone = ''
    ext_pub_time: strnone = None

    # Ways to identify the publishing endpoint - names take precedence over IDs
    security_id: intnone = None
    security_name: strnone = None
    endpoint_id: intnone = None
    endpoint_name: strnone = None
    ws_channel_id: intnone = None

    # Message grouping metadata
    group_id: strnone = ''
    position_in_group: intnone = PUBSUB.DEFAULT.PositionInGroup

    # Optional lists of sub_keys to reply to or to restrict delivery to
    reply_to_sk: strlistnone = list_field()
    deliver_to_sk: strlistnone = list_field()

    # Arbitrary user- and Zato-level context attached to the message
    user_ctx: anynone = None
    zato_ctx: anydictnone = dict_field()
# ################################################################################################################################
# ################################################################################################################################
class Publisher:
    """ Actual implementation of message publishing exposed through other services to the outside world.
    """
    pubsub: 'PubSub'
    server: 'ParallelServer'
    marshal_api: 'MarshalAPI'
    service_invoke_func: 'callable_'
    new_session_func: 'callable_'

    def __init__(
        self,
        *,
        pubsub: 'PubSub',
        server: 'ParallelServer',
        marshal_api: 'MarshalAPI',
        service_invoke_func: 'callable_',
        new_session_func: 'callable_'
    ) -> 'None':
        self.pubsub = pubsub
        self.server = server
        self.marshal_api = marshal_api
        self.service_invoke_func = service_invoke_func
        self.new_session_func = new_session_func

# ################################################################################################################################

    def get_data_prefixes(self, data:'str') -> 'tuple_[str, str]':
        """ Returns the two shortened forms of the input data that GD messages store
        in their SQL prefix columns.
        """
        data_prefix = data[:self.pubsub.data_prefix_len]
        data_prefix_short = data[:self.pubsub.data_prefix_short_len]
        return data_prefix, data_prefix_short

# ################################################################################################################################

    def build_message(
        self,
        topic:'Topic',
        request:'PubRequest',
        now:'float',
        pub_pattern_matched:'str',
        endpoint_id:'int',
        subscriptions_by_topic:'sublist',
        has_no_sk_server:'bool'
    ) -> 'PubSubMessage | None':
        """ Turns a single publication request into a PubSubMessage object,
        or returns None if a before-publish hook decided the message should be skipped.
        """
        priority = get_priority(request.cid, request.priority)

        # So as not to send it to SQL if it is a default value anyway = less overhead = better performance
        if priority == _GetMessage._default_pri:
            priority = None

        expiration = get_expiration(request.cid, request.expiration, topic.limit_message_expiry)
        expiration_time = now + expiration

        pub_msg_id = request.msg_id or new_msg_id()

        # If there is at least one WSX subscriber to this topic which is not connected at the moment,
        # which means it has no delivery server, we unconditionally turn this message into a GD one ..
        if has_no_sk_server:
            has_gd = True
            logger_pubsub.info(_log_turning_gd_msg.format('no SK server'), pub_msg_id)

        # .. otherwise, use request GD value or the default per topic.
        else:
            has_gd = request.has_gd
            if has_gd not in (None, '', ZATO_NONE):
                if not isinstance(has_gd, bool):
                    raise ValueError('Input has_gd is not a bool (found:`{}`)'.format(repr(has_gd)))
            else:
                has_gd = topic.has_gd

        pub_correl_id = request.correl_id
        in_reply_to = request.in_reply_to
        ext_client_id = request.ext_client_id
        mime_type = request.mime_type

        ext_pub_time = request.ext_pub_time or None
        if ext_pub_time:
            ext_pub_time = parse_datetime_as_naive(ext_pub_time) # type: ignore
            ext_pub_time = datetime_to_ms(ext_pub_time) / 1000.0 # type: ignore

        # Normalise optional values - empty strings become None or their defaults
        pub_correl_id = pub_correl_id if pub_correl_id else None
        in_reply_to = in_reply_to if in_reply_to else None
        ext_client_id = ext_client_id if ext_client_id else ''
        mime_type = mime_type if mime_type else PUBSUB.DEFAULT.MIME_TYPE

        reply_to_sk = request.reply_to_sk
        deliver_to_sk = request.deliver_to_sk

        user_ctx = request.user_ctx or None
        zato_ctx = request.zato_ctx or {}

        ps_msg = PubSubMessage()
        ps_msg.topic = topic
        ps_msg.pub_msg_id = pub_msg_id
        ps_msg.pub_correl_id = pub_correl_id
        ps_msg.in_reply_to = in_reply_to

        # Convert to string to prevent pg8000 from rounding up float values.
        # Note that the model says these fields are floats and this is why we ignore the type warnings in this case.
        ps_msg.pub_time = _GetMessage._float_str.format(now)
        ps_msg.ext_pub_time = _GetMessage._float_str.format(ext_pub_time) if ext_pub_time else ext_pub_time

        # If the data published is not a string or object, we need to serialise it to JSON
        # so as to be able to save it in the database - a delivery task will later
        # need to de-serialise it.
        data = request.data
        if not isinstance(data, (str, bytes)):
            data = json_dumps(data)
            zato_ctx['zato_mime_type'] = _GetMessage._zato_mime_type

        zato_ctx = json_dumps(zato_ctx)

        ps_msg.delivery_status = _initialized
        ps_msg.pub_pattern_matched = pub_pattern_matched
        ps_msg.data = data
        ps_msg.mime_type = mime_type
        ps_msg.priority = priority # type: ignore
        ps_msg.expiration = expiration
        ps_msg.expiration_time = expiration_time
        ps_msg.published_by_id = endpoint_id
        ps_msg.topic_id = topic.id
        ps_msg.topic_name = topic.name
        ps_msg.cluster_id = self.server.cluster_id
        ps_msg.has_gd = has_gd
        ps_msg.ext_client_id = ext_client_id
        ps_msg.group_id = request.group_id or None
        ps_msg.position_in_group = request.position_in_group or PUBSUB.DEFAULT.PositionInGroup
        ps_msg.is_in_sub_queue = bool(subscriptions_by_topic)
        ps_msg.reply_to_sk = reply_to_sk # type: ignore
        ps_msg.deliver_to_sk = deliver_to_sk # type: ignore
        ps_msg.user_ctx = user_ctx
        ps_msg.zato_ctx = zato_ctx

        # Opaque attributes - we only need reply to sub_keys to be placed in there
        # but we do not do it unless we know that any such sub key was actually requested.
        if reply_to_sk or deliver_to_sk:
            set_instance_opaque_attrs(ps_msg, request.to_dict(), only=_GetMessage._opaque_only)

        # If there are any subscriptions for the topic this message was published to, we want to establish
        # based on what subscription pattern each subscriber will receive the message.
        for sub in subscriptions_by_topic:
            ps_msg.sub_pattern_matched[sub.sub_key] = sub.sub_pattern_matched

        if ps_msg.data:
            # We need to store the size in bytes rather than Unicode codepoints
            ps_msg.size = len(ps_msg.data if isinstance(ps_msg.data, bytes) else ps_msg.data.encode('utf8'))
        else:
            ps_msg.size = 0

        # Invoke hook service here because it may want to update data in which case
        # we need to take it into account below.
        if topic.before_publish_hook_service_invoker:
            response = topic.before_publish_hook_service_invoker(topic, ps_msg)

            # Hook service decided that we should not process this message
            if response.get('hook_action') == _GetMessage._skip:
                logger_audit.info('Skipping message pub_msg_id:`%s`, pub_correl_id:`%s`, ext_client_id:`%s`',
                    ps_msg.pub_msg_id, ps_msg.pub_correl_id, ps_msg.ext_client_id)
                return None

        # These are needed only for GD messages that are stored in SQL
        if has_gd:
            data_prefix, data_prefix_short = self.get_data_prefixes(cast_('str', ps_msg.data))
            ps_msg.data_prefix = data_prefix
            ps_msg.data_prefix_short = data_prefix_short

        return ps_msg

# ################################################################################################################################

    def get_messages_from_data(
        self,
        *,
        cid:'str',
        topic:'Topic',
        data_list:'any_',
        request:'PubRequest',
        now:'float',
        pub_pattern_matched:'str',
        endpoint_id:'int',
        subscriptions_by_topic:'sublist',
        has_no_sk_server:'bool'
    ) -> 'tuple_[anylist, ...]':
        """ Builds messages out of the request - either one message per element of data_list
        or a single message out of the request itself. Returns a three-element tuple of
        (msg_id_list, gd_msg_list, non_gd_msg_list), the latter two holding messages as dicts.
        """

        # List of messages with GD enabled
        gd_msg_list = [] # type: anylist

        # List of messages without GD enabled
        non_gd_msg_list = [] # type: anylist

        # List of all message IDs - in the same order as messages were given on request
        msg_id_list = [] # type: anylist

        if data_list and isinstance(data_list, (list, tuple)):

            # Cast it to a list so that its type is known to type checkers
            data_list = cast_('anylist', data_list)

            for elem in data_list:
                # Each element carries its own metadata so it becomes a PubRequest of its own
                elem = self.marshal_api.from_dict(cast_('Service', None), elem, PubRequest, extra={'cid':cid})
                msg = self.build_message(topic, elem, now, pub_pattern_matched, endpoint_id, subscriptions_by_topic,
                    has_no_sk_server)
                if msg:
                    msg_id_list.append(msg.pub_msg_id)
                    msg_as_dict = msg.to_dict()
                    target_list = gd_msg_list if msg.has_gd else non_gd_msg_list
                    target_list.append(msg_as_dict)
        else:
            msg = self.build_message(topic, request, now, pub_pattern_matched, endpoint_id, subscriptions_by_topic,
                has_no_sk_server)

            if msg:
                msg_id_list.append(msg.pub_msg_id)
                msg_as_dict = msg.to_dict()
                target_list = gd_msg_list if msg.has_gd else non_gd_msg_list
                target_list.append(msg_as_dict)

        return msg_id_list, gd_msg_list, non_gd_msg_list

# ################################################################################################################################

    def _get_endpoint_id_by_security_name(self, security_name:'str') -> 'intnone':
        """ Resolves a Basic Auth security definition's name to the ID of the endpoint using it.
        """
        security = self.server.worker_store.basic_auth_get(security_name)
        security_config = security['config']
        security_id = security_config['id']
        endpoint_id = self.pubsub.get_endpoint_id_by_sec_id(security_id)
        return endpoint_id

# ################################################################################################################################

    def get_pub_pattern_matched(self, endpoint_id:'intnone', request:'PubRequest') -> 'tuple_[int, str]':
        """ Returns a publication pattern matched that allows the endpoint to publish messages
        or raises an exception if no pattern was matched. Takes into account various IDs possibly given on request,
        depending on what our caller wanted to provide.
        """
        security_id = request.security_id
        security_name = request.security_name
        ws_channel_id = request.ws_channel_id
        endpoint_name = request.endpoint_name

        #
        # Note that if we have security name on input, it will take precedence over input security ID.
        #
        if security_name:
            endpoint_id = self._get_endpoint_id_by_security_name(security_name)

        #
        # Again, the name of the endpoint takes precedence over its ID
        #
        if endpoint_name:
            endpoint = self.pubsub.get_endpoint_by_name(endpoint_name)
            endpoint_id = endpoint.id

        if not endpoint_id:
            if security_id:
                endpoint_id = self.pubsub.get_endpoint_id_by_sec_id(security_id)
            elif ws_channel_id:
                endpoint_id_by_wsx_id = self.pubsub.get_endpoint_id_by_ws_channel_id(ws_channel_id)
                if endpoint_id_by_wsx_id:
                    endpoint_id = endpoint_id_by_wsx_id
                else:
                    # NOTE(review): the `%s` placeholder is not interpolated here - the channel ID
                    # is passed as a second exception argument rather than formatted into the message.
                    raise Exception('Could not find endpoint by WSX channel ID -> `%s`', ws_channel_id)
            else:
                raise Exception('Either security_id or ws_channel_id is required if there is no endpoint_id')

            kwargs = {'security_id':security_id} if security_id else {'ws_channel_id':ws_channel_id}
            pub_pattern_matched = self.pubsub.is_allowed_pub_topic(request.topic_name, **kwargs)

        else:
            pub_pattern_matched = self.pubsub.is_allowed_pub_topic_by_endpoint_id(request.topic_name, endpoint_id)

        # Not allowed, raise an exception in that case
        if not pub_pattern_matched:
            logger.warning('No pub pattern matched topic `%s` and endpoint `%s` (#2)',
                request.topic_name, self.pubsub.get_endpoint_by_id(endpoint_id).name)
            raise Forbidden(request.cid)

        # Alright, we are in
        pub_pattern_matched = cast_('str', pub_pattern_matched)
        return endpoint_id, pub_pattern_matched

# ################################################################################################################################

    def run_from_dict(self, cid:'str', data:'anydict') -> 'PublicationResult':
        """ A convenience wrapper around self.run - builds a PubRequest out of a dict first.
        """
        request = self.marshal_api.from_dict(cast_('Service', None), data, PubRequest, extra={'cid':cid}) # type: PubRequest
        return self.run(request)

# ################################################################################################################################

    def run(self, request:'PubRequest') -> 'PublicationResult':
        """ Processes a single publication request end-to-end - resolves the endpoint and its
        publication pattern, builds the messages and hands them over to self._publish.
        """
        endpoint_id = request.endpoint_id

        # Will return publication pattern matched or raise an exception that we don't catch
        endpoint_id, pub_pattern_matched = self.get_pub_pattern_matched(endpoint_id, request)

        try:
            topic = self.pubsub.get_topic_by_name(request.topic_name) # type: Topic
        except KeyError:
            raise NotFound(request.cid, 'No such topic `{}`'.format(request.topic_name))

        # Reject the message if the topic is not active
        if not topic.is_active:
            raise ServiceUnavailable(request.cid, 'Topic is inactive `{}`'.format(request.topic_name))

        # We always count time in milliseconds since UNIX epoch
        now = utcnow_as_ms()

        # Get all subscribers for that topic from local worker store
        all_subscriptions_by_topic = self.pubsub.get_subscriptions_by_topic(topic.name)
        len_all_sub = len(all_subscriptions_by_topic)

        # If we are to deliver the message(s) to only selected subscribers only,
        # filter out any unwanted ones first.
        if request.deliver_to_sk:
            has_all = False
            subscriptions_by_topic = []

            # Get any matching subscriptions out of the whole set
            for sub in all_subscriptions_by_topic:
                if sub.sub_key in request.deliver_to_sk:
                    subscriptions_by_topic.append(sub)

        else:
            # We deliver this message to all of the topic's subscribers
            has_all = True
            subscriptions_by_topic = all_subscriptions_by_topic

        # This is only for logging purposes
        _subs_found = []

        # Assume that there are no missing servers for WSX clients by default
        has_no_sk_server = False

        for sub in subscriptions_by_topic:

            # Prepare data for logging
            _subs_found.append({sub.sub_key: sub.sub_pattern_matched})

            # Is there at least one WSX subscriber to this topic that is currently not connected?
            # If so, later on we will need to turn all the messages into GD ones.
            sk_server = self.pubsub.get_sub_key_server(sub.sub_key)
            if not sk_server:
                if has_logger_pubsub_debug:
                    logger_pubsub.debug('No sk_server for sub_key `%s` among `%s`', sub.sub_key,
                        sorted(self.pubsub.sub_key_servers.keys()))

                # We have found at least one subscriber that has no server
                # (E.g. this is a WSX that is not currently connected).
                has_no_sk_server = True

        if has_logger_pubsub_debug:
            logger_pubsub.debug('Subscriptions for topic `%s` `%s` (a:%d, %d/%d, cid:%s)',
                topic.name, _subs_found, has_all, len(subscriptions_by_topic), len_all_sub, request.cid)

        # If request.data is a list, it means that it is a list of messages, each of which has its own
        # metadata. Otherwise, it's a string to publish and other request parameters describe it.
        data_list = request.data_list if request.data_list else None

        # Input messages may contain a mix of GD and non-GD messages, and we need to extract them separately.
        msg_id_list, gd_msg_list, non_gd_msg_list = self.get_messages_from_data(
            cid = request.cid,
            topic = topic,
            data_list = data_list,
            request = request,
            now = now,
            pub_pattern_matched = pub_pattern_matched,
            endpoint_id = endpoint_id,
            subscriptions_by_topic = subscriptions_by_topic,
            has_no_sk_server = has_no_sk_server
        )

        # Create a wrapper object for all the request data and metadata
        is_wsx = bool(request.ws_channel_id)
        ctx = PubCtx(
            cid = request.cid,
            cluster_id = self.server.cluster_id,
            pubsub = self.pubsub,
            topic = topic,
            endpoint_id = endpoint_id,
            endpoint_name = self.pubsub.get_endpoint_by_id(endpoint_id).name,
            subscriptions_by_topic = subscriptions_by_topic,
            msg_id_list = msg_id_list,
            gd_msg_list = gd_msg_list,
            non_gd_msg_list = non_gd_msg_list,
            pub_pattern_matched = pub_pattern_matched,
            ext_client_id = request.ext_client_id or '',
            is_first_run = True,
            now = now,
            is_wsx = is_wsx,
            service_invoke_func = self.service_invoke_func,
            new_session_func = self.new_session_func,
        )

        # We have all the request data, publish the message(s) now
        return self._publish(ctx)

# ################################################################################################################################

    def _build_response(self, len_gd_msg_list:'int', ctx:'PubCtx') -> 'PublicationResult':
        """ Return either a single msg_id if there was only one message published or a list of message IDs,
        one for each message published.
        """
        len_msg_list = len_gd_msg_list + len(ctx.non_gd_msg_list)

        if len_msg_list == 1:
            out = ctx.msg_id_list[0]
        else:
            out = ctx.msg_id_list

        return out

# ################################################################################################################################

    def _publish(self, ctx:'PubCtx') -> 'PublicationResult':
        """ Publishes GD and non-GD messages to topics and, if subscribers exist, moves them to their queues / notifies them.
        """
        len_gd_msg_list = len(ctx.gd_msg_list)
        has_gd_msg_list = bool(len_gd_msg_list)

        # Just so it is not overlooked, log information that no subscribers are found for this topic
        if not ctx.subscriptions_by_topic:

            log_msg = 'No matching subscribers found for topic `%s` (cid:%s, first:%d)'
            log_msg_args = ctx.topic.name, ctx.cid, ctx.is_first_run

            # There are no subscribers and depending on configuration we are to drop messages
            # for whom no one is waiting or continue and place them in the topic directly.
            if ctx.topic.config.get('on_no_subs_pub') == PUBSUB.ON_NO_SUBS_PUB.DROP.id:
                log_msg_drop = 'Dropping messages. ' + log_msg
                logger_pubsub.info(log_msg_drop, *log_msg_args)
                return None
            else:
                logger_pubsub.info(log_msg, *log_msg_args)

        # Local aliases
        has_pubsub_audit_log = self.server.has_pubsub_audit_log

        # Increase message counters for this pub/sub server and endpoint
        ctx.pubsub.incr_pubsub_msg_counter(ctx.endpoint_id)

        # Increase message counter for this topic
        ctx.topic.incr_topic_msg_counter(has_gd_msg_list, bool(ctx.non_gd_msg_list))

        # We don't always have GD messages on request so there is no point in running an SQL transaction otherwise.
        if has_gd_msg_list:

            with closing(ctx.new_session_func()) as session:

                # Test first if we should check the depth in this iteration.
                if ctx.topic.needs_depth_check():

                    # Get current depth of this topic ..
                    ctx.current_depth = get_gd_depth_topic(session, ctx.cluster_id, ctx.topic.id)

                    # .. and abort if max depth is already reached ..
                    if ctx.current_depth + len_gd_msg_list > ctx.topic.max_depth_gd:
                        # .. note that this call raises an exception.
                        self.reject_publication(ctx.cid, ctx.topic.name, True)
                    else:

                        # This only updates the local ctx variable
                        ctx.current_depth = ctx.current_depth + len_gd_msg_list

                pub_msg_list = [elem['pub_msg_id'] for elem in ctx.gd_msg_list]

                if has_logger_pubsub_debug:
                    logger_pubsub.debug(_inserting_gd_msg, ctx.topic.name, pub_msg_list, ctx.endpoint_name,
                        ctx.ext_client_id, ctx.cid)

                # This is the call that runs SQL INSERT statements with messages for topics and subscriber queues
                _ = sql_publish_with_retry(
                    now = ctx.now,
                    cid = ctx.cid,
                    topic_id = ctx.topic.id,
                    topic_name = ctx.topic.name,
                    cluster_id = ctx.cluster_id,
                    pub_counter = self.server.get_pub_counter(),
                    session = session,
                    new_session_func = ctx.new_session_func,
                    before_queue_insert_func = None,
                    gd_msg_list = ctx.gd_msg_list,
                    subscriptions_by_topic = ctx.subscriptions_by_topic,
                    should_collect_ctx = False
                )

                # Run an SQL commit for all queries above ..
                session.commit()

            # .. increase the publication counter now that we have committed the messages ..
            self.server.incr_pub_counter()

            # .. and set a flag to signal that there are some GD messages available
            ctx.pubsub.set_sync_has_msg(
                topic_id = ctx.topic.id,
                is_gd = True,
                value = True,
                source = 'Publish.publish',
                gd_pub_time_max = ctx.now
            )

        # Either commit succeeded or there were no GD messages on request but in both cases we can now,
        # optionally, store data in pub/sub audit log.
        if has_pubsub_audit_log:

            log_msg = 'Message published. CID:`%s`, topic:`%s`, from:`%s`, ext_client_id:`%s`, pattern:`%s`, new_depth:`%s`' + \
                ', GD data:`%s`, non-GD data:`%s`'

            logger_audit.info(log_msg, ctx.cid, ctx.topic.name, self.pubsub.endpoints[ctx.endpoint_id].name, # type: ignore
                ctx.ext_client_id, ctx.pub_pattern_matched, ctx.current_depth, ctx.gd_msg_list, ctx.non_gd_msg_list)

        # If this is the very first time we are running during this invocation, try to deliver non-GD messages
        if ctx.is_first_run:

            if ctx.subscriptions_by_topic:

                # Place all the non-GD messages in the in-RAM sync backlog
                if ctx.non_gd_msg_list:
                    ctx.pubsub.store_in_ram(ctx.cid, ctx.topic.id, ctx.topic.name,
                        [item.sub_key for item in ctx.subscriptions_by_topic], ctx.non_gd_msg_list)

            # .. however, if there are no subscriptions at the moment while there are non-GD messages,
            # we need to re-run again and publish all such messages as GD ones. This is because if there
            # are no subscriptions, we do not know to what delivery server they should go, so it's safest
            # to store them in SQL.
            else:
                if ctx.non_gd_msg_list:

                    # Turn all non-GD messages into GD ones.
                    for msg in ctx.non_gd_msg_list:
                        msg['has_gd'] = True

                        logger_pubsub.info(_log_turning_gd_msg.format('no subscribers'), msg['pub_msg_id'])

                        data_prefix, data_prefix_short = self.get_data_prefixes(msg['data'])
                        msg['data_prefix'] = data_prefix
                        msg['data_prefix_short'] = data_prefix_short

                    # Note the reversed order - now non-GD messages are sent as GD ones and the list of non-GD messages is empty.
                    ctx.gd_msg_list = ctx.non_gd_msg_list[:]
                    ctx.non_gd_msg_list[:] = []
                    ctx.is_first_run = False

                    # Re-run with GD and non-GD reversed now
                    return self._publish(ctx)

        # Update topic and endpoint metadata in background if configured to - we have a series of if's to confirm
        # if it's needed because it is not a given that each publication will require the update and we also
        # want to ensure that if there are two things to be updated at a time, it is only one greenlet spawned
        # which will in turn use a single Redis pipeline to cut down on the number of Redis calls needed.
        if ctx.pubsub.has_meta_topic or ctx.pubsub.has_meta_endpoint:

            if ctx.pubsub.has_meta_topic and ctx.topic.needs_meta_update():
                has_topic = True
            else:
                has_topic = False

            if ctx.pubsub.has_meta_endpoint and ctx.pubsub.needs_endpoint_meta_update(ctx.endpoint_id):
                has_endpoint = True
            else:
                has_endpoint = False

            if has_topic or has_endpoint:
                _ = spawn(self.update_pub_metadata, ctx, has_topic, has_endpoint,
                    ctx.pubsub.endpoint_meta_data_len, ctx.pubsub.endpoint_meta_max_history)

        # Build and return a response for our caller.
        out = self._build_response(len_gd_msg_list, ctx)
        return out

# ################################################################################################################################

    def reject_publication(self, cid:'str', topic_name:'str', is_gd:'bool') -> 'None':
        """ Raises an exception to indicate that a publication was rejected.
        """
        raise ServiceUnavailable(cid,
            'Publication rejected - would exceed {} max depth for `{}`'.format('GD' if is_gd else 'non-GD', topic_name))

# ################################################################################################################################

    def update_pub_metadata(
        self,
        ctx:'PubCtx',
        has_topic:'bool',
        has_endpoint:'bool',
        endpoint_data_len:'int',
        endpoint_max_history:'int'
    ) -> 'None':
        """ Updates in background metadata about a topic and/or publisher.
        """
        try:

            # For later use
            dt_now = datetime_from_ms(ctx.now * 1000)

            # This is optional
            ext_pub_time = ctx.last_msg.get('ext_pub_time')
            if ext_pub_time:
                if isinstance(ext_pub_time, str):
                    ext_pub_time = float(ext_pub_time)
                ext_pub_time = datetime_from_ms(ext_pub_time * 1000)

            # Prepare a document to update the topic's metadata with
            if has_topic:
                topic_key = _meta_topic_key % (ctx.cluster_id, ctx.topic.id)
                topic_data = {
                    'pub_time': dt_now,
                    'topic_id': ctx.topic.id,
                    'endpoint_id': ctx.endpoint_id,
                    'endpoint_name': ctx.endpoint_name,
                    'pub_msg_id': ctx.last_msg['pub_msg_id'],
                    'pub_pattern_matched': ctx.pub_pattern_matched,
                    'has_gd': ctx.last_msg['has_gd'],
                    'server_name': self.server.name,
                    'server_pid': self.server.pid,
                }

                # Optional attributes are stored only when they are actually set on the message
                for name in _meta_topic_optional:
                    value = ctx.last_msg.get(name)
                    if value:
                        topic_data[name] = value

                # Store data in RAM
                self.server.pub_sub_metadata.set(topic_key, topic_data)

            # Prepare a request to update the endpoint's metadata with
            if has_endpoint:
                endpoint_key = _meta_endpoint_key % (ctx.cluster_id, ctx.endpoint_id)

                idx_found = None
                endpoint_topic_list = self.server.pub_sub_metadata.get(endpoint_key) or []
                endpoint_topic_list = cast_('dictlist', endpoint_topic_list)

                # If we already have something stored in RAM, find information about this topic and remove it
                # to make room for the newest entry.
                if endpoint_topic_list:
                    for idx, elem in enumerate(endpoint_topic_list):
                        if elem['topic_id'] == ctx.topic.id:
                            idx_found = idx
                            break
                    if idx_found is not None:
                        _ = endpoint_topic_list.pop(idx_found)

                # Newest information about this endpoint's publication to this topic
                endpoint_data = {
                    'pub_time': dt_now,
                    'pub_msg_id': ctx.last_msg['pub_msg_id'],
                    'pub_correl_id': ctx.last_msg.get('pub_correl_id'),
                    'in_reply_to': ctx.last_msg.get('in_reply_to'),
                    'ext_client_id': ctx.last_msg.get('ext_client_id'),
                    'ext_pub_time': ext_pub_time,
                    'pub_pattern_matched': ctx.pub_pattern_matched,
                    'topic_id': ctx.topic.id,
                    'topic_name': ctx.topic.name,
                    'has_gd': ctx.last_msg['has_gd'],
                }

                # Storing actual data along with other information is optional
                data = ctx.last_msg['data'][:endpoint_data_len] if endpoint_data_len else None
                endpoint_data['data'] = data

                # Append the newest entry and sort all results by publication time
                endpoint_topic_list.append(endpoint_data)
                endpoint_topic_list.sort(key=_meta_sort_key, reverse=True)

                # Store only as many entries as configured to
                endpoint_topic_list = endpoint_topic_list[:endpoint_max_history]

                # Same as for topics, store data in RAM
                self.server.pub_sub_metadata.set(endpoint_key, endpoint_topic_list)

            # WSX connections update their SQL pub/sub metadata on their own because
            # each possibly handles multiple sub_keys. Other types of connections
            # update their SQL pub/sub metadata here.
            if not ctx.is_wsx:
                request = {
                    'sub_key': [sub.sub_key for sub in ctx.subscriptions_by_topic],
                    'last_interaction_time': ctx.now,
                    'last_interaction_type': 'publish',
                    'last_interaction_details': 'publish',
                }
                ctx.service_invoke_func('zato.pubsub.subscription.update-interaction-metadata', request)

        except Exception:
            logger.warning('Error while updating pub metadata `%s`', format_exc())
# ################################################################################################################################
| 39,107
|
Python
|
.py
| 705
| 44.21844
| 130
| 0.549032
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,388
|
hook.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/core/hook.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=unused-import, redefined-builtin, unused-variable
# stdlib
import logging
# gevent
from zato.common.api import PUBSUB
from zato.common.util.hook import HookTool
from zato.server.pubsub.model import HookCtx
# ################################################################################################################################
# ################################################################################################################################
# Maps each pub/sub hook type to the name of the method that implements it
# in a user-provided hook service.
hook_type_to_method = {
    PUBSUB.HOOK_TYPE.BEFORE_PUBLISH: 'before_publish',
    PUBSUB.HOOK_TYPE.BEFORE_DELIVERY: 'before_delivery',
    PUBSUB.HOOK_TYPE.ON_OUTGOING_SOAP_INVOKE: 'on_outgoing_soap_invoke',
    PUBSUB.HOOK_TYPE.ON_SUBSCRIBED: 'on_subscribed',
    PUBSUB.HOOK_TYPE.ON_UNSUBSCRIBED: 'on_unsubscribed',
}
# ################################################################################################################################
# ################################################################################################################################
if 0:
    # Type-checking-only imports - the "if 0" guard keeps them from running at import time
    from gevent.lock import RLock
    from zato.common.typing_ import callable_, stranydict
    from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
# Loggers - one for pub/sub details and one for the general server log
logger = logging.getLogger('zato_pubsub.ps')
logger_zato = logging.getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class HookAPI:
    """ Gives pub/sub components access to user-defined hook services,
    attaching per-hook-type service invokers to topic configuration dicts.
    """
    def __init__(
        self,
        *,
        lock,                # type: RLock
        server,              # type: ParallelServer
        invoke_service_func  # type: callable_
    ) -> 'None':
        self.lock = lock
        self.server = server
        self.invoke_service_func = invoke_service_func

        # Manages access to service hooks
        self.hook_tool = HookTool(self.server, HookCtx, hook_type_to_method, self.invoke_service_func)

# ################################################################################################################################

    def set_topic_config_hook_data(self, config:'stranydict') -> 'None':
        """ Populates `config` with callable invokers for each hook type,
        or with a None invoker if the topic has no hook service configured.
        """
        hook_service_id = config.get('hook_service_id')

        # No hook service configured for this topic - store an explicit None invoker and stop.
        if not hook_service_id:
            config['hook_service_invoker'] = None
            return

        # Resolve the service name from its ID unless the caller provided it already.
        if not config['hook_service_name']:
            config['hook_service_name'] = self.server.service_store.get_service_name_by_id(hook_service_id)

        # Each tuple maps the config key that should hold the invoker
        # to the hook type that the invoker is built for:
        #
        # * on_subscribed_service_invoker       - invoked when a new subscription to topic is created
        # * on_unsubscribed_service_invoker     - invoked when an existing subscription to topic is deleted
        # * before_publish_hook_service_invoker - invoked before messages are published
        # * before_delivery_hook_service_invoker - invoked before messages are delivered
        # * on_outgoing_soap_invoke_invoker     - invoked for outgoing SOAP connections
        invoker_config = (
            ('on_subscribed_service_invoker',        PUBSUB.HOOK_TYPE.ON_SUBSCRIBED),
            ('on_unsubscribed_service_invoker',      PUBSUB.HOOK_TYPE.ON_UNSUBSCRIBED),
            ('before_publish_hook_service_invoker',  PUBSUB.HOOK_TYPE.BEFORE_PUBLISH),
            ('before_delivery_hook_service_invoker', PUBSUB.HOOK_TYPE.BEFORE_DELIVERY),
            ('on_outgoing_soap_invoke_invoker',      PUBSUB.HOOK_TYPE.ON_OUTGOING_SOAP_INVOKE),
        )

        hook_service_name = config['hook_service_name']

        for config_key, hook_type in invoker_config:
            config[config_key] = self.hook_tool.get_hook_service_invoker(hook_service_name, hook_type)
# ################################################################################################################################
# ################################################################################################################################
| 4,439
|
Python
|
.py
| 71
| 55.690141
| 130
| 0.453017
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,389
|
pubapi.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/core/pubapi.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from inspect import isclass
# Zato
from zato.common.api import PUBSUB
from zato.common.exception import Forbidden
from zato.common.pubsub import skip_to_external
from zato.common.typing_ import cast_
from zato.common.util.file_system import fs_safe_name
from zato.common.util.wsx import find_wsx_environ
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.model.wsx import WSXConnectorConfig
from zato.common.typing_ import any_, anydict, anylist, anytuple, callable_, commondict, intnone, stranydict, \
strnone, strtuple
from zato.server.connection.web_socket import WebSocket
from zato.server.pubsub import PubSub
from zato.server.pubsub.core.endpoint import EndpointAPI
from zato.server.pubsub.core.topic import TopicAPI
from zato.server.service import Service
from zato.server.service.store import ServiceStore
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger('zato_pubsub.ps')
logger_zato = logging.getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
# Names of internal services used to read and delete messages from subscription
# queues - one pair each for GD (guaranteed delivery) and non-GD messages.
_service_read_messages_gd = 'zato.pubsub.endpoint.get-endpoint-queue-messages-gd'
_service_read_messages_non_gd = 'zato.pubsub.endpoint.get-endpoint-queue-messages-non-gd'

_service_read_message_gd = 'zato.pubsub.message.get-from-queue-gd'
_service_read_message_non_gd = 'zato.pubsub.message.get-from-queue-non-gd'

_service_delete_message_gd = 'zato.pubsub.message.queue-delete-gd'
_service_delete_message_non_gd = 'zato.pubsub.message.queue-delete-non-gd'

# Shorthand for default pub/sub configuration values
_ps_default = PUBSUB.DEFAULT
# ################################################################################################################################
# ################################################################################################################################
class MsgConst:
    """ Log-message templates used by this module. """
    # %s placeholders: sub_key, peer information
    wsx_sub_resumed = 'WSX subscription resumed, sk:`%s`, peer:`%s`'
# ################################################################################################################################
# ################################################################################################################################
class PubAPI:
    """ The public-facing pub/sub API - publishes messages to topics or services,
    reads and deletes messages from subscription queues, and manages topics
    and subscriptions. Each operation delegates to an internal Zato service.
    """
    def __init__(
        self,
        *,
        # This is PubSub but we cannot use it because of spurious "cycle detected" warnings from pylance
        pubsub,        # type: PubSub
        cluster_id,    # type: int
        service_store, # type: ServiceStore
        topic_api,     # type: TopicAPI
        endpoint_api,  # type: EndpointAPI
    ) -> 'None':
        self.pubsub = pubsub
        self.cluster_id = cluster_id
        self.service_store = service_store
        self.topic_api = topic_api
        self.endpoint_api = endpoint_api

# ################################################################################################################################

    def _find_wsx_environ(self, service:'Service') -> 'stranydict':
        """ Extracts WebSocket metadata from a service's WSGI environ,
        raising an exception if it is not there.
        """
        wsx_environ = service.wsgi_environ.get('zato.request_ctx.async_msg', {}).get('environ')
        if not wsx_environ:
            raise Exception('Could not find `[\'zato.request_ctx.async_msg\'][\'environ\']` in WSGI environ `{}`'.format(
                service.wsgi_environ))
        else:
            return wsx_environ

# ################################################################################################################################

    def publish(self, name:'any_', *args:'any_', **kwargs:'any_') -> 'any_':
        """ Publishes a new message to input name, which may point either to a topic or service.
        POST /zato/pubsub/topic/{topic_name}

        Returns the published message's ID (or list of IDs), if any.
        Raises Forbidden if the name is unknown or the endpoint may not publish to it.
        """
        # We need to import it here to avoid circular imports
        from zato.server.service import Service

        # Initialize here for type checking
        ws_channel_id = None

        # For later use
        from_service = cast_('Service', kwargs.get('service'))
        ext_client_id = from_service.name if from_service else kwargs.get('ext_client_id')

        # The first one is used if name is a service, the other one if it is a regular topic
        correl_id = kwargs.get('cid') or kwargs.get('correl_id')

        has_gd = kwargs.get('has_gd')
        has_gd = cast_('bool', has_gd)

        # By default, assume that cannot find any endpoint on input
        endpoint_id = None

        # If this is a WebSocket, we need to find its ws_channel_id ..
        if from_service:
            wsx_environ = find_wsx_environ(from_service, raise_if_not_found=False)
            if wsx_environ:
                wsx_config = wsx_environ['ws_channel_config'] # type: WSXConnectorConfig
                ws_channel_id = wsx_config.id
                endpoint = self.endpoint_api.get_by_ws_channel_id(ws_channel_id)
                endpoint_id = endpoint.id

        # Otherwise, use various default data.
        if not endpoint_id:
            endpoint_id = kwargs.get('endpoint_id') or self.pubsub.get_default_internal_pubsub_endpoint_id()
            endpoint_id = cast_('int', endpoint_id)

        # If input name is a topic, let us just use it
        if self.topic_api.has_topic_by_name(name):
            topic_name = name

            # There is no particular Zato context if the topic name is not really a service name
            zato_ctx = None

        # Otherwise, if there is no topic by input name, it may be actually a service name ..
        else:

            # .. it may be a Python class representing the service ..
            if isclass(name) and issubclass(name, Service):
                name = name.get_name()
            else:
                name = cast_('str', name)

            # .. but if there is no such service at all, we give up.
            if not self.service_store.has_service(name):
                msg = f'No such topic or service `{name}` (cid:{correl_id})'
                logger.info(msg)
                raise Forbidden(correl_id, 'You are not allowed to access this resource')

            # At this point we know this is a service so we may build the topic's full name,
            # taking into account the fact that a service's name is an arbitrary string
            # so we need to make it filesystem-safe.
            topic_name = PUBSUB.TOPIC_PATTERN.TO_SERVICE.format(fs_safe_name(name))

            # We continue only if the publisher is allowed to publish messages to that service.
            if not self.pubsub.is_allowed_pub_topic_by_endpoint_id(topic_name, endpoint_id):
                endpoint = self.pubsub.get_endpoint_by_id(endpoint_id)
                msg = f'No pub pattern matched service `{name}` and endpoint `{endpoint.name}` (#1) (cid:{correl_id})'
                logger.info(msg)
                raise Forbidden(correl_id, 'You are not allowed to access this resource')

            # We create a topic for that service to receive messages from unless it already exists
            if not self.topic_api.has_topic_by_name(topic_name):
                self.pubsub.create_topic_for_service(name, topic_name)
                _ = self.pubsub.wait_for_topic(topic_name)

            # Messages published to services always use GD
            has_gd = True

            # Subscribe the default service delivery endpoint to messages from this topic

            endpoint = self.endpoint_api.get_by_name(PUBSUB.SERVICE_SUBSCRIBER.NAME)
            if not self.pubsub.is_subscribed_to(endpoint.id, topic_name):

                # Subscribe the service to this topic ..
                sub_key = self.subscribe(topic_name, endpoint_name=endpoint.name, is_internal=True, delivery_batch_size=1)

                # .. and configure pub/sub metadata for the newly created subscription.
                self.pubsub.set_config_for_service_subscription(sub_key)

            # We need a Zato context to relay information about the service pointed to by the published message
            zato_ctx = {
                'target_service_name': name
            }

        # Data may be either in keyword arguments ..
        if 'data' in kwargs:
            data = kwargs['data'] or ''

        # .. or it may be provided inline among positional arguments ..
        elif args:
            data = args[0] or ''

        # .. otherwise, we assume that the data should be an empty string.
        else:
            data = ''

        data_list = kwargs.get('data_list') or []
        msg_id = kwargs.get('msg_id') or ''
        priority = kwargs.get('priority')
        expiration = kwargs.get('expiration')
        mime_type = kwargs.get('mime_type')
        in_reply_to = kwargs.get('in_reply_to')
        ext_pub_time = kwargs.get('ext_pub_time')
        reply_to_sk = kwargs.get('reply_to_sk')
        deliver_to_sk = kwargs.get('deliver_to_sk')
        user_ctx = kwargs.get('user_ctx')
        zato_ctx = zato_ctx or kwargs.get('zato_ctx')

        request = {
            'topic_name': topic_name,
            'data': data,
            'data_list': data_list,
            'msg_id': msg_id,
            'has_gd': has_gd,
            'priority': priority,
            'expiration': expiration,
            'mime_type': mime_type,
            'correl_id': correl_id,
            'in_reply_to': in_reply_to,
            'ext_client_id': ext_client_id,
            'ext_pub_time': ext_pub_time,
            'endpoint_id': endpoint_id,
            'ws_channel_id': ws_channel_id,
            'reply_to_sk': reply_to_sk,
            'deliver_to_sk': deliver_to_sk,
            'user_ctx': user_ctx,
            'zato_ctx': zato_ctx,
        } # type: anydict

        response = self.pubsub.invoke_service('zato.pubsub.publish.publish', request, serialize=False)

        # The response may arrive either as a plain dict or as a service response object.
        if isinstance(response, dict):
            if 'response' in response:
                response = response['response']
            has_data = bool(response)
        else:
            has_data = response.has_data()

        if has_data:
            return response.get('msg_id') or response.get('msg_id_list')

# ################################################################################################################################

    def get_messages(self,
        topic_name,    # type: str
        sub_key,       # type: str
        /,
        needs_details=False, # type: bool
        needs_msg_id=False,  # type: bool
        _skip=skip_to_external # type: strtuple
    ) -> 'anylist':
        """ Returns messages from a subscriber's queue, deleting them from the queue in progress.
        POST /zato/pubsub/topic/{topic_name}?sub_key=...
        """
        response = self.pubsub.invoke_service('zato.pubsub.endpoint.get-delivery-messages', {
            'cluster_id': self.cluster_id,
            'sub_key': sub_key,
        }, serialize=False)
        response = response['response']

        # Already includes all the details ..
        if needs_details:
            return response

        # .. otherwise, we need to make sure they are not returned
        out = [] # type: anylist
        for item in response:
            for name in _skip:
                value = item.pop(name, None)
                # Optionally, expose the internal pub_msg_id under the external msg_id key
                if needs_msg_id and name == 'pub_msg_id':
                    item['msg_id'] = value
            out.append(item)
        return out

# ################################################################################################################################

    def read_messages(self,
        topic_name, # type: str
        sub_key,    # type: str
        has_gd,     # type: bool
        *args,      # type: any_
        **kwargs    # type: any_
    ) -> 'any_':
        """ Looks up messages in subscriber's queue by input criteria without deleting them from the queue.
        """
        service_name = _service_read_messages_gd if has_gd else _service_read_messages_non_gd

        # Note the use of dict.get with a default value instead of the "or" idiom -
        # this way an explicit paginate=False from the caller is honoured rather
        # than being silently coerced back to True.
        paginate = kwargs.get('paginate', True)
        query = kwargs.get('query') or ''
        cur_page = kwargs.get('cur_page') or 1

        return self.pubsub.invoke_service(service_name, {
            'cluster_id': self.cluster_id,
            'sub_key': sub_key,
            'paginate': paginate,
            'query': query,
            'cur_page': cur_page,
        }, serialize=False).response

# ################################################################################################################################

    def read_message(self,
        topic_name, # type: str
        msg_id,     # type: str
        has_gd,     # type: bool
        *args,      # type: any_
        **kwargs    # type: any_
    ) -> 'any_':
        """ Returns details of a particular message without deleting it from the subscriber's queue.

        For non-GD messages, all of sub_key, server_name and server_pid keyword
        arguments are required because such messages live in a specific server process.
        """
        # Forward reference
        service_data = {} # type: commondict

        if has_gd:
            service_name = _service_read_message_gd
            service_data = {
                'cluster_id': self.cluster_id,
                'msg_id': msg_id
            }
        else:
            sub_key = kwargs.get('sub_key')
            server_name = kwargs.get('server_name')
            server_pid = kwargs.get('server_pid')

            if not(sub_key and server_name and server_pid):
                raise Exception('All of sub_key, server_name and server_pid are required for non-GD messages')

            service_name = _service_read_message_non_gd
            service_data = {
                'cluster_id': self.cluster_id,
                'msg_id': msg_id,
                'sub_key': sub_key,
                'server_name': server_name,
                'server_pid': server_pid,
            }

        return self.pubsub.invoke_service(service_name, service_data, serialize=False).response

# ################################################################################################################################

    def delete_message(self, sub_key:'str', msg_id:'str', has_gd:'bool', *args:'anytuple', **kwargs:'any_') -> 'any_':
        """ Deletes a message from a subscriber's queue.
        DELETE /zato/pubsub/msg/{msg_id}

        For non-GD messages, server_name and server_pid keyword arguments are required.
        """
        service_data = {
            'sub_key': sub_key,
            'msg_id': msg_id,
        } # type: stranydict

        if has_gd:
            service_name = _service_delete_message_gd
            service_data['cluster_id'] = self.cluster_id
        else:
            server_name = cast_('str', kwargs.get('server_name', ''))
            server_pid = cast_('int', kwargs.get('server_pid', 0))

            if not(sub_key and server_name and server_pid):
                raise Exception('All of sub_key, server_name and server_pid are required for non-GD messages')

            service_name = _service_delete_message_non_gd
            service_data['server_name'] = server_name
            service_data['server_pid'] = server_pid

        # There is no response currently but one may be added at a later time
        return self.pubsub.invoke_service(service_name, service_data, serialize=False)

# ################################################################################################################################

    def subscribe(self,
        topic_name, # type: str
        _find_wsx_environ=find_wsx_environ, # type: callable_
        **kwargs    # type: any_
    ) -> 'str':
        """ Subscribes an endpoint to a topic and returns the new subscription's sub_key.

        With use_current_wsx=True, the current WebSocket (taken from the required
        `service` keyword argument) becomes the subscriber; otherwise an
        `endpoint_name` keyword argument is required.
        """
        # Forward reference
        wsgi_environ = {} # type: stranydict

        # Are we going to subscribe a WSX client?
        use_current_wsx = kwargs.get('use_current_wsx')

        # This is always needed to invoke the subscription service
        request = {
            'topic_name': topic_name,
            'is_internal': kwargs.get('is_internal') or False,
            'wrap_one_msg_in_list': kwargs.get('wrap_one_msg_in_list', True),
            'delivery_batch_size': kwargs.get('delivery_batch_size', PUBSUB.DEFAULT.DELIVERY_BATCH_SIZE),
        } # type: stranydict

        # This is a subscription for a WebSocket client ..
        if use_current_wsx:
            service = cast_('Service', kwargs.get('service'))

            if not service:
                raise Exception('Parameter `service` is required if `use_current_wsx` is True')

            # If the caller wants to subscribe a WebSocket, make sure the WebSocket's metadata
            # is given to us on input - the call below will raise an exception if it was not,
            # otherwise it will return WSX metadata out which we can extract our WebSocket object.
            wsx_environ = _find_wsx_environ(service)
            wsx = wsx_environ['web_socket']

            # All set, we can carry on with other steps now
            sub_service_name = PUBSUB.SUBSCRIBE_CLASS.get(PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id)
            wsgi_environ = service.wsgi_environ
            kwargs_wsgi_environ = kwargs.get('wsgi_environ') or {}
            wsgi_environ = wsgi_environ or kwargs_wsgi_environ
            wsgi_environ['zato.request_ctx.pubsub.unsub_on_wsx_close'] = kwargs.get('unsub_on_wsx_close')

        # .. this is a subscription for any client that is not WebSockets-based
        else:

            # We do not use WebSockets here
            wsx = None

            # Non-WSX endpoints always need to be identified by their names
            endpoint_name = cast_('str', kwargs.get('endpoint_name'))
            if not endpoint_name:
                raise Exception('Parameter `endpoint_name` is required for non-WebSockets subscriptions')
            else:
                endpoint = self.endpoint_api.get_by_name(endpoint_name)

            # Required to subscribe non-WSX endpoints
            request['endpoint_id'] = endpoint.id

            sub_service_name = PUBSUB.SUBSCRIBE_CLASS.get(endpoint.endpoint_type)
            wsgi_environ = {} # type: ignore[no-redef]

        # Actually subscribe the caller
        response = self.pubsub.invoke_service(
            sub_service_name,
            request,
            wsgi_environ=wsgi_environ,
            serialize=False
        )

        if isinstance(response, dict) and 'response' in response:
            response = response['response']

        # If this was a WebSocket caller, we can now update its pub/sub metadata
        if use_current_wsx:
            if wsx:
                wsx.set_last_interaction_data('pubsub.subscribe')

        return response['sub_key']

# ################################################################################################################################

    def resume_wsx_subscription(
        self,
        sub_key, # type: str
        service, # type: Service
        _find_wsx_environ=find_wsx_environ # type: callable_
    ) -> 'None':
        """ Invoked by WSX clients that want to resume deliveries of their messages after they reconnect.
        """
        # Get metadata and the WebSocket itself
        wsx_environ = _find_wsx_environ(service)
        wsx = wsx_environ['web_socket'] # type: WebSocket

        # Actual resume subscription
        _ = self.pubsub.invoke_service('zato.pubsub.resume-wsx-subscription', {
            'sql_ws_client_id': wsx_environ['sql_ws_client_id'],
            'channel_name': wsx_environ['ws_channel_config'].name,
            'pub_client_id': wsx_environ['pub_client_id'],
            'web_socket': wsx,
            'sub_key': sub_key
        }, wsgi_environ=service.wsgi_environ)

        # If we get here, it means the service succeeded so we can update that WebSocket's pub/sub metadata
        wsx.set_last_interaction_data('wsx.resume_wsx_subscription')

        # All done, we can store a new entry in logs now
        peer_info = wsx.get_peer_info_pretty()

        logger.info(MsgConst.wsx_sub_resumed, sub_key, peer_info)
        logger_zato.info(MsgConst.wsx_sub_resumed, sub_key, peer_info)

# ################################################################################################################################

    def create_topic(
        self,
        *,
        name,                     # type: str
        has_gd=False,             # type: bool
        accept_on_no_sub=True,    # type: bool
        is_active=True,           # type: bool
        is_internal=False,        # type: bool
        is_api_sub_allowed=True,  # type: bool
        hook_service_id=None,     # type: intnone
        target_service_name=None, # type: strnone
        task_sync_interval=_ps_default.TASK_SYNC_INTERVAL,         # type: int
        task_delivery_interval=_ps_default.TASK_DELIVERY_INTERVAL, # type: int
        depth_check_freq=_ps_default.DEPTH_CHECK_FREQ,             # type: int
        max_depth_gd=_ps_default.TOPIC_MAX_DEPTH_GD,               # type: int
        max_depth_non_gd=_ps_default.TOPIC_MAX_DEPTH_NON_GD,       # type: int
        pub_buffer_size_gd=_ps_default.PUB_BUFFER_SIZE_GD,         # type: int
    ) -> 'None':
        """ Creates a new pub/sub topic by invoking the internal topic-creation service.
        """
        _ = self.pubsub.invoke_service('zato.pubsub.topic.create', {
            'cluster_id': self.cluster_id,
            'name': name,
            'is_active': is_active,
            'is_internal': is_internal,
            'is_api_sub_allowed': is_api_sub_allowed,
            'has_gd': has_gd,
            'hook_service_id': hook_service_id,
            'target_service_name': target_service_name,
            'on_no_subs_pub': PUBSUB.ON_NO_SUBS_PUB.ACCEPT.id if accept_on_no_sub else PUBSUB.ON_NO_SUBS_PUB.DROP.id,
            'task_sync_interval': task_sync_interval,
            'task_delivery_interval': task_delivery_interval,
            'depth_check_freq': depth_check_freq,
            'max_depth_gd': max_depth_gd,
            'max_depth_non_gd': max_depth_non_gd,
            'pub_buffer_size_gd': pub_buffer_size_gd,
        })
# ################################################################################################################################
# ################################################################################################################################
| 23,857
|
Python
|
.py
| 432
| 45.280093
| 130
| 0.50045
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,390
|
topic.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/core/topic.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=unused-import, redefined-builtin, unused-variable
# stdlib
import logging
# gevent
from zato.common.typing_ import cast_
from zato.server.pubsub.model import Topic
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anylist, callable_, dict_, stranydict, strintdict
from zato.server.pubsub.core.hook import HookAPI
from zato.server.pubsub.model import inttopicdict, sublist, topiclist
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger('zato_pubsub.ps')
logger_zato = logging.getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class TopicAPI:
    """ An in-RAM registry of pub/sub topics, mapping topic IDs and names
    to Topic objects and answering topic lookup queries.
    """
    def __init__(
        self,
        *,
        hook_api,                   # type: HookAPI
        server_name,                # type: str
        server_pid,                 # type: int
        topic_meta_store_frequency, # type: int
        subscriptions_by_topic,     # type: dict_[str, sublist]
        is_allowed_sub_topic_by_endpoint_id_func, # type: callable_
    ) -> 'None':
        self.hook_api = hook_api
        self.is_allowed_sub_topic_by_endpoint_id_func = is_allowed_sub_topic_by_endpoint_id_func
        self.server_name = server_name
        self.server_pid = server_pid
        self.topic_meta_store_frequency = topic_meta_store_frequency

        # Topic name -> List of Subscription objects
        self.subscriptions_by_topic = subscriptions_by_topic

        # Topic ID -> Topic object
        self.topics = cast_('inttopicdict', {})

        # Topic name -> Topic ID
        self.topic_name_to_id = {} # type: strintdict

# ################################################################################################################################

    def has_topic_by_id(self, topic_id:'int') -> 'bool':
        """ Returns True if a topic with the given ID exists. """
        return topic_id in self.topics

# ################################################################################################################################

    def has_topic_by_name(self, topic_name:'str') -> 'bool':
        """ Returns True if a topic with the given name exists. """
        try:
            _ = self.get_topic_by_name(topic_name)
        except KeyError:
            return False
        return True

# ################################################################################################################################

    def get_topics(self) -> 'inttopicdict':
        """ Returns the dictionary of all topics, keyed by topic ID. """
        return self.topics

# ################################################################################################################################

    def get_topic_by_name(self, topic_name:'str') -> 'Topic':
        """ Returns the topic of the given name, raising KeyError if there is none. """
        return self.topics[self.get_topic_id_by_name(topic_name)]

# ################################################################################################################################

    def get_topic_by_id(self, topic_id:'int') -> 'Topic':
        """ Returns the topic of the given ID, raising KeyError if there is none. """
        return self.topics[topic_id]

# ################################################################################################################################

    def get_topic_id_by_name(self, topic_name:'str') -> 'int':
        """ Maps a topic's name to its ID, raising KeyError if there is none. """
        return self.topic_name_to_id[topic_name]

# ################################################################################################################################

    def create_topic_object(self, config:'stranydict') -> 'None':
        """ Builds a Topic object out of its configuration dict and registers it in both maps. """
        self.hook_api.set_topic_config_hook_data(config)
        config['meta_store_frequency'] = self.topic_meta_store_frequency

        new_topic = Topic(config, self.server_name, self.server_pid)
        self.topics[config['id']] = new_topic
        self.topic_name_to_id[config['name']] = config['id']

        logger.info('Created topic object `%s` (id:%s) on server `%s` (pid:%s)', new_topic.name, new_topic.id,
            new_topic.server_name, new_topic.server_pid)

# ################################################################################################################################

    def delete_topic(self, topic_id:'int', topic_name:'str') -> 'anylist':
        """ Removes a topic from both maps and returns the subscriptions it had, if any. """
        del self.topic_name_to_id[topic_name]
        topic_subs = self.subscriptions_by_topic.pop(topic_name, [])
        del self.topics[topic_id]

        logger.info('Deleted topic object `%s` (%s), subs:`%s`',
            topic_name, topic_id, [sub.sub_key for sub in topic_subs])

        return topic_subs

# ################################################################################################################################

    def get_sub_topics_for_endpoint(self, endpoint_id:'int') -> 'topiclist':
        """ Returns all topics to which endpoint_id can subscribe.
        """
        return [topic for topic in self.topics.values()
            if self.is_allowed_sub_topic_by_endpoint_id_func(topic.name, endpoint_id)]
# ################################################################################################################################
# ################################################################################################################################
| 5,969
|
Python
|
.py
| 102
| 51.754902
| 130
| 0.395229
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,391
|
trigger.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/core/trigger.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=unused-import, redefined-builtin, unused-variable
# stdlib
import logging
from traceback import format_exc
# gevent
from gevent import sleep, spawn
from zato.common.typing_ import cast_
from zato.common.util.api import new_cid
# ################################################################################################################################
# ################################################################################################################################
if 0:
from gevent.lock import RLock
from zato.common.typing_ import anydict, callable_, intanydict, intnone
from zato.server.pubsub.model import inttopicdict, sublist
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger('zato_pubsub.ps')
logger_zato = logging.getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class NotifyPubSubTasksTrigger:
    """ A background trigger that periodically wakes up pub/sub delivery tasks,
    letting them know that new GD and/or non-GD messages may be waiting in topics.
    All collaborators are injected as callables so the trigger stays decoupled
    from the concrete PubSub implementation.
    """
    def __init__(
        self,
        *,
        lock, # type: RLock
        topics, # type: inttopicdict
        sync_max_iters, # type: intnone
        invoke_service_func, # type: callable_
        set_sync_has_msg_func, # type: callable_
        get_subscriptions_by_topic_func, # type: callable_
        get_delivery_server_by_sub_key_func, # type: callable_
        sync_backlog_get_delete_messages_by_sub_keys_func # type: callable_
    ) -> 'None':
        """ Stores injected dependencies; sync_max_iters, when given, bounds
        how many iterations the run() loop executes (useful in tests).
        """
        self.lock = lock
        self.topics = topics
        self.sync_max_iters = sync_max_iters
        self.invoke_service_func = invoke_service_func
        self.set_sync_has_msg_func = set_sync_has_msg_func
        self.get_subscriptions_by_topic_func = get_subscriptions_by_topic_func
        self.get_delivery_server_by_sub_key_func = get_delivery_server_by_sub_key_func
        self.sync_backlog_get_delete_messages_by_sub_keys_func = sync_backlog_get_delete_messages_by_sub_keys_func
        # Flag checked by the run() loop - set to False to stop the trigger
        self.keep_running = True

# ################################################################################################################################

    def run(self) -> 'None':
        """ A background greenlet which periodically lets delivery tasks know that there are perhaps
        new GD messages for the topic that this class represents.
        """
        # Local aliases - bound once so the hot loop below uses fast local lookups
        _current_iter = 0
        _new_cid = new_cid
        _spawn = cast_('callable_', spawn)
        _sleep = cast_('callable_', sleep)
        _self_lock = self.lock
        _self_topics = self.topics
        _logger_info = logger.info
        _logger_warn = logger.warning
        _logger_zato_warn = logger_zato.warning
        _self_invoke_service = self.invoke_service_func
        _self_set_sync_has_msg = self.set_sync_has_msg_func
        _self_get_subscriptions_by_topic = self.get_subscriptions_by_topic_func
        _self_get_delivery_server_by_sub_key = self.get_delivery_server_by_sub_key_func
        _sync_backlog_get_delete_messages_by_sub_keys = self.sync_backlog_get_delete_messages_by_sub_keys_func

        # Sort key used to order non-GD messages by their publication time
        def _cmp_non_gd_msg(elem:'anydict') -> 'float':
            return elem['pub_time']

        # Loop forever or until stopped
        while self.keep_running:

            # Optionally, we may have a limit on how many iterations this loop should last
            # and we need to check if we have reached it.
            if self.sync_max_iters:
                if _current_iter >= self.sync_max_iters:
                    self.keep_running = False

            # This may be handy for logging purposes, even if there is no max. for the loop iters
            _current_iter += 1

            # Sleep for a while before continuing - the call to sleep is here because this while loop is quite long
            # so it would be inconvenient to have it down below.
            _sleep(0.01)

            # Blocks other pub/sub processes for a moment
            with _self_lock:

                # Will map a few temporary objects down below - topic ID -> (topic name, subscriptions)
                topic_id_dict = {} # type: intanydict

                # Get all topics ..
                for _topic in _self_topics.values():

                    # Does the topic require task synchronization now?
                    if not _topic.needs_task_sync():
                        continue
                    else:
                        _topic.update_task_sync_time()

                    # OK, the time has come for this topic to sync its state with subscribers
                    # but still skip it if we know that there have been no messages published to it since the last time.
                    if not (_topic.sync_has_gd_msg or _topic.sync_has_non_gd_msg):
                        continue

                    # There are some messages, let's see if there are subscribers ..
                    subs = [] # type: sublist
                    _subs = _self_get_subscriptions_by_topic(_topic.name)

                    # Filter out subscriptions for whom we have no subscription servers
                    for _sub in _subs:
                        if _self_get_delivery_server_by_sub_key(_sub.sub_key):
                            subs.append(_sub)

                    # .. if there are any subscriptions at all, we store that information for later use.
                    if subs:
                        topic_id_dict[_topic.id] = (_topic.name, subs)

                # OK, if we had any subscriptions for at least one topic and there are any messages waiting,
                # we can continue.
                try:
                    for topic_id in topic_id_dict:
                        topic = _self_topics[topic_id]

                        # .. get the temporary metadata object stored earlier ..
                        topic_name, subs = topic_id_dict[topic_id]

                        cid = _new_cid()
                        _logger_info('Triggering sync for `%s` len_s:%d gd:%d ngd:%d cid:%s',
                            topic_name, len(subs), topic.sync_has_gd_msg, topic.sync_has_non_gd_msg, cid)

                        # Build a list of sub_keys for whom we know what their delivery server is which will
                        # allow us to send messages only to tasks that are known to be up.
                        sub_keys = [item.sub_key for item in subs]

                        # Continue only if there are actually any sub_keys left = any tasks up and running ..
                        if sub_keys:
                            non_gd_msg_list = _sync_backlog_get_delete_messages_by_sub_keys(topic_id, sub_keys)

                            # .. also, continue only if there are still messages for the ones that are up ..
                            if topic.sync_has_gd_msg or topic.sync_has_non_gd_msg:

                                # Note that we may have both GD and non-GD messages on input
                                # and we need to have a max that takes both into account.
                                max_gd = 0
                                max_non_gd = 0

                                # If there are any non-GD messages, get their max. pub time
                                if non_gd_msg_list:
                                    non_gd_msg_list = sorted(non_gd_msg_list, key=_cmp_non_gd_msg)
                                    max_non_gd = non_gd_msg_list[-1]['pub_time']

                                # This will be always available, even if with a value of 0.0
                                max_gd = topic.gd_pub_time_max

                                # Now, we can build a max. pub time that takes GD and non-GD into account.
                                pub_time_max = max(max_gd, max_non_gd)

                                non_gd_msg_list_msg_id_list = [elem['pub_msg_id'] for elem in non_gd_msg_list]

                                _logger_info('Forwarding messages to a task for `%s` ngd-list:%s (sk_list:%s) cid:%s',
                                    topic_name, non_gd_msg_list_msg_id_list, sub_keys, cid)

                                # .. and notify all the tasks in background.
                                _ = _spawn(_self_invoke_service, 'zato.pubsub.after-publish', {
                                    'cid': cid,
                                    'topic_id':topic_id,
                                    'topic_name':topic_name,
                                    'subscriptions': subs,
                                    'non_gd_msg_list': non_gd_msg_list,
                                    'has_gd_msg_list': topic.sync_has_gd_msg,
                                    'is_bg_call': True, # This is a background call, i.e. issued by this trigger,
                                    'pub_time_max': pub_time_max, # Last time either a non-GD or GD message was received
                                })

                                # OK, we can now reset message flags for the topic
                                # NOTE(review): presumably the positional arguments are (topic_id, is_gd, value, source),
                                # i.e. both the GD and non-GD flags are reset to False here - confirm with the callee.
                                _self_set_sync_has_msg(topic_id, True, False, 'PubSub.loop')
                                _self_set_sync_has_msg(topic_id, False, False, 'PubSub.loop')

                except Exception:
                    # Log to both loggers but keep the loop alive - a failed sync must not stop the trigger
                    e_formatted = format_exc()
                    _logger_zato_warn(e_formatted)
                    _logger_warn(e_formatted)
# ################################################################################################################################
# ################################################################################################################################
| 10,112
|
Python
|
.py
| 159
| 46.987421
| 130
| 0.475098
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,392
|
sql.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/core/sql.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
# Zato
from zato.common.odb.query.pubsub.delivery import \
confirm_pubsub_msg_delivered as _confirm_pubsub_msg_delivered, \
get_delivery_server_for_sub_key as _get_delivery_server_for_sub_key, \
get_sql_messages_by_msg_id_list as _get_sql_messages_by_msg_id_list, \
get_sql_messages_by_sub_key as _get_sql_messages_by_sub_key, \
get_sql_msg_ids_by_sub_key as _get_sql_msg_ids_by_sub_key
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.typing_ import any_, anytuple, callable_, intset, strlist
# ################################################################################################################################
# ################################################################################################################################
class SQLAPI:
    """ Gives access to pub/sub queries in the SQL database. Each method either uses
    a session given on input or opens - and closes - one of its own via new_session_func.
    """
    def __init__(
        self,
        cluster_id,       # type: int
        new_session_func, # type: callable_
    ) -> 'None':
        self.cluster_id = cluster_id
        self.new_session_func = new_session_func

# ################################################################################################################################

    def get_delivery_server_for_sub_key(self, sub_key:'str', is_wsx:'bool') -> 'any_':
        """ Returns the delivery server assigned to the input sub_key, using a short-lived session.
        """
        with closing(self.new_session_func()) as session:
            return _get_delivery_server_for_sub_key(session, self.cluster_id, sub_key, is_wsx)

# ################################################################################################################################

    def get_sql_messages_by_sub_key(
        self,
        session,      # type: any_
        sub_key_list, # type: strlist
        last_sql_run, # type: float
        pub_time_max, # type: float
        ignore_list   # type: intset
    ) -> 'anytuple':
        """ Returns all SQL messages queued up for all keys from sub_key_list.
        """
        # A session given on input belongs to the caller - use it as-is
        # and leave closing it to the caller too ..
        if session:
            return _get_sql_messages_by_sub_key(session, self.cluster_id, sub_key_list,
                last_sql_run, pub_time_max, ignore_list)

        # .. otherwise, open a session of our own and make sure it is closed afterwards.
        with closing(self.new_session_func()) as own_session:
            return _get_sql_messages_by_sub_key(own_session, self.cluster_id, sub_key_list,
                last_sql_run, pub_time_max, ignore_list)

# ################################################################################################################################

    def get_initial_sql_msg_ids_by_sub_key(
        self,
        session:'SASession',
        sub_key:'str',
        pub_time_max:'float'
    ) -> 'anytuple':
        """ Returns IDs of messages enqueued for sub_key, from the beginning of time (0.0) up to pub_time_max.
        """
        result = _get_sql_msg_ids_by_sub_key(session, self.cluster_id, sub_key, 0.0, pub_time_max)
        return result.all()

# ################################################################################################################################

    def get_sql_messages_by_msg_id_list(
        self,
        session,      # type: any_
        sub_key,      # type: str
        pub_time_max, # type: float
        msg_id_list   # type: strlist
    ) -> 'anytuple':
        """ Returns full message rows for the input list of message IDs belonging to sub_key.
        """
        result = _get_sql_messages_by_msg_id_list(session, self.cluster_id, sub_key, pub_time_max, msg_id_list)
        return result.all()

# ################################################################################################################################

    def confirm_pubsub_msg_delivered(
        self,
        sub_key,                   # type: str
        delivered_pub_msg_id_list  # type: strlist
    ) -> 'None':
        """ Sets in SQL delivery status of a given message to True.
        """
        with closing(self.new_session_func()) as sql_session:
            _confirm_pubsub_msg_delivered(sql_session, self.cluster_id, sub_key, delivered_pub_msg_id_list, utcnow_as_ms())
            sql_session.commit()
# ################################################################################################################################
# ################################################################################################################################
| 4,565
|
Python
|
.py
| 88
| 44.977273
| 130
| 0.412438
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,393
|
endpoint.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/core/endpoint.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.api import PUBSUB
from zato.common.typing_ import cast_
from zato.common.util.api import wait_for_dict_key
from zato.server.pubsub.model import Endpoint
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anydict, anytuple, callable_, dict_, intdict, intnone, strcalldict
strcalldict = strcalldict
# ################################################################################################################################
# ################################################################################################################################
_pub_role = (PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id, PUBSUB.ROLE.PUBLISHER.id)
_sub_role = (PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id, PUBSUB.ROLE.SUBSCRIBER.id)
# ################################################################################################################################
# ################################################################################################################################
class EndpointAPI:
    """ Keeps track of pub/sub endpoints along with the reverse mappings between
    endpoints and the security definitions, services and WSX channels they are tied to.
    """

    endpoints: 'dict_[int, Endpoint]'
    endpoint_impl_getter: 'strcalldict'
    sec_id_to_endpoint_id: 'intdict'
    service_id_to_endpoint_id: 'intdict'
    ws_channel_id_to_endpoint_id: 'intdict'

    def __init__(self) -> 'None':

        # Endpoint ID -> Endpoint object
        self.endpoints = {}

        # Getter methods for each endpoint type that return actual endpoints,
        # e.g. REST outgoing connections. Values are set by worker store.
        self.endpoint_impl_getter = cast_('strcalldict', dict.fromkeys(PUBSUB.ENDPOINT_TYPE()))

        # Reverse lookups from related definitions to endpoint IDs:
        # security definition ID, service ID and WSX channel ID respectively.
        self.sec_id_to_endpoint_id = {}
        self.service_id_to_endpoint_id = {}
        self.ws_channel_id_to_endpoint_id = {}

# ################################################################################################################################

    def get_by_id(self, endpoint_id:'int') -> 'Endpoint':
        """ Returns an endpoint by its ID - raises KeyError if not found.
        """
        return self.endpoints[endpoint_id]

# ################################################################################################################################

    def get_by_name(self, endpoint_name:'str') -> 'Endpoint':
        """ Returns an endpoint by its name - raises KeyError if no such name exists.
        """
        for candidate in self.endpoints.values():
            if candidate.name == endpoint_name:
                return candidate
        raise KeyError('Could not find endpoint by name `{}` among `{}`'.format(endpoint_name, self.endpoints))

# ################################################################################################################################

    def get_by_ws_channel_id(self, ws_channel_id:'int') -> 'Endpoint':
        """ Returns the endpoint attached to the input WSX channel.
        """
        return self.endpoints[self.ws_channel_id_to_endpoint_id[ws_channel_id]]

# ################################################################################################################################

    def get_id_by_sec_id(self, sec_id:'int') -> 'int':
        """ Returns the ID of the endpoint attached to the input security definition.
        """
        return self.sec_id_to_endpoint_id[sec_id]

# ################################################################################################################################

    def get_id_by_ws_channel_id(self, ws_channel_id:'int') -> 'intnone':
        """ Returns the ID of the endpoint attached to the input WSX channel, or None.
        Waits briefly for the mapping to appear in case configuration is still being built.
        """
        wait_for_dict_key(self.ws_channel_id_to_endpoint_id, ws_channel_id, timeout=3)
        return self.ws_channel_id_to_endpoint_id.get(ws_channel_id)

# ################################################################################################################################

    def get_id_by_service_id(self, service_id:'int') -> 'int':
        """ Returns the ID of the endpoint attached to the input service.
        """
        return self.service_id_to_endpoint_id[service_id]

# ################################################################################################################################

    def create(self, config:'anydict') -> 'None':
        """ Creates a new endpoint from its configuration dict and populates
        the reverse mappings for whichever related definitions are present.
        """
        endpoint_id = config['id']
        self.endpoints[endpoint_id] = Endpoint(config)

        security_id = config['security_id']
        ws_channel_id = config.get('ws_channel_id')
        service_id = config.get('service_id')

        if security_id:
            self.sec_id_to_endpoint_id[security_id] = endpoint_id

        if ws_channel_id:
            self.ws_channel_id_to_endpoint_id[ws_channel_id] = endpoint_id

        if service_id:
            self.service_id_to_endpoint_id[service_id] = endpoint_id

# ################################################################################################################################

    def delete(self, endpoint_id:'int') -> 'None':
        """ Deletes an endpoint along with any reverse mappings that point to it.
        """
        del self.endpoints[endpoint_id]

        # Find, in each reverse mapping, the key that points to the endpoint being deleted, if any ..
        sec_id = next((key for key, value in self.sec_id_to_endpoint_id.items() if value == endpoint_id), None)
        ws_chan_id = next((key for key, value in self.ws_channel_id_to_endpoint_id.items() if value == endpoint_id), None)
        service_id = next((key for key, value in self.service_id_to_endpoint_id.items() if value == endpoint_id), None)

        # .. and remove each one that was found.
        if sec_id:
            del self.sec_id_to_endpoint_id[sec_id]

        if ws_chan_id:
            del self.ws_channel_id_to_endpoint_id[ws_chan_id]

        if service_id:
            del self.service_id_to_endpoint_id[service_id]

# ################################################################################################################################

    def _is_allowed(
        self,
        *,
        target,        # type: str
        name,          # type: str
        is_pub,        # type: bool
        security_id,   # type: int
        ws_channel_id, # type: int
        endpoint_id=0, # type: int
        _pub_role=_pub_role, # type: anytuple
        _sub_role=_sub_role  # type: anytuple
    ) -> 'str | bool':
        """ An internal function that decides whether an endpoint, a security definition,
        or a WSX channel are allowed to publish or subscribe to topics. Returns the matching
        pattern (a string) when allowed or False otherwise.
        """
        # Unless the endpoint ID was given explicitly, resolve it through
        # either the security definition or the WSX channel.
        if not endpoint_id:

            if not(security_id or ws_channel_id):
                raise ValueError(
                    'Either security_id or ws_channel_id must be given on input instead of `{}` `{}`'.format(
                        security_id, ws_channel_id))

            if security_id:
                endpoint_id = self.sec_id_to_endpoint_id[security_id]
            else:
                endpoint_id = self.ws_channel_id_to_endpoint_id[ws_channel_id]

        # One way or another, we have an endpoint object now ..
        endpoint = self.endpoints[endpoint_id]

        # .. make sure this endpoint may publish or subscribe, depending on what is needed.
        required_role = _pub_role if is_pub else _sub_role
        if endpoint.role not in required_role:
            return False

        # Alright, this endpoint has the correct role, but are there are any matching patterns for this topic?
        for orig, matcher in getattr(endpoint, target):
            if matcher.match(name):
                return orig
        return False

# ################################################################################################################################

    def is_allowed_pub_topic(self, *, name:'str', security_id:'int'=0, ws_channel_id:'int'=0) -> 'str | bool':
        """ Is the security definition or WSX channel allowed to publish to the input topic name?
        """
        return self._is_allowed(
            target='pub_topic_patterns',
            name=name,
            is_pub=True,
            security_id=security_id,
            ws_channel_id=ws_channel_id
        )

# ################################################################################################################################

    def is_allowed_pub_topic_by_endpoint_id(self, *, name:'str', endpoint_id:'int') -> 'str | bool':
        """ Is the endpoint of that ID allowed to publish to the input topic name?
        """
        return self._is_allowed(
            target='pub_topic_patterns',
            name=name,
            is_pub=True,
            security_id=0,
            ws_channel_id=0,
            endpoint_id=endpoint_id
        )

# ################################################################################################################################

    def is_allowed_sub_topic(self, *, name:'str', security_id:'int'=0, ws_channel_id:'int'=0) -> 'str | bool':
        """ Is the security definition or WSX channel allowed to subscribe to the input topic name?
        """
        return self._is_allowed(
            target='sub_topic_patterns',
            name=name,
            is_pub=False,
            security_id=security_id,
            ws_channel_id=ws_channel_id
        )

# ################################################################################################################################

    def is_allowed_sub_topic_by_endpoint_id(self, name:'str', endpoint_id:'int') -> 'str | bool':
        """ Is the endpoint of that ID allowed to subscribe to the input topic name?
        """
        return self._is_allowed(
            target='sub_topic_patterns',
            name=name,
            is_pub=False,
            security_id=0,
            ws_channel_id=0,
            endpoint_id=endpoint_id
        )

# ################################################################################################################################

    def get_impl_getter(self, endpoint_type:'str') -> 'callable_':
        """ Returns the getter callable registered for the input endpoint type.
        """
        return self.endpoint_impl_getter[endpoint_type]

# ################################################################################################################################

    def set_impl_getter(self, endpoint_type:'str', impl_getter:'callable_') -> 'None':
        """ Registers a getter callable for the input endpoint type.
        """
        self.endpoint_impl_getter[endpoint_type] = impl_getter
# ################################################################################################################################
# ################################################################################################################################
| 10,340
|
Python
|
.py
| 188
| 46.170213
| 130
| 0.418187
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,394
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/core/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,395
|
message.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/delivery/message.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Zato
from zato.common.api import GENERIC, PUBSUB
from zato.common.json_internal import json_loads
from zato.common.pubsub import PubSubMessage
from zato.common.typing_ import cast_, optional
from zato.common.util.time_ import datetime_from_ms
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import anydict, callable_
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato_pubsub.task')
# ################################################################################################################################
# ################################################################################################################################
_zato_mime_type = PUBSUB.MIMEType.Zato
# ################################################################################################################################
# ################################################################################################################################
class Message(PubSubMessage):
    """ Wrapper for messages adding a rich-comparison protocol used by sorted delivery lists:
    messages compare by priority first, then by ext_pub_time (when both sides have one),
    and finally by pub_time.
    """
    pub_time: 'float'

    def __init__(self) -> 'None':
        super(Message, self).__init__()
        self.sub_key = ''
        self.pub_msg_id = ''
        self.pub_correl_id = ''
        self.in_reply_to = None
        self.ext_client_id = ''
        self.group_id = ''
        self.position_in_group = 0
        self.ext_pub_time = 0.0
        self.data = None
        self.mime_type = '<no-mime-type-set>'
        self.expiration = 0
        self.expiration_time = 0
        self.has_gd = False
        self.pub_time_iso = ''
        self.ext_pub_time_iso = ''
        self.expiration_time_iso = ''
        self.recv_time_iso = ''

# ################################################################################################################################

    def __lt__(self, other:'Message', max_pri:'int'=9) -> 'bool':
        # Invert priorities so that a higher .priority value sorts first
        self_priority = max_pri - self.priority
        other_priority = max_pri - other.priority

        # If priority is different, that is most important and decides the ordering outright.
        # Note that the comparison must be made in both directions - previously only
        # self_priority < other_priority returned here, which meant that a message with
        # a strictly lower effective priority could still compare less-than via the time
        # checks below, making the comparator inconsistent and sorting unreliable.
        if self_priority != other_priority:
            return self_priority < other_priority

        # If we received an external publication time from a publisher,
        # this has priority over the time that we established ourselves (which is checked below)
        elif self.ext_pub_time and other.ext_pub_time:
            return cast_('float', self.ext_pub_time) < cast_('float', other.ext_pub_time)

        # Finally, we need to compare the publication times as assigned
        # by ourselves. At this point no two messages are to have the same
        # publication time because if such a condition is of concern then publishers
        # should sent their own times via ext_pub_time.
        else:
            return self.pub_time < other.pub_time

# ################################################################################################################################

    def __repr__(self) -> 'str':
        return '<Msg d:{} pub:{!r} pri:{} id:{} extpub:{!r} gd:{}>'.format(
            self.data, self.pub_time, self.priority, self.pub_msg_id, self.ext_pub_time, self.has_gd)

# ################################################################################################################################

    def add_iso_times(self) -> 'None':
        """ Sets additional attributes for datetime in ISO-8601.
        """
        self.pub_time_iso = cast_('str', datetime_from_ms(self.pub_time * 1000))

        if self.ext_pub_time:
            self.ext_pub_time_iso = cast_('str', datetime_from_ms(cast_('float', self.ext_pub_time) * 1000))

        if self.expiration_time:
            self.expiration_time_iso = cast_('str', datetime_from_ms(self.expiration_time * 1000))

        if self.recv_time:
            self.recv_time_iso = cast_('str', datetime_from_ms(self.recv_time * 1000))
# ################################################################################################################################
class GDMessage(Message):
    """ A guaranteed delivery message initialized from SQL data.
    """
    # Class-level marker checked by delivery code to tell GD from non-GD messages
    is_gd_message = True

    def __init__(self,
        sub_key,    # type: str
        topic_name, # type: str
        msg,        # type: anydict
        _gen_attr=GENERIC.ATTR_NAME,    # type: str
        _loads=json_loads,              # type: callable_
        _zato_mime_type=_zato_mime_type # type: str
    ) -> 'None':
        """ Copies message fields from an SQL result row into this instance,
        deserializes zato_ctx and any opaque attributes, and adds ISO-8601 times.
        """
        # logger.info('Building task message (gd) from `%s`', msg)

        super(GDMessage, self).__init__()
        self.endp_msg_queue_id = msg['endp_msg_queue_id']
        self.sub_key = sub_key
        self.pub_msg_id = msg['pub_msg_id']
        self.pub_correl_id = msg['pub_correl_id']
        self.in_reply_to = msg['in_reply_to']
        self.ext_client_id = msg['ext_client_id']
        self.group_id = msg['group_id']
        self.position_in_group = msg['position_in_group']
        self.pub_time = msg['pub_time']
        self.ext_pub_time = msg['ext_pub_time']
        self.mime_type = msg['mime_type']
        self.priority = msg['priority']
        self.expiration = msg['expiration']
        self.expiration_time = msg['expiration_time']
        self.has_gd = True
        self.topic_name = topic_name
        self.size = msg['size']
        self.published_by_id = msg['published_by_id']
        self.sub_pattern_matched = msg['sub_pattern_matched']
        self.user_ctx = msg['user_ctx']
        self.zato_ctx = msg['zato_ctx']

        # Assign data but note that we may still need to modify it
        # depending on what zato_ctx contains.
        self.data = msg['data']

        # This is optional .. if zato_ctx is present, it is stored as a JSON string
        # and, when it declares the Zato MIME type, the data itself is JSON too.
        if self.zato_ctx:
            self.zato_ctx = _loads(self.zato_ctx) # type: anydict # type: ignore[no-redef]

            if self.zato_ctx.get('zato_mime_type') == _zato_mime_type:
                self.data = json_loads(self.data)

        # Load opaque attributes, if any were provided on input
        # NOTE(review): getattr is used here rather than msg.get, which implies msg is expected
        # to be an attribute-style object (e.g. an SQL row), not a plain dict as the anydict
        # type comment suggests - confirm with callers.
        opaque = getattr(msg, _gen_attr, None)
        if opaque:
            opaque = _loads(opaque)
            for key, value in opaque.items():
                setattr(self, key, value)

        # Add times in ISO-8601 for external subscribers
        self.add_iso_times()

        # logger.info('Built task message (gd) from `%s`', self.pub_msg_id)
# ################################################################################################################################
class NonGDMessage(Message):
    """ A non-guaranteed delivery message initialized from a Python dict.
    """
    # Class-level marker checked by delivery code to tell GD from non-GD messages
    is_gd_message = False

    def __init__(self,
        sub_key,     # type: str
        server_name, # type: str
        server_pid,  # type: int
        msg,         # type: anydict
        _def_priority=PUBSUB.PRIORITY.DEFAULT,  # type: int
        _def_mime_type=PUBSUB.DEFAULT.MIME_TYPE # type: str
    ) -> 'None':
        super(NonGDMessage, self).__init__()

        # Where this message is being delivered from
        self.sub_key = sub_key
        self.server_name = server_name
        self.server_pid = server_pid

        # This is a non-GD message by definition
        self.has_gd = False

        # Required keys - these must always be present in the input dict
        self.pub_msg_id = msg['pub_msg_id']
        self.pub_time = msg['pub_time']
        self.data = msg['data']
        self.expiration = msg['expiration']
        self.expiration_time = msg['expiration_time']
        self.topic_name = msg['topic_name']
        self.size = msg['size']
        self.published_by_id = msg['published_by_id']
        self.pub_pattern_matched = msg['pub_pattern_matched']
        self.reply_to_sk = msg['reply_to_sk']
        self.deliver_to_sk = msg['deliver_to_sk']

        # Optional keys - use defaults when missing
        self.pub_correl_id = msg.get('pub_correl_id', '')
        self.in_reply_to = msg.get('in_reply_to', '')
        self.ext_client_id = msg.get('ext_client_id', '')
        self.group_id = msg.get('group_id', '')
        self.position_in_group = msg.get('position_in_group', 0)
        self.ext_pub_time = msg.get('ext_pub_time')
        self.mime_type = msg.get('mime_type') or _def_mime_type
        self.priority = msg.get('priority') or _def_priority
        self.user_ctx = msg.get('user_ctx')
        self.zato_ctx = msg.get('zato_ctx', {})

        # msg.sub_pattern_matched is a shared dictionary of patterns for each subscriber - we .pop from it
        # so as not to keep this dictionary's contents for no particular reason. Since there can be only
        # one delivery task for each sub_key, we can .pop rightaway.
        sub_pattern_matched = msg['sub_pattern_matched'] # type: anydict
        self.sub_pattern_matched = sub_pattern_matched.pop(self.sub_key)

        # Add times in ISO-8601 for external subscribers
        self.add_iso_times()
# ################################################################################################################################
# ################################################################################################################################
# Convenience alias - either a Message instance or None
msgnone = optional['Message']
# ################################################################################################################################
# ################################################################################################################################
| 10,127
|
Python
|
.py
| 187
| 46.304813
| 130
| 0.472247
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,396
|
tool.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/delivery/tool.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from copy import deepcopy
from logging import getLogger
from traceback import format_exc
from typing import cast, Iterable as iterable_
# gevent
from gevent import spawn
from gevent.lock import RLock
# Zato
from zato.common.odb.api import SQLRow
from zato.common.typing_ import cast_, list_
from zato.common.util.api import grouper
from zato.common.util.time_ import utcnow_as_ms
from zato.server.pubsub.delivery.message import GDMessage, NonGDMessage
from zato.server.pubsub.delivery._sorted_list import SortedList
from zato.server.pubsub.delivery.task import DeliveryTask
# ################################################################################################################################
# ################################################################################################################################
if 0:
from collections.abc import ValuesView
from sqlalchemy.orm.session import Session as SASession
from zato.common.pubsub import HandleNewMessageCtx
from zato.common.typing_ import any_, boolnone, callable_, callnone, dict_, dictlist, intset, set_, strlist, tuple_
from zato.server.pubsub import PubSub
from zato.server.pubsub.delivery.message import Message
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato_pubsub.task')
logger_zato = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
# Type aliases for collections of SQL message rows
sqlmsglist = list_['SQLRow']
sqlmsgiter = iterable_['SQLRow']
# ################################################################################################################################
# ################################################################################################################################
class PubSubTool:
""" A utility object for pub/sub-related tasks.
"""
    def __init__(self,
        pubsub,        # type: PubSub
        parent,        # type: any_
        endpoint_type, # type: str
        is_for_services=False,  # type: bool
        deliver_pubsub_msg=None # type: callnone
    ) -> 'None':
        """ Sets up the per-sub_key state (locks, batch sizes, delivery lists and tasks)
        and registers this tool with the server's pubsub object.
        """
        self.pubsub = pubsub
        self.parent = parent # This is our parent, e.g. an individual WebSocket on whose behalf we execute
        self.endpoint_type = endpoint_type

        self.server_name = self.pubsub.server.name
        self.server_pid = self.pubsub.server.pid

        # WSX connections will have their own callback but other connections use the default one
        self.deliver_pubsub_msg = deliver_pubsub_msg or self.pubsub.deliver_pubsub_msg # type: callable_

        # A broad lock for generic pub/sub matters
        self.lock = RLock()

        # Each sub_key will get its own lock for operations related to that key only
        self.sub_key_locks = {} # type: dict_[str, RLock]

        # How many messages to send in a single delivery group,
        # may be set individually for each subscription, defaults to 1
        self.batch_size = {} # type: dict_[str, int]

        # Which sub_keys this pubsub_tool handles
        self.sub_keys = set() # type: set_[str]

        # A sorted list of message references for each sub_key
        self.delivery_lists = {} # type: dict_[str, SortedList]

        # A pub/sub delivery task for each sub_key
        self.delivery_tasks = {} # type: dict_[str, DeliveryTask]

        # Last time we tried to pull GD messages from SQL, by sub_key
        self.last_gd_run = {} # type: dict_[str, float]

        # Register with this server's pubsub
        self.register_pubsub_tool()

        # How many times self.handle_new_messages has been called
        self.msg_handler_counter = 0

        # Is this tool solely dedicated to delivery of messages to Zato services
        self.is_for_services = is_for_services
# ################################################################################################################################
def stop(self) -> 'None':
""" Stops all delivery asks belonging to this tool.
"""
for item in self.delivery_tasks.values():
try:
item.stop()
except Exception:
logger.info('Ignoring exception in PubSubTool.stop -> %s', format_exc())
# ################################################################################################################################
    def register_pubsub_tool(self) -> 'None':
        """ Registers this tool with the server's pubsub object, which lets the latter
        decide when our delivery tasks for each sub_key should be shut down.
        """
        self.pubsub.register_pubsub_tool(self)
# ################################################################################################################################
def get_sub_keys(self) -> 'strlist':
""" Returns all sub keys this task handles, as a list.
"""
with self.lock:
return list(self.sub_keys)
# ################################################################################################################################
    def add_sub_key_no_lock(self, sub_key:'str') -> 'None':
        """ Adds metadata about a given sub_key - must be called with self.lock held.

        Sets up a per-sub_key delivery list, lock and DeliveryTask. A no-op if the
        sub_key is already handled by this tool; if no subscription exists for the
        sub_key, the fact is logged and no delivery task is created.
        """

        # Already seen it - can be ignored
        if sub_key in self.sub_keys:
            return

        self.sub_keys.add(sub_key)

        # Until changed, deliveries for this sub_key are not batched
        self.batch_size[sub_key] = 1

        #
        # A dictionary that maps when GD messages were last time fetched from the SQL database for each sub_key.
        # Since fetching means we are issuing a single query for multiple sub_keys at a time, we need to fetch only these
        # messages that are younger than the oldest value for all of the sub_keys whose messages will be fetched.
        #
        # Let's say we have three sub_keys: a, b, c
        #
        # time 0001: pub to a, b, c
        # time 0001: store last_gd_run = 0001 for each of a, b, c
        # ---
        # time 0002: pub to a, b
        # time 0002: store last_gd_run = 0002 for a, b
        # ---
        # time 0003: pub to b, c
        # time 0003: store last_gd_run = 0003 for b, c
        # ---
        # time 0004: pub to c
        # time 0004: store last_gd_run = 0004 for c
        #
        # We now have: {a:0002, b:0003, c:0004}
        #
        # Let's say we now receive:
        #
        # time 0005: pub to a, b, c
        #
        # Because we want to have a single SQL query for all of a, b, c instead of querying the database for each of sub_key,
        # we need to look up values stored in this dictionary for each of the sub_key and use the smallest one - in this case
        # it would be 0002 for sub_key a. Granted, we know that there won't be any keys for b in the timespan of 0002-0003
        # or for c in the duration of 0003-004, so in the case of these other keys reaching but so back in time is a bit too much
        # but this is all fine anyway because the most important part is that we can still use a single SQL query.
        #
        # Similarly, had it been a pub to b, c in time 0005 then we would be using min of b and c which is 0003.
        #
        # The reason why this is fine is that when we query the database not only do we use this last_gd_run but we also give it
        # a delivery status to return messages by (initialized only) and on top of it, we provide it a list of message IDs
        # that are currently being delivered by tasks, so in other words, the database will never give us duplicates
        # that have been already delivered or are about to be.
        #
        delivery_list = SortedList()
        delivery_lock = RLock()

        self.delivery_lists[sub_key] = delivery_list
        self.sub_key_locks[sub_key] = delivery_lock

        sub = self.pubsub.get_subscription_by_sub_key(sub_key)

        # Without a subscription we cannot build a delivery task - log the fact and give up.
        if not sub:
            all_subs = self.pubsub.get_all_subscriptions()
            msg = 'Sub key `%s` not found among `%s`'
            logger.warning(msg, sub_key, all_subs)
            logger_zato.warning(msg, sub_key, all_subs)

            # Return explicitly
            return

        else:
            # Note that DeliveryTask spawns its own delivery greenlet in its __init__.
            self.delivery_tasks[sub_key] = DeliveryTask(
                pubsub = self.pubsub,
                sub_config = sub.config,
                sub_key = sub_key,
                delivery_lock = delivery_lock,
                delivery_list = delivery_list,
                deliver_pubsub_msg = self.deliver_pubsub_msg,
                confirm_pubsub_msg_delivered_cb = self.confirm_pubsub_msg_delivered,
                enqueue_initial_messages_func = self.enqueue_initial_messages,
                pubsub_set_to_delete = self.pubsub.set_to_delete,
                pubsub_get_before_delivery_hook = self.pubsub.get_before_delivery_hook,
                pubsub_invoke_before_delivery_hook = self.pubsub.invoke_before_delivery_hook,
            )
# ################################################################################################################################
def add_sub_key(self, sub_key:'str') -> 'None':
""" Same as self.add_sub_key_no_lock but holds self.lock.
"""
with self.lock:
self.add_sub_key_no_lock(sub_key)
self.pubsub.set_pubsub_tool_for_sub_key(sub_key, self)
# ################################################################################################################################
def remove_sub_key(self, sub_key:'str') -> 'None':
with self.lock:
try:
self.sub_keys.remove(sub_key)
del self.batch_size[sub_key]
del self.sub_key_locks[sub_key]
del self.delivery_lists[sub_key]
self.delivery_tasks[sub_key].stop()
del self.delivery_tasks[sub_key]
except Exception:
logger.info('Exception during sub_key removal `%s`, e:`%s`', sub_key, format_exc())
delete_by_sub_key = remove_sub_key
# ################################################################################################################################
def has_sub_key(self, sub_key:'str') -> 'bool':
with self.lock:
return sub_key in self.sub_keys
# ################################################################################################################################
def remove_all_sub_keys(self) -> 'None':
sub_keys = deepcopy(self.sub_keys)
for sub_key in sub_keys:
self.remove_sub_key(sub_key)
# ################################################################################################################################
def clear_task(self, sub_key:'str') -> 'None':
task = self.delivery_tasks[sub_key]
task.clear()
# ################################################################################################################################
def trigger_update_task_sub_config(self, sub_key:'str') -> 'None':
task = self.delivery_tasks[sub_key]
task.update_sub_config()
# ################################################################################################################################
def _add_non_gd_messages_by_sub_key(self, sub_key:'str', messages:'dictlist') -> 'None':
""" Low-level implementation of add_non_gd_messages_by_sub_key, must be called with a lock for input sub_key.
"""
for msg in messages:
# Ignore messages that are replies meant to be delievered only to sub_keys
# other than current one. This may happen because PubSub.trigger_notify_pubsub_tasks
# sends non-GD messages to all sub_keys subscribed to topic, no matter what deliver_to_sk
# of a message is. This is the reason why we need to sort it out here. Eventually,
# PubSub.trigger_notify_pubsub_tasks should be changed to notify sub_keys if deliver_to_sk
# does not point to them.
if msg['deliver_to_sk']:
if sub_key not in msg['deliver_to_sk']:
continue
add = cast_('callable_', self.delivery_lists[sub_key].add)
add(NonGDMessage(sub_key, self.server_name, self.server_pid, msg))
# ################################################################################################################################
def add_non_gd_messages_by_sub_key(self, sub_key:'str', messages:'dictlist') -> 'None':
""" Adds to local delivery queue all non-GD messages from input.
"""
try:
with self.sub_key_locks[sub_key]:
self._add_non_gd_messages_by_sub_key(sub_key, messages)
except Exception:
e = format_exc()
logger.warning(e)
logger_zato.warning(e)
# ################################################################################################################################
    def _handle_new_messages(self, ctx:'HandleNewMessageCtx', delta:'int'=60) -> 'None':
        """ A callback invoked when there is at least one new message to be handled for input sub_keys.
        If has_gd is True, it means that at least one GD message available. If non_gd_msg_list is not empty,
        it is a list of non-GD message for sub_keys.

        delta - how many seconds to subtract from the current time when storing each sub_key's
        last_gd_run, to account for messages enqueued while this method was running.
        """
        session = None
        try:
            # GD messages require an SQL session to fetch them from the database
            if ctx.has_gd:
                session = self.pubsub.server.odb.session()
            else:
                if not ctx.non_gd_msg_list:
                    # This is an unusual situation but not an erroneous one because it is possible
                    # that we were triggered to deliver messages that have already expired in the meantime,
                    # in which case we just log on info level rather than warn.
                    logger.info('No messages received ({}) for cid:`{}`, has_gd:`{}` and sub_key_list:`{}`'.format(
                        ctx.non_gd_msg_list, ctx.cid, ctx.has_gd, ctx.sub_key_list))
                    return

            logger.info('Handle new messages, cid:%s, gd:%d, sub_keys:%s, len_non_gd:%d bg:%d',
                ctx.cid, int(ctx.has_gd), ctx.sub_key_list, len(ctx.non_gd_msg_list), ctx.is_bg_call)

            # GD messages fetched from SQL, grouped by their sub_key
            gd_msg_list = {} # type: dict_[str, sqlmsglist]

            # We need to have the broad lock first to read in messages for all the sub keys
            with self.lock:

                # Get messages for all sub_keys on input and break them out by each sub_key separately,
                # provided that we have a flag indicating that there should be some GD messages around in the database.
                if ctx.has_gd:
                    gd_messages_by_sk_list = self._fetch_gd_messages_by_sk_list(ctx.sub_key_list, ctx.pub_time_max, session)
                    gd_messages_by_sk_list = list(gd_messages_by_sk_list)
                    for msg in gd_messages_by_sk_list:
                        _sk_msg_list = gd_msg_list.setdefault(msg.sub_key, [])
                        _sk_msg_list.append(msg)

                # Note how we subtract delta seconds from current time - this is because
                # it is possible that there will be new messages enqueued in between our last
                # run and current time's generation - the difference will be likely just a few
                # milliseconds but to play it safe we use by default a generous slice of 60 seconds.
                # This is fine because any SQL queries depending on this value will also
                # include other filters such as delivery_status.
                new_now = utcnow_as_ms() - delta

                # Go over all sub_keys given on input and carry out all operations while holding a lock for each sub_key
                for sub_key in ctx.sub_key_list:

                    # Accept all input non-GD messages
                    if ctx.non_gd_msg_list:
                        self._add_non_gd_messages_by_sub_key(sub_key, ctx.non_gd_msg_list)

                    # Push all GD messages, if there are any at all for this sub_key
                    if ctx.has_gd and sub_key in gd_msg_list:
                        topic_name = self.pubsub.get_topic_name_by_sub_key(sub_key)
                        self._push_gd_messages_by_sub_key(sub_key, topic_name, gd_msg_list[sub_key])

                        # Remember this run's watermark so the next fetch can skip older messages
                        self.last_gd_run[sub_key] = new_now

                        logger.info('Storing last_gd_run of `%r` for sub_key:%s (d:%s)', new_now, sub_key, delta)

        except Exception:
            e = format_exc()
            logger.warning(e)
            logger_zato.warning(e)

        finally:
            # NOTE(review): the session is committed even when an exception was caught above -
            # presumably intentional for this read-mostly path, but worth confirming.
            if session:
                session.commit()
                session.close()
# ################################################################################################################################
def handle_new_messages(self, ctx:'HandleNewMessageCtx') -> 'None':
self.msg_handler_counter += 1
try:
_ = spawn(self._handle_new_messages, ctx) # noqa: F841
except Exception:
e = format_exc()
logger.warning(e)
logger_zato.warning(e)
# ################################################################################################################################
    def _fetch_gd_messages_by_sk_list(self,
        sub_key_list, # type: strlist
        pub_time_max, # type: float
        session=None # type: SASession | None
    ) -> 'sqlmsgiter':
        """ Part of the low-level implementation of enqueue_gd_messages_by_sub_key, must be called with a lock for input sub_key.

        A generator - yields SQL rows with GD messages for all the input sub_keys,
        skipping any messages that are already enqueued locally.
        """

        # These are messages that we have already queued up and,
        # if we happen to pick them up in the database, they should be ignored.
        ignore_list = set() # type: intset

        for sub_key in sub_key_list:
            for msg in self.delivery_lists[sub_key]:
                msg = cast(GDMessage, msg)
                if msg.has_gd:
                    ignore_list.add(msg.endp_msg_queue_id)

        logger.info('Fetching GD messages by sk_list:`%s`, ignore:`%s`', sub_key_list, ignore_list)

        # Find the oldest last_gd_run across the input sub_keys - see the comment
        # in add_sub_key_no_lock for why the minimum is the correct choice here.
        if self.last_gd_run:
            if len(sub_key_list) == 1:
                # Use .get because it is possible we have not fetched messages for that particular sub_key before,
                # i.e. self.last_gd_run may be non-empty because there are last GD runs for other keys,
                # just not for this one.
                # NOTE(review): .get may return None here, which is then handed to
                # get_sql_messages_by_sub_key as-is - confirm the query layer accepts it.
                min_last_gd_run = self.last_gd_run.get(sub_key_list[0])
            else:
                # NOTE(review): if none of sub_key_list has an entry in last_gd_run,
                # min() would be given an empty sequence - presumably this cannot happen; verify.
                min_last_gd_run = min(value for key, value in self.last_gd_run.items() if key in sub_key_list)
        else:
            min_last_gd_run = 0.0

        # Tell type-checkers that we really have a float here now
        min_last_gd_run = cast_(float, min_last_gd_run)

        logger.info('Using min last_gd_run `%r`', min_last_gd_run)

        for msg in self.pubsub.get_sql_messages_by_sub_key(session, sub_key_list, min_last_gd_run, pub_time_max, ignore_list):
            yield msg
# ################################################################################################################################
def _push_gd_messages_by_sub_key(self, sub_key:'str', topic_name:'str', gd_msg_list:'sqlmsgiter') -> 'None':
""" Pushes all input GD messages to a delivery task for the sub_key.
"""
count = 0
msg_ids = [] # type: strlist
for msg in gd_msg_list:
msg_ids.append(msg.pub_msg_id)
gd_msg = GDMessage(sub_key, topic_name, msg.get_value())
delivery_list = self.delivery_lists[sub_key]
delivery_list.add(gd_msg)
# logger.info('Adding a GD message `%s` to delivery_list=%s (%s)', gd_msg.pub_msg_id, hex(id(delivery_list)), sub_key)
count += 1
# logger.info('Pushing %d GD message{}to task:%s; msg_ids:%s'.format(' ' if count==1 else 's '), count, sub_key, msg_ids)
# ################################################################################################################################
def _enqueue_gd_messages_by_sub_key(self, sub_key:'str', gd_msg_list:'sqlmsgiter') -> 'None':
""" Low-level implementation of self.enqueue_gd_messages_by_sub_key which expects the message list on input.
Must be called with self.sub_key_locks[sub_key] held.
"""
topic_name = self.pubsub.get_topic_name_by_sub_key(sub_key)
self._push_gd_messages_by_sub_key(sub_key, topic_name, gd_msg_list)
# ################################################################################################################################
def enqueue_gd_messages_by_sub_key(self, sub_key:'str', session:'SASession | None'=None) -> 'None':
""" Fetches GD messages from SQL for sub_key given on input and adds them to local queue of messages to deliver.
"""
with self.sub_key_locks[sub_key]:
gd_msg_list = self._fetch_gd_messages_by_sk_list([sub_key], utcnow_as_ms(), session)
self._enqueue_gd_messages_by_sub_key(sub_key, gd_msg_list)
# ################################################################################################################################
    def enqueue_initial_messages(self, sub_key:'str', topic_name:'str', endpoint_name:'str', _group_size:'int'=400) -> 'None':
        """ Looks up any messages for input task in the database and pushes them all and enqueues in batches any found.

        _group_size - how many message IDs to fetch from SQL in a single query.
        """
        with self.sub_key_locks[sub_key]:

            pub_time_max = utcnow_as_ms()
            session = None

            try:
                # One SQL session for all queries
                session = self.pubsub.server.odb.session()

                # Get IDs of any messages already queued up so as to break them out into batches of messages to fetch
                msg_ids = self.pubsub.get_initial_sql_msg_ids_by_sub_key(session, sub_key, pub_time_max)
                msg_ids = [elem.pub_msg_id for elem in msg_ids]

                if msg_ids:
                    len_msg_ids = len(msg_ids)
                    suffix = ' ' if len_msg_ids == 1 else 's '
                    groups = list(grouper(_group_size, msg_ids)) # type: strlist
                    len_groups = len(groups)

                    # This, we log using both loggers because we also run during server startup so we should
                    # let users know that their server has to do something extra
                    for _logger in logger, logger_zato:
                        _logger.info('Found %d initial message%sto enqueue for sub_key:`%s` (%s -> %s), g:%d, gs:%d',
                            len_msg_ids, suffix, sub_key, topic_name, endpoint_name, len_groups, _group_size)

                    # Fetch and enqueue messages one group at a time
                    for _, group in enumerate(groups, 1):
                        group = cast_('strlist', group)
                        # Drop falsy elements - presumably the padding grouper adds
                        # to fill out the last, incomplete group; confirm against grouper's implementation.
                        group_msg_ids = [elem for elem in group if elem] # type: strlist
                        # logger.info('Enqueuing group %d/%d (gs:%d) (%s, %s -> %s) `%s`',
                        # idx, len_groups, _group_size, sub_key, topic_name, endpoint_name, group_msg_ids)

                        msg_list = self.pubsub.get_sql_messages_by_msg_id_list(session, sub_key, pub_time_max, group_msg_ids)
                        self._enqueue_gd_messages_by_sub_key(sub_key, msg_list)

            except Exception:
                for _logger in logger, logger_zato:
                    _logger.warning('Could not enqueue initial messages for `%s` (%s -> %s), e:`%s`',
                        sub_key, topic_name, endpoint_name, format_exc())

            finally:
                if session:
                    session.close()
# ################################################################################################################################
def confirm_pubsub_msg_delivered(self, sub_key:'str', delivered_list:'strlist') -> 'None':
self.pubsub.confirm_pubsub_msg_delivered(sub_key, delivered_list)
# ################################################################################################################################
def get_queue_depth(self, sub_key:'str') -> 'tuple_[int, int]':
""" Returns the number of GD and non-GD messages queued up for input sub_key.
"""
return self.delivery_tasks[sub_key].get_queue_depth()
# ################################################################################################################################
def handles_sub_key(self, sub_key:'str') -> 'bool':
with self.lock:
return sub_key in self.sub_keys
# ################################################################################################################################
def get_delivery_task(self, sub_key:'str') -> 'DeliveryTask':
with self.lock:
return self.delivery_tasks[sub_key]
# ################################################################################################################################
def get_delivery_tasks(self) -> 'ValuesView[DeliveryTask]':
with self.lock:
return self.delivery_tasks.values()
# ################################################################################################################################
def delete_messages(self, sub_key:'str', msg_list:'strlist') -> 'None':
""" Marks one or more to be deleted from the delivery task by the latter's sub_key.
"""
self.delivery_tasks[sub_key].delete_messages(msg_list)
# ################################################################################################################################
def get_messages(self, sub_key:'str', has_gd:'boolnone'=None) -> 'list_[Message]':
""" Returns all messages enqueued for sub_key without deleting them from their queue.
"""
return self.delivery_tasks[sub_key].get_messages(has_gd)
# ################################################################################################################################
def pull_messages(self, sub_key:'str', has_gd:'bool'=False) -> 'dictlist':
""" Implements pull-style delivery - returns messages enqueued for sub_key, deleting them in progress.
"""
with self.lock:
return self.delivery_tasks[sub_key].pull_messages()
# ################################################################################################################################
def get_message(self, sub_key:'str', msg_id:'str') -> 'Message':
""" Returns a particular message enqueued for sub_key.
"""
return self.delivery_tasks[sub_key].get_message(msg_id)
# ################################################################################################################################
# ################################################################################################################################
| 27,510
|
Python
|
.py
| 442
| 51.624434
| 130
| 0.495063
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,397
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/delivery/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,398
|
task.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/delivery/task.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=unused-import, unused-variable
# stdlib
from logging import getLogger
from threading import current_thread
from traceback import format_exc, format_exception
from typing import Iterable as iterable_
# gevent
from gevent import sleep, spawn
from gevent.lock import RLock
from gevent.thread import getcurrent
# Zato
from zato.common.api import PUBSUB
from zato.common.exception import RuntimeInvocationError
from zato.common.odb.api import SQLRow
from zato.common.typing_ import cast_, list_
from zato.common.util.time_ import utcnow_as_ms
from zato.server.pubsub.model import DeliveryResultCtx
# ################################################################################################################################
# ################################################################################################################################
# Imports below are needed only for type hints - the 'if 0' guard means
# they are never executed at runtime.
if 0:
    from zato.common.typing_ import any_, anydict, anylist, boolnone, callable_, callnone, dict_, dictlist, \
         strlist, tuple_
    from zato.server.pubsub import PubSub
    from zato.server.pubsub.delivery.message import GDMessage, Message
    from zato.server.pubsub.delivery._sorted_list import SortedList

    GDMessage = GDMessage

# ################################################################################################################################
# ################################################################################################################################

logger = getLogger('zato_pubsub.task')
logger_zato = getLogger('zato')

# ################################################################################################################################
# ################################################################################################################################

# Module-level shortcuts to frequently used pub/sub constants
_hook_action = PUBSUB.HOOK_ACTION
_notify_methods = (PUBSUB.DELIVERY_METHOD.NOTIFY.id, PUBSUB.DELIVERY_METHOD.WEB_SOCKET.id)

run_deliv_sc = PUBSUB.RunDeliveryStatus.StatusCode
ReasonCode = PUBSUB.RunDeliveryStatus.ReasonCode

# A template for messages about exceptions raised during delivery
deliv_exc_msg = 'Exception {}/{} in delivery iter #{} for `{}` sk:{} -> {}'

getcurrent = cast_('callable_', getcurrent)
# ################################################################################################################################
# ################################################################################################################################
class DeliveryTask:
    """ Runs a greenlet responsible for delivery of messages for a given sub_key.
    """
    # How long to wait after socket vs. non-socket errors and how many times to retry
    # a delivery - all three are refreshed from the subscription's configuration
    # in _set_sub_config_attrs. Units for the wait_* values are presumably seconds - confirm.
    wait_sock_err: 'float'
    wait_non_sock_err: 'float'
    delivery_max_retry: 'int'
    def __init__(
        self,
        *,
        pubsub, # type: PubSub
        sub_config, # type: anydict
        sub_key, # type: str
        delivery_lock, # type: RLock
        delivery_list, # type: SortedList
        deliver_pubsub_msg, # type: callable_
        confirm_pubsub_msg_delivered_cb, # type: callable_
        enqueue_initial_messages_func, # type: callable_
        pubsub_set_to_delete, # type: callable_
        pubsub_get_before_delivery_hook, # type: callable_
        pubsub_invoke_before_delivery_hook, # type: callable_
    ) -> 'None':

        # Basic state and callbacks handed to us by the parent pub/sub tool
        self.keep_running = True
        self.pubsub = pubsub
        self.enqueue_initial_messages_func = enqueue_initial_messages_func
        self.pubsub_set_to_delete = pubsub_set_to_delete
        self.pubsub_get_before_delivery_hook = pubsub_get_before_delivery_hook
        self.pubsub_invoke_before_delivery_hook = pubsub_invoke_before_delivery_hook
        self.sub_key = sub_key
        self.delivery_lock = delivery_lock
        self.delivery_list = delivery_list
        self.deliver_pubsub_msg = deliver_pubsub_msg
        self.confirm_pubsub_msg_delivered_cb = confirm_pubsub_msg_delivered_cb
        self.sub_config = sub_config
        self.topic_name = sub_config['topic_name']
        self.last_iter_run = utcnow_as_ms()

        # task_delivery_interval is configured in milliseconds, we keep it in seconds
        self.delivery_interval = self.sub_config['task_delivery_interval'] / 1000.0
        self.previous_delivery_method = self.sub_config['delivery_method']
        self.python_id = str(hex(id(self)))
        self.py_object = '<empty>'

        # Set attributes that may be potentially changed in runtime by users
        self._set_sub_config_attrs()

        # This is a total of delivery iterations
        self.delivery_iter = 0

        # This is a total of message batches processed so far
        self.len_batches = 0

        # A total of messages processed so far
        self.len_delivered = 0

        # A list of messages that were requested to be deleted while a delivery was in progress, checked before each delivery.
        self.delete_requested:list_['Message'] = []

        # This is a lock used for micro-operations such as changing or consulting the contents of self.delete_requested.
        self.interrupt_lock = RLock()

        # If self.wrap_in_list is True, messages will be always wrapped in a list,
        # even if there is only one message to send. Note that self.wrap_in_list will be False
        # only if both batch_size is 1 and wrap_one_msg_in_list is True.
        if self.sub_config['delivery_batch_size'] == 1:
            if self.sub_config['wrap_one_msg_in_list']:
                self.wrap_in_list = True
            else:
                self.wrap_in_list = False

        # With batch_size > 1, we always send a list, no matter what.
        else:
            self.wrap_in_list = True

        # Start the delivery loop in its own greenlet
        _ = spawn(self.run) # type: ignore
# ################################################################################################################################
def is_running(self) -> 'bool':
return self.keep_running
# ################################################################################################################################
def _set_sub_config_attrs(self) -> 'None':
self.wait_sock_err = float(self.sub_config['wait_sock_err'])
self.wait_non_sock_err = float(self.sub_config['wait_non_sock_err'])
self.delivery_max_retry = int(self.sub_config.get('delivery_max_retry', 0) or PUBSUB.DEFAULT.DELIVERY_MAX_RETRY)
# ################################################################################################################################
def _delete_messages(self, to_delete:'msgiter') -> 'None':
""" Actually deletes messages - must be called with self.interrupt_lock held.
"""
logger.info('Deleting message(s) `%s` from `%s` (%s)', to_delete, self.sub_key, self.topic_name)
# Mark as deleted in SQL
self.pubsub_set_to_delete(self.sub_key, [msg.pub_msg_id for msg in to_delete])
# Go through each of the messages that is to be deleted ..
for msg in to_delete:
# .. delete it from our in-RAM delivery list ..
self.delivery_list.remove_pubsub_msg(msg)
# ################################################################################################################################
def delete_messages(self, msg_list:'strlist', _notify:'str'=PUBSUB.DELIVERY_METHOD.NOTIFY.id) -> 'None':
""" For notify tasks, requests that all messages from input list be deleted before the next delivery.
Otherwise, deletes the messages immediately.
"""
with self.interrupt_lock:
# Build a list of actual messages to be deleted - we cannot use a msg_id list only
# because the SortedList always expects actual message objects for comparison purposes.
# This will not be called too often so it is fine to iterate over self.delivery_list
# instead of employing look up dicts just in case a message would have to be deleted.
to_delete = cast_('msglist', [])
for msg in self.delivery_list:
if msg.pub_msg_id in msg_list:
# We can trim it since we know it won't appear again
msg_list.remove(msg.pub_msg_id)
to_delete.append(msg)
# We are a task that sends out notifications
if self.sub_config['delivery_method'] == _notify:
logger.info('Marking message(s) to be deleted `%s` from `%s` (%s)', msg_list, self.sub_key, self.topic_name)
self.delete_requested.extend(to_delete)
# We do not send notifications and self.run never runs so we need to delete the messages here
else:
self._delete_messages(to_delete)
# ################################################################################################################################
def get_messages(self, has_gd:'boolnone') -> 'msglist': # type: ignore[valid-type]
""" Returns all messages enqueued in the delivery list, without deleting them from self.delivery_list.
"""
out: 'msglist' # type: ignore[valid-type]
if has_gd is None:
out = [msg for msg in self.delivery_list]
len_out = len(out)
else:
out = []
for msg in self.delivery_list:
if msg.has_gd is has_gd:
out.append(msg)
len_out = len(out)
logger.info('Returning %d message(s) for sub_key `%s` (gd:%s)', len_out, self.sub_key, has_gd)
return out
# ################################################################################################################################
def pull_messages(self) -> 'dictlist':
""" Implements pull-style delivery - returns messages enqueued for sub_key, deleting them in progress.
"""
# Output to produce
out = [] # type: dictlist
messages = [] # type: anylist
# A function wrapper that will append to output
_append_to_out_func = self._append_to_pull_messages(messages)
# Runs the delivery with our custom function that handles all messages to be delivered
_ = self.run_delivery(_append_to_out_func) # noqa: F841
# OK, we have the output and can return it
for elem in messages:
out.append(elem.to_dict())
return out
# ################################################################################################################################
def _append_to_pull_messages(self, out:'any_') -> 'callable_':
def _impl(sub_key:'str', to_deliver:'any_') -> None:
if isinstance(to_deliver, list):
out.extend(to_deliver)
else:
out.append(to_deliver)
return _impl
# ################################################################################################################################
def get_message(self, msg_id:'str') -> 'Message':
""" Returns a particular message enqueued by this delivery task.
"""
for msg in self.delivery_list:
if msg.pub_msg_id == msg_id:
return msg
else:
raise ValueError('No such message {}'.format(msg_id))
# ################################################################################################################################
def _get_reason_code_from_exception(self, e:'Exception') -> 'int':
if isinstance(e, IOError):
reason_code = ReasonCode.Error_IO
elif isinstance(e, RuntimeInvocationError):
reason_code = ReasonCode.Error_Runtime_Invoke
else:
reason_code = ReasonCode.Error_Other
return reason_code
# ################################################################################################################################
def _get_messages_to_delete(
self,
current_batch:'msglist' # type: ignore[valid-type]
) -> 'msglist': # type: ignore[valid-type]
# There may be requests to delete some of messages while we are running and we obtain the list of
# such messages here.
with self.interrupt_lock:
to_delete = self.delete_requested[:]
self.delete_requested.clear()
# Go through each message and check if any has reached our delivery_max_retry.
# Any such message should be deleted so we add it to to_delete. Note that we do it here
# because we want for a sub hook to have access to them.
for msg in current_batch: # type: ignore[attr-defined]
if msg.delivery_count >= self.delivery_max_retry:
to_delete.append(msg)
return to_delete
# ################################################################################################################################
def _invoke_before_delivery_hook(
self,
current_batch, # type: msglist # type: ignore[valid-type]
hook, # type: callable_
to_delete, # type: msglist # type: ignore[valid-type]
to_deliver, # type: msglist # type: ignore[valid-type]
to_skip # type: msglist # type: ignore[valid-type]
) -> 'None':
messages = {
_hook_action.DELETE: to_delete,
_hook_action.DELIVER: to_deliver,
_hook_action.SKIP: to_skip,
} # type: dict_[str, msglist]
# We pass the dict to the hook which will in turn update in place the lists that the dict contains.
# This is why this method does not return anything, i.e. the lists are modified in place.
self.pubsub_invoke_before_delivery_hook(
hook, self.sub_config['topic_id'], self.sub_key, current_batch, messages)
# ################################################################################################################################
def _log_delivered_success(self, len_delivered:'int', delivered_msg_id_list:'strlist') -> 'None':
suffix = ' ' if len_delivered == 1 else 's '
logger.info('Successfully delivered %s message%s%s to %s (%s -> %s) [lend:%d]',
len_delivered, suffix, delivered_msg_id_list, self.sub_key, self.topic_name,
self.sub_config['endpoint_name'], self.len_delivered)
# ################################################################################################################################
def _update_delivery_counters(self, to_deliver:'msglist') -> 'None': # type: ignore[valid-type]
    """ Increments the delivery counter of each message that is about to be delivered.
    """
    # Take the lock so the counters are not updated concurrently with other delivery bookkeeping.
    with self.delivery_lock:
        for item in to_deliver: # type: ignore[attr-defined]
            item.delivery_count = item.delivery_count + 1
# ################################################################################################################################
def run_delivery(self,
    deliver_pubsub_msg=None, # type: callnone
    status_code=run_deliv_sc # type: any_
    ) -> 'DeliveryResultCtx':
    """ Actually attempts to deliver messages. Each time it runs, it gets all the messages
    that are still to be delivered from self.delivery_list.

    Returns a DeliveryResultCtx whose is_ok / status_code / reason_code / exception_list
    describe the outcome of this single delivery iteration.
    """
    # Increase our delivery counter
    self.delivery_iter += 1

    # Try to deliver a batch of messages or a single message if batch size is 1
    # and we should not wrap it in a list.
    result = DeliveryResultCtx()
    result.delivery_iter = self.delivery_iter

    try:
        # For pull-type deliveries, this will be given on input. For notify-type deliveries,
        # we use the callback assigned to self.
        deliver_pubsub_msg = deliver_pubsub_msg if deliver_pubsub_msg else self.deliver_pubsub_msg

        # Deliver up to that many messages in one batch
        delivery_batch_size = self.sub_config['delivery_batch_size'] # type: int

        logger.info('Looking for current batch in delivery_list=%s (%s)', hex(id(self.delivery_list)), self.sub_key)

        # Note that slicing does not remove anything from self.delivery_list - messages are
        # removed further below, only after their delivery has been confirmed.
        current_batch = self.delivery_list[:delivery_batch_size]
        current_batch = cast_('msglist', current_batch)

        # For each message from batch we invoke a hook, if there is any, which will decide
        # whether the message should be delivered, skipped in this iteration or perhaps deleted altogether
        # without even trying to deliver it. If there is no hook, none of messages will be skipped or deleted.

        # An optional pub/sub hook - note that we are checking it here rather than in __init__
        # because users may change it any time for a topic.
        hook = self.pubsub_get_before_delivery_hook(self.sub_key)

        # Look up all the potential messages that we need to delete.
        to_delete = self._get_messages_to_delete(current_batch)

        # Delete these messages first, before starting any delivery.
        if to_delete:
            self._delete_messages(to_delete)

            # Clear out this list because we will be reusing it later in the delivery hook
            to_delete = []

        # It is possible that we do not have any messages to deliver here, e.g. because all of them were already deleted
        # via self._delete_messages, in which case, we can simply return.
        if not self.delivery_list:
            result.is_ok = True
            result.status_code = status_code.OK
            return result

        # Unlike to_delete, which has to be computed dynamically,
        # these two can be initialized to their respective empty lists directly.
        to_deliver:'msglist' = [] # type: ignore[valid-type]
        to_skip:'msglist' = [] # type: ignore[valid-type]

        # There is a hook so we can invoke it - it will update in place the lists that we pass to it ..
        if hook:
            self._invoke_before_delivery_hook(current_batch, hook, to_delete, to_deliver, to_skip)

        # .. otherwise, without a hook, we will always try to deliver all messages that we have in a given batch
        else:
            to_deliver[:] = current_batch[:] # type: ignore[index]

        # Our hook may have indicated what to delete, in which case, do delete that now.
        if to_delete:
            self._delete_messages(to_delete)

        if to_skip:
            logger.info('Skipping messages `%s`', to_skip)

        # Update the delivery counter before trying to deliver the messages
        self._update_delivery_counters(to_deliver)

        # This is the call that actually delivers messages
        deliver_pubsub_msg(self.sub_key, to_deliver if self.wrap_in_list else to_deliver[0]) # type: ignore[index]

    except Exception as e:
        # Do not attempt to deliver any other message in case of an error. Our parent will sleep for a small amount of
        # time and then re-run us, thanks to which the next time we run we will again iterate over all the messages
        # currently queued up, including the ones that we were not able to deliver in current iteration.
        result.reason_code = self._get_reason_code_from_exception(e)
        result.status_code = status_code.Error
        result.exception_list.append(e)

    else:
        # On successful delivery, remove these messages from SQL and our own delivery_list
        try:
            # All message IDs that we have delivered
            delivered_msg_id_list = [msg.pub_msg_id for msg in to_deliver] # type: ignore[attr-defined]

            with self.delivery_lock:
                self.confirm_pubsub_msg_delivered_cb(self.sub_key, delivered_msg_id_list)

        except Exception as update_err:
            # Confirmation failed - report it in the result object for our caller to act upon.
            result.status_code = status_code.Error
            result.reason_code = ReasonCode.Error_Other
            result.exception_list.append(update_err)
        else:
            # Confirmation succeeded - now remove the delivered messages from our in-RAM list too.
            with self.delivery_lock:
                for msg in to_deliver: # type: ignore[attr-defined]
                    try:
                        self.delivery_list.remove_pubsub_msg(msg)
                    except Exception as remove_err:
                        result.status_code = status_code.Error
                        result.exception_list.append(remove_err)

            # Status of messages is updated in both SQL and RAM so we can now log success
            # unless, for some reason, we were not able to remove the messages from self.delivery_list.
            if result.status_code in (status_code.Error, status_code.Warning):
                logger.warning('Could not remove delivered messages from self.delivery_list `%s` (%s) -> `%s`',
                    to_deliver, self.delivery_list, result.exception_list)
            else:
                # This is reusable.
                len_delivered = len(delivered_msg_id_list)

                # Log success ..
                self._log_delivered_success(len_delivered, delivered_msg_id_list)

                # .. update internal metadata ..
                self.len_batches += 1
                self.len_delivered += len_delivered

                # .. and indicate that we have successfully delivered all messages currently queued up
                # and our delivery list is currently empty.
                result.status_code = status_code.OK

    # No matter what, we always have a result object to return
    if result.status_code not in (status_code.Error, status_code.Warning):
        result.is_ok = True
        result.status_code = status_code.OK

    return result
# ################################################################################################################################
def _should_wake(self, _now:'callable_'=utcnow_as_ms) -> 'bool':
    """ Returns True if the task should be woken up, e.g. because its time has come already to process messages,
    assuming there are any waiting for it.
    """
    # Any queued-up message means we should wake up immediately ..
    if self.delivery_list:
        return True

    # .. otherwise, wake up only once self.delivery_interval has elapsed since the previous iteration.
    current_time = _now()
    elapsed = round(current_time - self.last_iter_run, 2)

    if elapsed >= self.delivery_interval:
        # NOTE(review): the list was empty a moment ago, so this log line can fire only
        # if messages arrived in the meantime - confirm that this is the intent.
        if self.delivery_list:
            logger.info('Waking task:%s now:%s last:%s diff:%s interval:%s len-list:%d',
                self.sub_key, current_time, self.last_iter_run, elapsed, self.delivery_interval, len(self.delivery_list))
        return True

    # Neither condition was met, so explicitly say there is no reason to wake up.
    return False
# ################################################################################################################################
def run(self,
    default_sleep_time=0.1, # type: float
    status_code=run_deliv_sc # type: any_
    ) -> 'None':
    """ Runs the delivery task's main loop.

    Keeps iterating until self.keep_running becomes false, invoking self.run_delivery
    whenever self._should_wake says there may be work to do.
    """
    # Fill out Python-level metadata first
    _current_greenlet = cast_('any_', getcurrent())
    _greenlet_name = _current_greenlet.name
    _greenlet_name = cast_('str', _greenlet_name)
    self.py_object = '{}; {}; {}'.format(current_thread().name, _greenlet_name, self.python_id)

    logger.info('Starting delivery task for sub_key:`%s` (%s, %s, %s)',
        self.sub_key, self.topic_name, self.sub_config['delivery_method'], self.py_object)

    # First, make sure that the topic object already exists,
    # e.g. it is possible that our task is already started
    # even if other in-RAM structures are not populated yet,
    # which is why we need to wait for this topic.
    _ = self.pubsub.wait_for_topic(self.topic_name)

    #
    # Before starting anything, check if there are any messages already queued up in the database for this task.
    # This may happen, for instance, if:
    #
    # * Our delivery_method is `pull`
    # * Some messages get published to topic but the subscribers never gets them
    # * Our server is restarted
    # * The server is ultimately brought up and we need to find these messages that were previously
    #   published but never delivered
    #
    # Since this is about messages taken from the database, by definition, all of them they must be GD ones.
    #
    self.enqueue_initial_messages_func(self.sub_key, self.topic_name, self.sub_config['endpoint_name'])

    try:
        while self.keep_running:

            # Reusable.
            delivery_method = self.sub_config['delivery_method']

            # We are a task that does not notify endpoints, i.e. we are pull-style and our subscribers
            # will query us themselves so in this case we can sleep for a while and repeat the loop -
            # perhaps before the next iteration of the loop begins someone will change delivery_method
            # to one that allows for notifications to be sent. If not, we will be simply looping forever,
            # checking periodically below if the delivery method is still the same.
            if delivery_method not in _notify_methods:
                sleep(5)
                continue

            # Apparently, our delivery method has changed since the last time our self.sub_config
            # was modified, so we can log this fact and store it for later use.
            if delivery_method != self.previous_delivery_method:

                # First, log what happened ..
                self._log_delivery_method_changed(delivery_method)

                # .. now, the new value replaces the previous one - possibly to be replaced again and again in the future.
                self.previous_delivery_method = delivery_method

            # Is there any message that we can try to deliver?
            if self._should_wake():

                # NOTE(review): self.run_delivery also acquires self.delivery_lock internally -
                # presumably this is a reentrant lock; confirm with its definition.
                with self.delivery_lock:

                    # Update last run time to be able to wake up in time for the next delivery
                    self.last_iter_run = utcnow_as_ms()

                    # Get the list of all message IDs for which delivery was successful,
                    # indicating whether all currently lined up messages have been
                    # successfully delivered.
                    result = self.run_delivery()

                    # We were asked to stop while the delivery was in progress - log it and loop around.
                    if not self.keep_running:
                        msg = 'Skipping delivery loop after r:%s, kr:%d [lend:%d]'
                        logger.info(msg, result, self.keep_running, self.len_delivered)
                        continue

                    if result.is_ok:
                        continue

                    # This was a runtime invocation error - for instance, a low-level WebSocket exception,
                    # which is unrecoverable and we need to stop our task. When the client reconnects,
                    # the delivery will pick up where we left.
                    elif result.reason_code == ReasonCode.Error_Runtime_Invoke:
                        self.stop()

                    # Sleep for a moment because we have just run out of all messages.
                    elif result.reason_code == ReasonCode.No_Msg:
                        sleep(default_sleep_time) # pyright: ignore[reportGeneralTypeIssues]

                    # Otherwise, sleep for a longer time because our endpoint must have returned an error.
                    # After this sleep, self.run_delivery will again attempt to deliver all messages
                    # we queued up. Note that we are the only delivery task for this sub_key so when we sleep here
                    # for a moment, we do not block other deliveries.
                    else:

                        # We caught an error or warning ..
                        if not result.is_ok:

                            # Reusable.
                            len_exception_list = len(result.exception_list)

                            # Log all the exceptions received while trying to deliver the messages ..
                            self._log_warnings_from_delivery_task(result, len_exception_list)

                            # .. sleep only if there are still some messages to be delivered,
                            # .. as it is possible that our lists has been cleared out since the last time we run ..
                            if self.delivery_list:

                                # .. sleep for a while but only if this was an error (not a warning).
                                if result.status_code == status_code.Error:

                                    # .. OK, we can sleep now.
                                    self._sleep_on_delivery_error(result, len_exception_list)

            # There was no message to deliver in this turn ..
            else:

                # .. thus, we can wait until one arrives.
                sleep(default_sleep_time) # pyright: ignore[reportGeneralTypeIssues]

    except Exception as e:
        error_msg = 'Exception in delivery task for sub_key:`%s`, e:`%s`'
        e_formatted = format_exc()
        logger.warning(error_msg, self.sub_key, e_formatted)
        logger_zato.warning(error_msg, self.sub_key, e)
# ################################################################################################################################
def _log_delivery_method_changed(self, current_delivery_method:'str') -> 'None':
    """ Logs the fact that the task's delivery method changed since the previous iteration.
    """
    # Note that the trailing, unmatched backtick that used to follow `(%s -> %s)` was removed
    # from the message - it was a typo, all other placeholders are quoted in balanced backticks.
    logger.info('Changed delivery_method from `%s` to `%s` for `%s` (%s -> %s)',
        self.previous_delivery_method, current_delivery_method, self.sub_key,
        self.topic_name, self.sub_config['endpoint_name'])
# ################################################################################################################################
def _log_warnings_from_delivery_task(self, result:'DeliveryResultCtx', len_exception_list:'int') -> 'None':
    """ Logs all exceptions that the delivery task reported for the current iteration -
    a full traceback goes to the main logger while a short summary goes to the Zato-level one.
    """
    # .. log all exceptions reported by the delivery task ..
    for idx, e in enumerate(result.exception_list, 1):

        # The main logger receives the complete traceback ..
        msg_logger = deliv_exc_msg.format(
            idx, len_exception_list, result.delivery_iter, self.topic_name, self.sub_key,
            ''.join(format_exception(type(e), e, e.__traceback__)))

        # .. while the Zato-level logger receives a short summary only. Note that e.args may be
        # empty, e.g. for `raise Exception()`, in which case we fall back to the exception's repr
        # instead of crashing with an IndexError inside the error-logging path itself.
        msg_logger_zato = deliv_exc_msg.format(
            idx, len_exception_list, result.delivery_iter, self.topic_name, self.sub_key,
            e.args[0] if e.args else repr(e))

        logger.warning(msg_logger)
        logger_zato.warning(msg_logger_zato)
# ################################################################################################################################
def _sleep_on_delivery_error(self, result:'DeliveryResultCtx', len_exception_list:'int') -> 'None':
    """ Pauses the task after a delivery error - I/O errors and other errors use different pause lengths.
    """
    # Choose the pause length depending on whether this was an I/O error or not.
    sleep_time = self.wait_sock_err if result.reason_code == ReasonCode.Error_IO else self.wait_non_sock_err

    # Build a human-friendly description of how many exceptions there were.
    if len_exception_list == 1:
        exc_len_msg = 'an exception'
    else:
        exc_len_msg = '{} exceptions'.format(len_exception_list)

    sleep_msg = 'Sleeping for {}s after {} in iter #{}'.format(
        sleep_time, exc_len_msg, result.delivery_iter)

    # Log the pause to both loggers before actually sleeping.
    logger.warning(sleep_msg)
    logger_zato.warning(sleep_msg)

    sleep(sleep_time) # pyright: ignore[reportGeneralTypeIssues]
# ################################################################################################################################
def stop(self) -> 'None':
    """ Requests that the task stop running - a no-op if it is already stopped.
    """
    # Nothing to do if we were already asked to stop earlier.
    if not self.keep_running:
        return

    logger.info('Stopping delivery task for sub_key:`%s`', self.sub_key)
    self.keep_running = False
# ################################################################################################################################
def clear(self) -> 'None':
    """ Empties out the task's delivery list, i.e. removes from RAM all the messages queued up for delivery.
    """
    # For logging purposes ..
    gd, non_gd = self.get_queue_depth()

    # .. log details of what we are about to do - note that the previous version of this message
    # had unbalanced backticks around its placeholders, which is fixed here ..
    logger.info('Removing messages from delivery list for sub_key:`%s`, gd:%d, ngd:%d, msgs:`%s`',
        self.sub_key, gd, non_gd, [elem.pub_msg_id for elem in self.delivery_list])

    # .. indicate what should be deleted in the next iteration of the self.run_delivery loop ..
    # NOTE(review): the line below is deliberately commented out, so clearing a task does not
    # request SQL-level deletes - confirm that this is still the intent.
    # self.delete_requested[:] = self.delivery_list[:]

    # .. clear the delivery list now ..
    self.delivery_list.clear()

    # .. and log a higher-level message now.
    msg = 'Cleared task messages for sub_key `%s` -> `%s`'
    logger.info(msg, self.sub_key, self.py_object)
    logger_zato.info(msg, self.sub_key, self.py_object)
# ################################################################################################################################
def update_sub_config(self) -> 'None':
    """ Refreshes the task's local attributes by delegating to self._set_sub_config_attrs
    (defined elsewhere in this class) - presumably invoked after the underlying subscription's
    configuration changed; confirm with callers.
    """
    self._set_sub_config_attrs()
# ################################################################################################################################
def get_queue_depth(self) -> 'tuple_[int, int]':
    """ Returns the number of GD and non-GD messages in delivery list.
    """
    # Count GD and non-GD messages separately - iterating the list is free of side effects
    # so walking it twice is equivalent to a single pass with two counters.
    gd_depth = sum(1 for item in self.delivery_list if item.has_gd)
    non_gd_depth = sum(1 for item in self.delivery_list if not item.has_gd)

    return gd_depth, non_gd_depth
# ################################################################################################################################
def get_gd_queue_depth(self) -> 'int':
    """ Returns the number of GD messages in the delivery list.
    """
    # Note that the return annotation is now a quoted string ('int'), for consistency
    # with all the sibling methods which use string annotations throughout.
    return self.get_queue_depth()[0]
# ################################################################################################################################
def get_non_gd_queue_depth(self) -> 'int':
    """ Returns the number of non-GD messages in the delivery list.
    """
    # The second element of the pair is the non-GD depth.
    _, non_gd_depth = self.get_queue_depth()
    return non_gd_depth
# ################################################################################################################################
# ################################################################################################################################
| 34,945
|
Python
|
.py
| 565
| 50.20354
| 130
| 0.523337
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,399
|
_sorted_list.py
|
zatosource_zato/code/zato-server/src/zato/server/pubsub/delivery/_sorted_list.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pyright: reportUnknownArgumentType=false, reportUnknownVariableType=false
# stdlib
from bisect import bisect_left
from logging import getLogger
from typing import Iterator as iterator
# sortedcontainers
from sortedcontainers import SortedList as _SortedList
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
from zato.server.pubsub.delivery.message import Message
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato_pubsub.task')
# ################################################################################################################################
# ################################################################################################################################
class SortedList(_SortedList):
    """ A custom subclass able to remove pubsub messages from SortedList instances by their publication ID.
    """
    def __iter__(self) -> 'iterator[Message]':
        # Delegates to the parent class - the override exists only to narrow the return type.
        return super().__iter__()

# ################################################################################################################################

    def __getitem__(self, idx:'any_') -> 'any_':
        # Delegates to the parent class - the override exists only to narrow the return type.
        return super().__getitem__(idx)

# ################################################################################################################################

    def remove_pubsub_msg(self, msg:'Message') -> 'None':
        """ Removes a pubsub message from a SortedList instance - we cannot use the regular .remove method
        because it may trigger __cmp__ per https://github.com/grantjenks/sorted_containers/issues/81.
        """
        logger.info('In remove_pubsub_msg msg:`%s`, mxs:`%s`', msg.pub_msg_id, self._maxes)

        # Find the sublist that could contain the message - if we ran past the last sublist,
        # the message cannot possibly be stored here.
        sublist_idx = bisect_left(self._maxes, msg)
        if sublist_idx == len(self._maxes):
            raise ValueError('{0!r} not in list (1)'.format(msg))

        # Walk through that sublist, matching messages by pub_msg_id rather than by comparison operators.
        for position, candidate in enumerate(self._lists[sublist_idx]):
            if candidate.pub_msg_id == msg.pub_msg_id:
                self._delete(sublist_idx, position)
                break
        else:
            # The for loop exhausted the sublist without a match.
            raise ValueError('{0!r} not in list (2)'.format(msg))
# ################################################################################################################################
# ################################################################################################################################
| 3,002
|
Python
|
.py
| 48
| 57.125
| 130
| 0.366053
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|