id
int64 0
458k
| file_name
stringlengths 4
119
| file_path
stringlengths 14
227
| content
stringlengths 24
9.96M
| size
int64 24
9.96M
| language
stringclasses 1
value | extension
stringclasses 14
values | total_lines
int64 1
219k
| avg_line_length
float64 2.52
4.63M
| max_line_length
int64 5
9.91M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 7
101
| repo_stars
int64 100
139k
| repo_forks
int64 0
26.4k
| repo_open_issues
int64 0
2.27k
| repo_license
stringclasses 12
values | repo_extraction_date
stringclasses 433
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
10,100
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/subprocess_/impl/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
| 238
|
Python
|
.py
| 6
| 38.166667
| 82
| 0.729258
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,101
|
outconn_sftp.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/subprocess_/impl/outconn_sftp.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from tempfile import NamedTemporaryFile
from traceback import format_exc
# Bunch
from bunch import bunchify
# Zato
from zato.common.api import SFTP
from zato.common.json_internal import dumps
from zato.common.sftp import SFTPOutput
from zato.server.connection.connector.subprocess_.base import BaseConnectionContainer, Response
# ################################################################################################################################
if 0:
from bunch import Bunch
Bunch = Bunch
# ################################################################################################################################
# One megabyte = eight thousand kilobits
mb_to_kbit = 8000
# ################################################################################################################################
ip_type_map = {
SFTP.IP_TYPE.IPV4.id: '-4',
SFTP.IP_TYPE.IPV6.id: '-6',
}
log_level_map = {
SFTP.LOG_LEVEL.LEVEL0.id: '',
SFTP.LOG_LEVEL.LEVEL1.id: '-v',
SFTP.LOG_LEVEL.LEVEL2.id: '-vv',
SFTP.LOG_LEVEL.LEVEL3.id: '-vvv',
SFTP.LOG_LEVEL.LEVEL4.id: '-vvvv',
}
# ################################################################################################################################
# ################################################################################################################################
class SFTPConnection:
    """ Wraps access to SFTP commands via command line.

    Each .execute call writes the input commands to a temporary batch file
    and runs the configured sftp binary against it via the sh library.
    """
    # Incremented once per .execute call; becomes a per-instance attribute
    # on first increment since it starts out as a class attribute.
    command_no = 0

    def __init__(self, logger, **config):
        self.logger = logger
        self.config = bunchify(config) # type: Bunch

        # Reject unknown IP types
        if self.config.force_ip_type:
            if not SFTP.IP_TYPE().is_valid(self.config.force_ip_type):
                raise ValueError('Unknown IP type `{!r}`'.format(self.config.force_ip_type))

        # Reject unknown logging levels
        if self.config.log_level:
            if not SFTP.LOG_LEVEL().is_valid(self.config.log_level):
                raise ValueError('Unknown log level `{!r}`'.format(self.config.log_level))

        self.id = self.config.id                   # type: int
        self.name = self.config.name               # type: str
        self.is_active = self.config.is_active     # type: bool
        self.host = self.config.host or ''         # type: str
        self.port = self.config.port or None       # type: int

        self.username = self.config.username           # type: str
        self.sftp_command = self.config.sftp_command   # type: str
        self.ping_command = self.config.ping_command   # type: str

        self.identity_file = self.config.identity_file or ''     # type: str
        self.ssh_config_file = self.config.ssh_config_file or '' # type: str

        self.log_level = self.config.log_level       # type: int
        self.should_flush = self.config.should_flush # type: bool
        self.buffer_size = self.config.buffer_size   # type: int

        self.ssh_options = self.config.ssh_options or []     # type: list
        self.force_ip_type = self.config.force_ip_type or '' # type: str

        self.should_preserve_meta = self.config.should_preserve_meta     # type: bool
        self.is_compression_enabled = self.config.is_compression_enabled # type: bool

        # SFTP expects kilobits instead of megabytes
        self.bandwidth_limit = int(float(self.config.bandwidth_limit) * mb_to_kbit) # type: int

        # Added for API completeness
        self.is_connected = True
        self.password = 'dummy-password'

        # Create the reusable command object
        self.command = self.get_command()

    def get_command(self):
        """ Returns a reusable sh.Command object that can execute multiple different SFTP commands.
        """
        # sh
        from sh import Command

        # A list of arguments that will be added to the base command
        args = []

        # Buffer size is always available
        args.append('-B')
        args.append(self.buffer_size)

        # Bandwidth limit is always available
        args.append('-l')
        args.append(self.bandwidth_limit)

        # Preserving file and directory metadata is optional
        if self.should_preserve_meta:
            args.append('-p')

        # Immediate flushing is optional
        if self.should_flush:
            args.append('-f')

        # Compression is optional
        if self.is_compression_enabled:
            args.append('-C')

        # Forcing a particular IP version is optional
        if self.force_ip_type:
            args.append(ip_type_map[self.force_ip_type])

        # Port is optional
        if self.port:
            args.append('-P')
            args.append(self.port)

        # Identity file is optional
        if self.identity_file:
            args.append('-i')
            args.append(self.identity_file)

        # SSH config file is optional
        if self.ssh_config_file:
            args.append('-F')
            args.append(self.ssh_config_file)

        # Base command to build additional arguments into
        command = Command(self.sftp_command)
        command = command.bake(*args)

        return command

    def execute(self, cid, data, log_level=SFTP.LOG_LEVEL.LEVEL4.id):
        """ Executes a single or multiple SFTP commands from the input 'data' string.

        Returns an SFTPOutput object - out.is_ok indicates success and stdout/stderr
        carry the decoded output of the underlying sftp process.
        """
        # Increment the command counter each time .execute is called
        self.command_no += 1

        self.logger.info('Executing cid:`%s` (%s; %s; %s), data:`%s`', cid, self.id, self.name, self.command_no, data)

        # Additional command arguments
        args = []

        with NamedTemporaryFile(mode='w+', suffix='-zato-sftp.txt') as f:

            # Write command to the temporary file
            f.write(data)
            f.flush()

            # Append the file names to the list of arguments SFTP receives
            args.append('-b')
            args.append(f.name)

            # Logging is always available but may map to an empty string
            log_level_mapped = log_level_map[log_level]
            if log_level_mapped:
                args.append(log_level_mapped)

            # Both username and host are optional but if they are provided, they must be the last arguments in the command
            if self.host:
                if self.username:
                    args.append('{}@{}'.format(self.username, self.host))
                else:
                    args.append(self.host)

            out = SFTPOutput(cid, self.command_no)

            try:
                # Finally, execute all the commands
                result = self.command(*args)
            except Exception as e:
                out.is_ok = False
                out.details = format_exc()

                # If the command itself failed (sh raises ErrorReturnCode), the captured
                # output lives on the exception object, not on a result - previously this
                # checked a `result` variable that was always None at this point.
                out.stdout = getattr(e, 'stdout', None)
                out.stderr = getattr(e, 'stderr', None)
            else:
                out.is_ok = True
                out.command = result.cmd
                out.stdout = result.stdout
                out.stderr = result.stderr
            finally:
                # Always decode bytes to text before returning the output to callers
                self.encode_out(out)

            return out

    def encode_out(self, out):
        # type: (SFTPOutput) -> None

        # We need to check for None below, particularly in stderr and stdout,
        # because they both can be an empty bytes object.

        if out.command is not None:
            out.command = [elem.decode('utf8') for elem in out.command[:]]

        if out.stderr is not None:
            out.stderr = out.stderr.decode('utf8')

        if out.stdout is not None:
            out.stdout = out.stdout.decode('utf8')

    def connect(self):
        # We do not maintain long-running connections but we may still want to ping the remote end
        # to make sure we are actually able to connect to it.
        out = self.ping()
        self.logger.info('SFTP ping; name:`%s`, command:`%s`, stdout:`%s`, stderr:`%s`',
            self.name, out.command, out.stdout, out.stderr)

    def close(self):
        # Added for API completeness
        pass

    def ping(self, _utcnow=datetime.utcnow):
        """ Runs the configured ping command, using the current timestamp as part of the CID.
        """
        return self.execute('ping-{}'.format(_utcnow().isoformat()), self.ping_command)
# ################################################################################################################################
class SFTPConnectionContainer(BaseConnectionContainer):
    """ A subprocess-based container for outgoing SFTP connections.

    The _on_* handlers below delegate to the base class; several of them
    are aliased under GENERIC_CONNECTION names for generic-connection callers.
    """

    connection_class = SFTPConnection
    ipc_name = conn_type = logging_file_name = 'sftp'

    remove_id_from_def_msg = False
    remove_name_from_def_msg = False

    def _on_OUTGOING_SFTP_PING(self, msg):
        return super(SFTPConnectionContainer, self).on_definition_ping(msg)

    def _on_OUTGOING_SFTP_DELETE(self, msg):
        return super(SFTPConnectionContainer, self).on_definition_delete(msg)

    _on_GENERIC_CONNECTION_DELETE = _on_OUTGOING_SFTP_DELETE

    def _on_OUTGOING_SFTP_CREATE(self, msg):
        return super(SFTPConnectionContainer, self).on_definition_create(msg)

    _on_GENERIC_CONNECTION_CREATE = _on_OUTGOING_SFTP_CREATE

    def _on_OUTGOING_SFTP_EDIT(self, msg):
        return super(SFTPConnectionContainer, self).on_definition_edit(msg)

    _on_GENERIC_CONNECTION_EDIT = _on_OUTGOING_SFTP_EDIT

    def _on_OUTGOING_SFTP_CHANGE_PASSWORD(self, msg):
        return super(SFTPConnectionContainer, self).on_definition_change_password(msg)

    _on_GENERIC_CONNECTION_CHANGE_PASSWORD = _on_OUTGOING_SFTP_CHANGE_PASSWORD

    def _on_OUTGOING_SFTP_EXECUTE(self, msg, is_reconnect=False, _utcnow=datetime.utcnow):
        """ Executes an SFTP command on behalf of msg.cid and returns a JSON response
        containing stdout/stderr, an is_ok flag and timing information.
        """
        # sh
        from sh import ErrorReturnCode

        out = {}
        connection = self.connections[msg.id] # type: SFTPConnection
        start_time = _utcnow()

        try:
            result = connection.execute(msg.cid, msg.data, msg.log_level) # type: SFTPOutput
        except ErrorReturnCode as e:
            out['stdout'] = e.stdout
            out['stderr'] = e.stderr

            # The command returned a non-zero exit code so this is a failure -
            # previously is_ok was left unset in this branch.
            out['is_ok'] = False
        except Exception:
            out['stderr'] = format_exc()
            out['is_ok'] = False
        else:
            out.update(result.to_dict())
        finally:
            # These fields are attached no matter whether the command succeeded
            out['cid'] = msg.cid
            out['command_no'] = connection.command_no
            out['response_time'] = str(_utcnow() - start_time)

        return Response(data=dumps(out))

if __name__ == '__main__':
    container = SFTPConnectionContainer()
    container.run()
# ################################################################################################################################
| 12,672
|
Python
|
.py
| 243
| 43.444444
| 130
| 0.483824
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,102
|
container.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/subprocess_/impl/events/container.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from datetime import datetime
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.events.common import Action
from zato.common.util.json_ import JSONParser
from zato.common.util.tcp import ZatoStreamServer
from zato.server.connection.connector.subprocess_.base import BaseConnectionContainer
from zato.server.connection.connector.subprocess_.impl.events.database import EventsDatabase, OpCode
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from socket import socket
Bunch = Bunch
socket = socket
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# For later use
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
class EventsConnectionContainer(BaseConnectionContainer):
    """ A subprocess-based TCP stream server that receives events from clients,
    dispatches them by a two-byte action prefix and stores them in EventsDatabase.
    """

    connection_class = object
    ipc_name = conn_type = logging_file_name = 'events'

    remove_id_from_def_msg = False
    remove_name_from_def_msg = False

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # By default, keep running forever
        self.keep_running = True

        # A reusable JSON parser
        self._json_parser = JSONParser()

        # Map two-byte action prefixes to actual handler methods
        self._action_map = {
            Action.Ping: self._on_event_ping,
            Action.Push: self._on_event_push,
            Action.GetTable: self._on_event_get_table,
        }

    def enrich_options(self):
        # type: () -> None

        # When not running as a Zato subprocess, fall back to development defaults
        if not self.options['zato_subprocess_mode']:
            self.options['fs_data_path'] = '/tmp/dev-events'
            self.options['sync_threshold'] = 1
            self.options['sync_interval'] = 1

    def post_init(self):
        """ Creates the events database out of self.options; failures are logged, not raised.
        """
        try:
            fs_data_path = self.options['fs_data_path']
            sync_threshold = int(self.options['sync_threshold'])
            sync_interval = int(self.options['sync_interval'])

            self.events_db = EventsDatabase(logger, fs_data_path, sync_threshold, sync_interval)
        except Exception:
            logger.warning('Exception in post_init -> `%s`', format_exc())

    def _on_event_ping(self, ignored_data, address_str):
        # type: (str, str) -> str
        logger.info('Ping received from `%s`', address_str)
        return Action.PingReply

    def _on_event_push(self, data, ignored_address_str, _opcode=OpCode.Push):
        # type: (str, str, str) -> None

        # We received JSON bytes so we now need to load a Python object out of it ..
        data = self._json_parser.parse(data)
        data = data.as_dict() # type: dict

        # .. now, we can push it to the database.
        self.events_db.access_state(_opcode, data)

    def _on_event_get_table(self, ignored_data, ignored_address_str, _opcode=OpCode.Tabulate):
        # type: (str, str, str) -> str

        # Note: handlers are invoked as func(data, address_str) - the previous
        # signature had only one positional parameter before _opcode, which made
        # the address string silently bind to _opcode.
        data = self.events_db.get_table()
        return Action.GetTableReply + data.to_json().encode('utf8')

    def _on_new_connection(self, socket, address):
        # type: (socket, str) -> None

        # For later use
        address_str = '{}:{}'.format(address[0], address[1])

        # A new client connected to our server
        logger.info('New stream connection from %s', address_str)

        # Get access to the underlying file object
        socket_file = socket.makefile(mode='rb')

        try:
            # Keep running until explicitly requested not to
            while self.keep_running:

                # We work on a line-by-line basis
                line = socket_file.readline()

                # No input = client is no longer connected
                if not line:
                    logger.info('Stream client disconnected (%s)', address_str)
                    break

                # Extract the action sent ..
                action = line[:2]

                # .. find the handler function ..
                func = self._action_map.get(action)

                # .. no such handler = disconnect the client ..
                if not func:
                    logger.warning('No handler for `%r` found. Disconnecting stream client (%s)', action, address_str)
                    break

                # .. otherwise, handle the action ..
                data = line[2:]

                # Reset per-iteration - previously, if func raised on the very first
                # iteration, `response` was unbound (NameError) and on later iterations
                # a stale response from a previous action could be re-sent.
                response = None

                try:
                    response = func(data, address_str) # type: str
                except Exception as e:
                    logger.warning('Exception when calling func `%s` -> %s -> %s -> %s', func, address_str, data, e.args)

                # .. not all actions will result in a response ..
                if response:
                    response = response.encode('utf8') if isinstance(response, str) else response

                    # .. now, we can send the response to the client.
                    socket.sendall(response)

            # If we are here, it means that the client disconnected.
            socket_file.close()

        except Exception:
            logger.warning('Exception in _on_new_connection (%s) -> `%s`', address_str, format_exc())

    def make_server(self):
        """ Returns the TCP stream server that serves incoming event connections.
        """
        return ZatoStreamServer((self.host, self.port), self._on_new_connection)

if __name__ == '__main__':
    container = EventsConnectionContainer()
    container.run()
# ################################################################################################################################
# ################################################################################################################################
| 7,929
|
Python
|
.py
| 135
| 50.037037
| 130
| 0.402844
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,103
|
database.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/connector/subprocess_/impl/events/database.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from datetime import datetime, timedelta
from typing import Optional as optional
# Humanize
from humanize import intcomma as int_to_comma
# Zato
from zato.common.api import Stats
from zato.common.ext.dataclasses import dataclass
from zato.common.in_ram import InRAMStore
# ################################################################################################################################
# ################################################################################################################################
if 0:
from logging import Logger
from pandas import DataFrame
DataFrame = DataFrame
Logger = Logger
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
class OpCode:
    """ Opcodes identifying operations on the events database.

    The string values double as telemetry keys, so they must stay stable.
    """

    # Public opcodes - dispatched through EventsDatabase.opcode_to_func
    Push = 'EventsDBPush'
    Tabulate = 'EventsDBTabulate'

    class Internal:
        # Internal opcodes - used only as telemetry counters, never dispatched
        SaveData = 'InternalSaveData'
        SyncState = 'InternalSyncState'
        GetFromRAM = 'InternalGetFromRAM'
        ReadParqet = 'InternalReadParqet' # NOTE: 'Parqet' (sic) - the misspelling is part of the telemetry key
        CreateNewDF = 'InternalCreateNewDF'
        CombineData = 'InternalCombineData'

# Module-level aliases for the internal opcodes above, used as default arguments
# and dictionary keys throughout this module
_op_int_save_data = OpCode.Internal.SaveData
_op_int_sync_state = OpCode.Internal.SyncState
_op_int_get_from_ram = OpCode.Internal.GetFromRAM
_op_int_read_parqet = OpCode.Internal.ReadParqet
_op_int_create_new_df = OpCode.Internal.CreateNewDF
_op_int_combine_data = OpCode.Internal.CombineData
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class Event:
    """ A single event as stored in the events database.

    All fields are assigned attribute-by-attribute (init=False).
    """

    # A unique identifier assigned to this event by Zato
    id: str

    # A correlation ID assigned by Zato - multiple events may have the same CID
    cid: str

    # In reply to which previous event this one was generated (also assigned by Zato)
    in_reply_to: optional[str]

    # To what group the event belongs
    group_id: optional[int]

    # To what subgroup the event belongs
    sub_group_id: optional[int]

    # ID of this event as it was assigned by an external system
    ext_id: optional[str]

    # A correlation ID of this event as it was assigned by an external system
    ext_cid: optional[str]

    # NOTE(review): the original declared ext_cid twice; the second declaration
    # was commented "what ID the external system is replying to" and was
    # presumably meant to be a separate field (e.g. ext_in_reply_to) - the
    # duplicate has been removed here; confirm before adding such a field.

    # A group ID this event belongs to, as it was assigned by an external system
    ext_group_id: optional[str]

    # What triggered this event, in broad terms, e.g. a Zato service
    source_type: int

    # What the ID of the source is
    source_id: str

    # What the recipient of this event is, in broad terms, e.g. an external system
    recipient_type: int

    # What the ID of the recipient is
    recipient_id: str

    # A further restriction of the source type
    source_sub_type: optional[int]

    # What Zato user triggered the event
    user_id: optional[str]

    # Source system of the user_id attribute
    user_source: optional[str]

    # What external user triggered the event
    ext_user_id: optional[str]

    # Source system of the ext_user_id attribute
    ext_user_source: optional[str]

    # Timestamp of this event, as assigned by Zato
    timestamp: str

    # Timestamp of this event, as assigned by an external system
    ext_timestamp: optional[str]

    # Year of this event, e.g. 2098
    year: int

    # Month of this event, e.g. 1 for January
    month: int

    # Day of month of this event, e.g. 29
    day: int

    # A concatenation of year and month, e.g. 2098-01 for January, 2098
    date_ym: str

    # A concatenation of year, month and day, e.g. 2098-01-30 for January the 30th, 2098
    date_ymd: str

    # An hour of day of this event, e.g. 1 for 1 AM or 13 for 13:00 (1 PM)
    hour: int

    # Day of calendar week of this event, from 1 to 7 where 1=Monday.
    day_of_week: int

    # Total time this event took
    total_time_ms: optional[int] # In milliseconds (lower precision than ns)

    # As above, in ns
    total_time_ns: optional[int] # In nanoseconds (full precision, if possible)

    # What sort of an object this event is about
    object_type: optional[str]

    # Type of the object the event is about
    object_id: optional[str]

    # A further restriction of the object's type
    object_sub_type: optional[str]

    # What category this event belongs to
    cat_id: optional[int]

    # What subcategory this event belongs to
    sub_cat_id: optional[int]

    # What category this event belongs to (assigned by an external system)
    ext_cat_id: optional[str]

    # What subcategory this event belongs to (assigned by an external system)
    ext_sub_cat_id: optional[str]
# ################################################################################################################################
# ################################################################################################################################
class EventsDatabase(InRAMStore):
def __init__(self, logger, fs_data_path, sync_threshold, sync_interval, max_retention=Stats.MaxRetention):
super().__init__(sync_threshold, sync_interval)
# Numpy
import numpy as np
# Pandas
import pandas as pd
# Our self.logger object
self.logger = logger
# Top-level directory to keep persistent data in
self.fs_data_path = fs_data_path
# Aggregated usage data is kept here
self.fs_usage_path = os.path.join(self.fs_data_path, 'usage')
# Aggregated response times are kept here
self.fs_response_time_path = os.path.join(self.fs_data_path, 'response-time')
# In-RAM database of events, saved to disk periodically in background
self.in_ram_store = [] # type: list[Event]
# Fow how long to keep statistics in persistent storage
self.max_retention = max_retention # type: int
# Configure our opcodes
self.opcode_to_func[OpCode.Push] = self.push
self.opcode_to_func[OpCode.Tabulate] = self.get_table
# Reusable Panda groupers
self.group_by = {}
# Each aggregated result will have these columns
self.agg_by = {
'item_max': pd.NamedAgg(column='total_time_ms', aggfunc='max'),
'item_min': pd.NamedAgg(column='total_time_ms', aggfunc='min'),
'item_mean': pd.NamedAgg(column='total_time_ms', aggfunc='mean'),
'item_total_time': pd.NamedAgg(column='total_time_ms', aggfunc='sum'),
'item_total_usage': pd.NamedAgg(column='total_time_ms', aggfunc=np.count_nonzero),
}
# Configure our telemetry opcodes
self.telemetry[_op_int_save_data] = 0
self.telemetry[_op_int_sync_state] = 0
self.telemetry[_op_int_get_from_ram] = 0
self.telemetry[_op_int_read_parqet] = 0
self.telemetry[_op_int_create_new_df] = 0
self.telemetry[_op_int_combine_data] = 0
# Configure Panda objects
self.set_up_group_by()
# ################################################################################################################################
def set_up_group_by(self):
# type: () -> None
# Pandas
import pandas as pd
# This can be added manually
self.group_by[Stats.TabulateAggr] = pd.Grouper(key=Stats.TabulateAggr)
# Construct frequency aggregation configuration ..
time_freq_aggr_group_by = [
# This is used by default
Stats.DefaultAggrTimeFreq,
]
# .. and add groupers.
for time_freq in time_freq_aggr_group_by:
group_by = self.get_group_by(time_freq)
self.group_by[time_freq] = group_by
# ################################################################################################################################
def get_group_by(self, time_freq):
# type: (str) -> list
# Pandas
import pandas as pd
return [
pd.Grouper(key='timestamp', freq=time_freq),
pd.Grouper(key='object_id'),
]
# ################################################################################################################################
def push(self, data):
# type: (dict) -> None
self.in_ram_store.append(data)
# ################################################################################################################################
def load_data_from_storage(self):
""" Reads existing data from persistent storage and returns it as a DataFrame.
"""
# Pandas
import pandas as pd
# Let's check if we already have anything in storage ..
if os.path.exists(self.fs_data_path):
# Let the users know what we are doing ..
self.logger.info('Loading DF data from %s', self.fs_data_path)
# .. load existing data from storage ..
start = utcnow()
existing = pd.read_parquet(self.fs_data_path) # type: pd.DataFrame
# .. log the time it took to load the data ..
self.logger.info('DF data read in %s; len_existing=%s', utcnow() - start, int_to_comma(len(existing)))
# .. update counters ..
self.telemetry[_op_int_read_parqet] += 1
else:
# .. create a new DF instead ..
existing = pd.DataFrame()
# .. update counters ..
self.telemetry[_op_int_create_new_df] += 1
# .. return the result, no matter where it came from.
return existing
# ################################################################################################################################
def get_data_from_ram(self):
""" Turns data currently stored in RAM into a DataFrame.
"""
# type: () -> None
# Pandas
import pandas as pd
# Let the users know what we are doing ..
self.logger.info('Building DF out of len_current=%s', int_to_comma(len(self.in_ram_store)))
# .. convert the data collected so far into a DataFrame ..
start = utcnow()
current = pd.DataFrame(self.in_ram_store)
# .. log the time it took build the DataFrame ..
self.logger.info('DF built in %s', utcnow() - start)
# .. update counters ..
self.telemetry[_op_int_get_from_ram] += 1
return current
# ################################################################################################################################
def aggregate(self, data, time_freq=Stats.DefaultAggrTimeFreq):
# Pandas
import pandas as pd
# Check if we have had this particular frequency before ..
group_by = self.group_by.get(time_freq)
# .. if not, set it up now.
if not group_by:
self.group_by[time_freq] = self.get_group_by(time_freq)
group_by = self.group_by[time_freq]
data = data.set_index(pd.DatetimeIndex(data['timestamp']))
data.index.name = 'idx_timestamp'
aggregated = data.\
groupby(group_by).\
agg(**self.agg_by)
return aggregated
# ################################################################################################################################
def combine_data(self, existing, current):
""" Combines on disk and in-RAM data.
"""
# type: (DataFrame, DataFrame) -> DataFrame
# Pandas
import pandas as pd
# Let the user know what we are doing ..
self.logger.info('Combining existing and current data')
# .. combine the existing and current data ..
start = utcnow()
combined = pd.concat([existing, current])
# .. log the time it took to combine the DataFrames..
self.logger.info('DF combined in %s', utcnow() - start)
# .. update counters ..
self.telemetry[_op_int_combine_data] += 1
return combined
# ################################################################################################################################
def trim(self, data, utcnow=utcnow, timedelta=timedelta):
if len(data):
# Check how many of the past events to leave, i.e. events older than this will be discarded
max_retained = utcnow() - timedelta(milliseconds=self.max_retention)
max_retained = max_retained.isoformat()
# .. construct a new dataframe, containing only the events that are younger than max_retained ..
data = data[data['timestamp'] > max_retained]
# .. and return it to our caller.
return data
# ################################################################################################################################
def save_data(self, data):
# type: (DataFrame) -> None
# Let the user know what we are doing ..
self.logger.info('Saving DF to %s', self.fs_data_path)
# .. save the DF to persistent storage ..
start = utcnow()
data.to_parquet(self.fs_data_path)
# .. log the time it took to save to storage ..
self.logger.info('DF saved in %s', utcnow() - start)
# .. update counters ..
self.telemetry[_op_int_save_data] += 1
# ################################################################################################################################
def _sync_state(self, _utcnow=utcnow):
    """ Merges in-RAM data with storage, trims the result to the retention window
    and saves it back, emptying the in-RAM store afterwards. Not thread-safe on its own -
    callers go through sync_state which holds self.update_lock.
    """
    # Taken now so the total processing time can be reported at the end
    started = _utcnow()

    # A header block so the start of this procedure is easy to spot in logs
    self.logger.info('********************************************************************************* ')
    self.logger.info('*********************** DataFrame (DF) Sync storage ***************************** ')
    self.logger.info('********************************************************************************* ')

    # What is already persisted ..
    existing = self.load_data_from_storage()

    # .. what has accumulated in RAM since the last sync ..
    current = self.get_data_from_ram()

    # .. both of the above combined ..
    combined = self.combine_data(existing, current)

    # .. with events past the retention window discarded ..
    trimmed = self.trim(combined)

    # .. written back to persistent storage.
    self.save_data(trimmed)

    # The in-RAM dataset has been persisted so it can be emptied now
    self.in_ram_store[:] = []

    # Report the total processing time
    self.logger.info('DF total processing time %s', utcnow() - started)

    # Bump the telemetry counter for this operation
    self.telemetry[_op_int_sync_state] += 1
# ################################################################################################################################
def sync_state(self):
    """ Thread-safe wrapper around _sync_state.
    """
    with self.update_lock:
        self._sync_state()
# ################################################################################################################################
def get_table(self):
    """ Returns all statistics tabulated, with rows transposed into columns,
    which is the layout callers expect.
    """
    # The key to group the tabulated output by
    tabulate_group_by = self.group_by[Stats.TabulateAggr]

    with self.update_lock:

        # .. flush in-RAM data first so storage holds the latest dataset ..
        self._sync_state()

        # .. load the freshly synchronised data ..
        data = self.load_data_from_storage()

        # .. aggregate it by the tabulation key ..
        result = data.groupby(tabulate_group_by).agg(**self.agg_by)

        # .. swap rows and columns ..
        result = result.transpose()

        # .. finally, return the result.
        return result
# ################################################################################################################################
def run(self):
    """ A deliberate no-op - this class has no background processing loop of its own.
    """
# ################################################################################################################################
# ################################################################################################################################
| 16,918
|
Python
|
.py
| 337
| 42.608309
| 130
| 0.50137
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,104
|
channel.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/http_soap/channel.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from datetime import datetime
from gzip import GzipFile
from hashlib import sha256
from http.client import BAD_REQUEST, FORBIDDEN, INTERNAL_SERVER_ERROR, METHOD_NOT_ALLOWED, NOT_FOUND, UNAUTHORIZED
from io import StringIO
from traceback import format_exc
# regex
from regex import compile as regex_compile
# Zato
from zato.common.api import CHANNEL, CONTENT_TYPE, DATA_FORMAT, HL7, HTTP_SOAP, MISC, RATE_LIMIT, SEC_DEF_TYPE, SIMPLE_IO, \
SSO, TRACE1, URL_PARAMS_PRIORITY, ZATO_NONE
from zato.common.audit_log import DataReceived, DataSent
from zato.common.const import ServiceConst
from zato.common.exception import HTTP_RESPONSES, ServiceMissingException
from zato.common.hl7 import HL7Exception
from zato.common.json_internal import dumps, loads
from zato.common.json_schema import DictError as JSONSchemaDictError, ValidationException as JSONSchemaValidationException
from zato.common.marshal_.api import Model, ModelValidationError
from zato.common.rate_limiting.common import AddressNotAllowed, BaseException as RateLimitingException, RateLimitReached
from zato.common.typing_ import cast_
from zato.common.util.auth import enrich_with_sec_data, extract_basic_auth
from zato.common.util.exception import pretty_format_exception
from zato.common.util.http_ import get_form_data as util_get_form_data, QueryDict
from zato.cy.reqresp.payload import SimpleIOPayload as CySimpleIOPayload
from zato.server.connection.http_soap import BadRequest, ClientHTTPError, Forbidden, MethodNotAllowed, NotFound, \
TooManyRequests, Unauthorized
from zato.server.groups.ctx import SecurityGroupsCtx
from zato.server.service.internal import AdminService
# ################################################################################################################################
if 0:
from zato.broker.client import BrokerClient
from zato.common.typing_ import any_, anydict, anytuple, callable_, dictnone, stranydict, strlist, strstrdict
from zato.server.service import Service
from zato.server.base.parallel import ParallelServer
from zato.server.base.worker import WorkerStore
from zato.server.connection.http_soap.url_data import URLData
BrokerClient = BrokerClient
ParallelServer = ParallelServer
Service = Service
URLData = URLData
# ################################################################################################################################
# Module-level logger plus pre-bound lookups to avoid repeated attribute access in the hot path
logger = logging.getLogger('zato_rest')
_logger_is_enabled_for = logger.isEnabledFor
_logging_info = logging.INFO

# Splits a string into chunks of up to eight characters each -
# used below to format request-hash values with dashes for readability.
split_re = regex_compile('........?').findall # type: ignore

# ################################################################################################################################

# Accept-header values meaning that any content type is accepted
accept_any_http = HTTP_SOAP.ACCEPT.ANY
accept_any_internal = HTTP_SOAP.ACCEPT.ANY_INTERNAL
# ################################################################################################################################
# https://tools.ietf.org/html/rfc6585
TOO_MANY_REQUESTS = 429 # Defined manually - not available in http.client

# Pre-built '<code> <reason>' WSGI status lines for the error responses produced in this module
_status_bad_request = '{} {}'.format(BAD_REQUEST, HTTP_RESPONSES[BAD_REQUEST])
_status_internal_server_error = '{} {}'.format(INTERNAL_SERVER_ERROR, HTTP_RESPONSES[INTERNAL_SERVER_ERROR])
_status_not_found = '{} {}'.format(NOT_FOUND, HTTP_RESPONSES[NOT_FOUND])
_status_method_not_allowed = '{} {}'.format(METHOD_NOT_ALLOWED, HTTP_RESPONSES[METHOD_NOT_ALLOWED])
_status_unauthorized = '{} {}'.format(UNAUTHORIZED, HTTP_RESPONSES[UNAUTHORIZED])
_status_forbidden = '{} {}'.format(FORBIDDEN, HTTP_RESPONSES[FORBIDDEN])
_status_too_many_requests = '{} {}'.format(TOO_MANY_REQUESTS, HTTP_RESPONSES[TOO_MANY_REQUESTS])

# ################################################################################################################################

# Optional pretty-printer for tracebacks - None here means plain format_exc output is used instead
stack_format = None
_utcnow=datetime.utcnow

# ################################################################################################################################

# The HL7 v2 data format identifier, compared against channel configuration below
_data_format_hl7 = HL7.Const.Version.v2.id

# ################################################################################################################################

# Security definition types that may be linked to SSO users
_basic_auth = SEC_DEF_TYPE.BASIC_AUTH
_jwt = SEC_DEF_TYPE.JWT
_sso_ext_auth = _basic_auth, _jwt

# ################################################################################################################################

# Maps each HTTP status code to its full '<code> <reason>' WSGI status line
status_response = {}
for code, response in HTTP_RESPONSES.items():
    status_response[code] = '{} {}'.format(code, response)
# ################################################################################################################################
class ModuleCtx:
    """ A container for constants used throughout this module.
    """
    # The channel type serviced by this module
    Channel = CHANNEL.HTTP_SOAP

    # url_match values that indicate no URL was matched
    No_URL_Match = (None, False)

    # Rate-limiting object types
    Rate_Limit_HTTP = RATE_LIMIT.OBJECT_TYPE.HTTP_SOAP
    Rate_Limit_SSO_User = RATE_LIMIT.OBJECT_TYPE.SSO_USER

    # A row of eighty asterisks used as a visual separator
    Exception_Separator = '*' * 80

    # SimpleIO data format identifiers
    SIO_JSON = SIMPLE_IO.FORMAT.JSON
    SIO_FORM_DATA = SIMPLE_IO.FORMAT.FORM_DATA

    # Data formats whose payloads are dict-like and can be serialized with dumps
    Dict_Like = {DATA_FORMAT.JSON, DATA_FORMAT.DICT, DATA_FORMAT.FORM_DATA}

    # Content-Type prefixes indicating an HTML form submission
    Form_Data_Content_Type = ('application/x-www-form-urlencoded', 'multipart/form-data')
# ################################################################################################################################
# 404 message templates - the first one is returned to clients and carries the CID only,
# the second one goes to logs and includes the requested URL, method and Accept header.
response_404 = 'URL not found (CID:{})'
response_404_log = 'URL not found `%s` (Method:%s; Accept:%s; CID:%s)'
# ################################################################################################################################
def client_json_error(cid:'str', details:'any_') -> 'str':
    """ Serializes an error indicator, its correlation ID and optional details to a JSON string.
    """
    # Exception arguments may arrive as a tuple, in which case
    # each element is decoded from bytes individually ..
    if isinstance(details, tuple):
        exc_details = []
        for elem in details: # type: ignore
            exc_details.append(elem.decode('utf8') if isinstance(elem, bytes) else elem)

    # .. otherwise, decode the sole value if it is bytes.
    else:
        exc_details = details.decode('utf8') if isinstance(details, bytes) else details

    out = {'result':'Error', 'cid':cid} # type: stranydict

    # Details are attached only if there were any on input
    if details:
        out['details'] = exc_details

    return dumps(out)
# ################################################################################################################################
# Maps transport or data-format identifiers to functions that serialize
# error responses for clients - consulted by get_client_error_wrapper below.
client_error_wrapper = {
    DATA_FORMAT.JSON: client_json_error,
    HL7.Const.Version.v2.id: client_json_error,
}
# ################################################################################################################################
def get_client_error_wrapper(transport:'str', data_format:'str') -> 'callable_':
    """ Returns the error-serializing function for the given transport,
    falling back to a lookup by data format.
    """
    if transport in client_error_wrapper:
        return client_error_wrapper[transport]

    # A KeyError raised by this fallback lookup is deliberately
    # left for the caller to handle.
    return client_error_wrapper[data_format]
# ################################################################################################################################
class _CachedResponse:
""" A wrapper for responses served from caches.
"""
__slots__ = ('payload', 'content_type', 'headers', 'status_code')
def __init__(self, payload:'any_', content_type:'str', headers:'stranydict', status_code:'int') -> 'None':
self.payload = payload
self.content_type = content_type
self.headers = headers
self.status_code = status_code
# ################################################################################################################################
class _HashCtx:
""" Encapsulates information needed to compute a hash value of an incoming request.
"""
def __init__(
self,
raw_request:'str',
channel_item:'any_',
channel_params:'stranydict',
wsgi_environ:'stranydict'
) -> 'None':
self.raw_request = raw_request
self.channel_item = channel_item
self.channel_params = channel_params
self.wsgi_environ = wsgi_environ
# ################################################################################################################################
class RequestDispatcher:
    """ Dispatches all the incoming HTTP requests to appropriate handlers.

    Fix note: the JSON Schema validation branch previously assigned the string
    `_status_bad_request` to `status_code` and never updated `status`, so schema
    validation errors were returned with HTTP 500 instead of 400. Both variables
    are now set consistently with the other error branches.
    """
    def __init__(
        self,
        *,
        server:'ParallelServer',
        url_data:'URLData',
        request_handler:'RequestHandler',
        simple_io_config:'stranydict',
        return_tracebacks:'bool',
        default_error_message:'str',
        http_methods_allowed:'strlist'
    ) -> 'None':
        self.server = server
        self.url_data = url_data
        self.request_handler = request_handler
        self.simple_io_config = simple_io_config
        self.return_tracebacks = return_tracebacks
        self.default_error_message = default_error_message
        self.http_methods_allowed = http_methods_allowed

        # To reduce the number of attribute lookups
        self._sso_api_user = getattr(self.server, 'sso_api', None)

# ################################################################################################################################

    def dispatch(
        self,
        cid:'str',
        req_timestamp:'str',
        wsgi_environ:'stranydict',
        worker_store:'WorkerStore',
        user_agent:'str',
        remote_addr:'str'
    ) -> 'any_':
        """ The entry point for each incoming HTTP request - matches the URL,
        enforces security, rate limits and audit logging, invokes the request
        handler and serializes both successful and error responses.
        """
        # Reusable
        _has_log_info = _logger_is_enabled_for(_logging_info)

        # Needed as one of the first steps
        http_method = wsgi_environ['REQUEST_METHOD']
        http_method = http_method if isinstance(http_method, str) else http_method.decode('utf8')

        http_accept = wsgi_environ.get('HTTP_ACCEPT') or accept_any_http
        http_accept = http_accept.replace('*', accept_any_internal).replace('/', 'HTTP_SEP')

        # Needed in later steps
        path_info = wsgi_environ['PATH_INFO']
        wsgi_raw_uri = wsgi_environ['RAW_URI']
        wsgi_remote_port = wsgi_environ['REMOTE_PORT']

        # Immediately reject the request if it is not a supported HTTP method, no matter what channel
        # it would have otherwise matched.
        if http_method not in self.http_methods_allowed:
            wsgi_environ['zato.http.response.status'] = _status_method_not_allowed
            return client_json_error(cid, 'Unsupported HTTP method')

        # Can we recognize this URL path?
        # This gives us the URL info and security data - but note that here
        # we still haven't validated credentials, only matched the URL.
        # Credentials are checked in a call to self.url_data.check_security
        url_match, channel_item = self.url_data.match(path_info, http_method, http_accept) # type: ignore

        url_match = cast_('str', url_match)
        channel_item = cast_('anydict', channel_item)

        # .. the item itself may be None in case it is a 404 ..
        if channel_item:
            channel_name = channel_item['name']
        else:
            channel_name = '(None)'

        # This is needed in parallel.py's on_wsgi_request
        wsgi_environ['zato.channel_item'] = channel_item

        # Read the raw data
        payload = wsgi_environ['wsgi.input'].read()

        # Store for later use prior to any kind of parsing
        wsgi_environ['zato.http.raw_request'] = payload

        # Assume that by default we are not authenticated / authorized
        auth_result = None

        # This dictionary may be populated by a service with HTTP headers,
        # which the headers will be still in the dictionary even if the service
        # raises an exception. In this way we can return both the headers
        # and a non-200 response to the caller.
        zato_response_headers_container = {}

        # .. before proceeding, log what we have learned so far about the request ..
        # .. but do not do it for paths that are explicitly configured to be ignored ..
        if _has_log_info:
            if not path_info in self.server.rest_log_ignore:
                msg = f'REST cha → cid={cid}; {http_method} {wsgi_raw_uri} name={channel_name}; len={len(payload)}; '
                msg += f'agent={user_agent}; remote-addr={remote_addr}:{wsgi_remote_port}'
                logger.info(msg)

        # .. we have a match and we can possibly handle the incoming request ..
        if url_match not in ModuleCtx.No_URL_Match:
            try:
                # Raise 404 if the channel is inactive
                if not channel_item['is_active']:
                    logger.warning('url_data:`%s` is not active, raising NotFound', url_match)
                    raise NotFound(cid, 'Channel inactive')

                # This the string pointing to the URL path that we matched
                match_target = channel_item['match_target']

                # This is the channel's security definition, if any
                sec = self.url_data.url_sec[match_target] # type: ignore

                # This may point to security groups attached to this channel
                security_groups_ctx = channel_item.get('security_groups_ctx')

                # Assume we have no form (POST) data by default.
                post_data = {}

                # Extract the form (POST) data in case we expect it and the content type indicates it will exist.
                if channel_item['data_format'] == ModuleCtx.SIO_FORM_DATA:
                    if wsgi_environ.get('CONTENT_TYPE', '').startswith(ModuleCtx.Form_Data_Content_Type):
                        post_data = util_get_form_data(wsgi_environ)

                        # This is handy if someone invoked URLData's OAuth API manually
                        wsgi_environ['zato.oauth.post_data'] = post_data

                #
                # This will check credentials based on a security definition attached to the channel
                #
                if sec.sec_def != ZATO_NONE or sec.sec_use_rbac is True:

                    # Do check credentials based on a security definition
                    auth_result = self.url_data.check_security(
                        sec,
                        cid,
                        channel_item,
                        path_info,
                        payload,
                        wsgi_environ,
                        post_data,
                        worker_store,
                        enforce_auth=True
                    )

                #
                # This will check credentials based on security groups potentially assigned to the channel ..
                #
                if security_groups_ctx:

                    # .. if we do not have any members, we do not check anything ..
                    if security_groups_ctx.has_members():

                        # .. this will raise an exception if the validation fails.
                        self.check_security_via_groups(cid, channel_item['name'], security_groups_ctx, wsgi_environ)

                #
                # If we are here, it means that credentials are correct or they were not required
                #

                # Check rate limiting now - this could not have been done earlier because we wanted
                # for security checks to be made first. Otherwise, someone would be able to invoke
                # our endpoint without credentials as many times as it is needed to exhaust the rate limit,
                # denying in this manner access to genuine users.
                if channel_item.get('is_rate_limit_active'):
                    self.server.rate_limiting.check_limit(
                        cid, ModuleCtx.Rate_Limit_HTTP, channel_item['name'], wsgi_environ['zato.http.remote_addr'])

                # Store data received in audit log now - again, just like we rate limiting, we did not want to do it too soon.
                if channel_item.get('is_audit_log_received_active'):

                    # Describe our event ..
                    data_event = DataReceived()
                    data_event.type_ = ModuleCtx.Channel
                    data_event.object_id = channel_item['id']
                    data_event.data = payload
                    data_event.timestamp = req_timestamp
                    data_event.msg_id = cid

                    # .. and store it in the audit log.
                    self.server.audit_log.store_data_received(data_event)

                # Security definition-based checks went fine but it is still possible
                # that this sec_def is linked to an SSO user whose rate limits we need to check.

                # Check SSO-related limits only if SSO is enabled
                if self._sso_api_user:

                    # Not all sec_def types may have associated SSO users
                    if sec.sec_def != ZATO_NONE:
                        if sec.sec_def.sec_type in _sso_ext_auth:

                            # JWT comes with external sessions whereas Basic Auth does not
                            if auth_result and sec.sec_def.sec_type and hasattr(auth_result, 'raw_token'):
                                ext_session_id = auth_result.raw_token
                            else:
                                ext_session_id = None

                            # Try to log in the user to SSO by that account's external credentials.
                            self.server.sso_tool.on_external_auth(
                                sec.sec_def.sec_type, sec.sec_def.id, sec.sec_def.username, cid,
                                wsgi_environ, ext_session_id)

                if channel_item['merge_url_params_req']:
                    channel_params = self.request_handler.create_channel_params(
                        url_match, # type: ignore
                        channel_item,
                        wsgi_environ,
                        payload,
                        post_data
                    )
                else:
                    channel_params = {}

                # This is the call that obtains a response.
                response = self.request_handler.handle(cid, url_match, channel_item, wsgi_environ,
                    payload, worker_store, self.simple_io_config, post_data, path_info, channel_params,
                    zato_response_headers_container)

                # Add the default headers.
                wsgi_environ['zato.http.response.headers']['Content-Type'] = response.content_type
                wsgi_environ['zato.http.response.headers'].update(response.headers)
                wsgi_environ['zato.http.response.status'] = status_response[response.status_code]

                if channel_item['content_encoding'] == 'gzip':

                    # NOTE(review): GzipFile generally expects a bytes file object -
                    # confirm that StringIO works with the payloads this branch receives.
                    s = StringIO()
                    with GzipFile(fileobj=s, mode='w') as f: # type: ignore
                        _ = f.write(response.payload)
                    response.payload = s.getvalue()
                    s.close()

                    wsgi_environ['zato.http.response.headers']['Content-Encoding'] = 'gzip'

                # Store data sent in audit
                if channel_item.get('is_audit_log_sent_active'):

                    # Describe our event ..
                    data_event = DataSent()
                    data_event.type_ = ModuleCtx.Channel
                    data_event.object_id = channel_item['id']
                    data_event.data = response.payload # type: ignore
                    data_event.timestamp = _utcnow()
                    data_event.msg_id = 'zrp{}'.format(cid) # This is a response to this CID
                    data_event.in_reply_to = cid

                    # .. and store it in the audit log.
                    self.server.audit_log.store_data_sent(data_event)

                # Finally, return payload to the client, potentially deserializing it from CySimpleIO first.
                if isinstance(response.payload, CySimpleIOPayload):
                    payload = response.payload.getvalue()
                    if isinstance(payload, dict):
                        if 'response' in payload:
                            payload = payload['response']
                            payload = dumps(payload)
                else:
                    payload = response.payload

                return payload

            except Exception as e:
                _format_exc = format_exc()
                status = _status_internal_server_error
                if isinstance(e, (ClientHTTPError, ModelValidationError)):

                    response = e.msg
                    status_code = e.status

                    # TODO: Refactor this series of if/else's into a lookup dict.
                    if isinstance(e, Unauthorized):
                        status = _status_unauthorized
                        if e.challenge:
                            wsgi_environ['zato.http.response.headers']['WWW-Authenticate'] = e.challenge

                    elif isinstance(e, (BadRequest, ModelValidationError)):
                        status = _status_bad_request

                        # This is the channel that Dashboard uses and we want to return
                        # all the details in such cases because it is useful during development
                        if channel_item['name'] == MISC.DefaultAdminInvokeChannel:
                            response = e.msg
                        else:
                            # Note that SSO channels do not return details
                            url_path = channel_item['url_path'] # type: str
                            needs_msg = e.needs_msg and (not url_path.startswith(SSO.Default.RESTPrefix))
                            response = e.msg if needs_msg else 'Bad request'

                    elif isinstance(e, NotFound):
                        status = _status_not_found

                    elif isinstance(e, MethodNotAllowed):
                        status = _status_method_not_allowed

                    elif isinstance(e, Forbidden):
                        status = _status_forbidden

                    elif isinstance(e, TooManyRequests):
                        status = _status_too_many_requests

                else:
                    # JSON Schema validation
                    if isinstance(e, JSONSchemaValidationException):

                        # Fixed: previously status_code was assigned the string
                        # _status_bad_request and status stayed at 500.
                        status_code = BAD_REQUEST
                        status = _status_bad_request

                        needs_prefix = not e.needs_err_details
                        response = JSONSchemaDictError(
                            cid, e.needs_err_details, e.error_msg, needs_prefix=needs_prefix).serialize(to_string=True)

                    # Rate limiting and whitelisting
                    elif isinstance(e, RateLimitingException):
                        response, status_code, status = self._on_rate_limiting_exception(e)

                    # HL7
                    elif channel_item['data_format'] == _data_format_hl7:
                        response, status_code, status = self._on_hl7_exception(e, channel_item)

                    else:
                        status_code = INTERNAL_SERVER_ERROR

                        # Same comment as in BadRequest, ModelValidationError above
                        if channel_item['name'] == MISC.DefaultAdminInvokeChannel:
                            wsgi_environ['zato.http.response.headers']['X-Zato-Message'] = str(e.args)
                            response = pretty_format_exception(e, cid)
                        else:
                            response = e.args if self.return_tracebacks else self.default_error_message

                # Check whether this was a JSON-based channel, in which case our response should
                # have a JSON data format on ouput too.
                if channel_item['data_format'] == DATA_FORMAT.JSON:
                    wsgi_environ['zato.http.response.headers']['Content-Type'] = CONTENT_TYPE['JSON']

                # We need a traceback unless we merely report information about a missing service,
                # which may happen if enmasse runs before such a service has been deployed.
                needs_traceback = not isinstance(e, ServiceMissingException)

                if needs_traceback:
                    _exc_string = stack_format(e, style='color', show_vals='like_source', truncate_vals=5000,
                        add_summary=True, source_lines=20) if stack_format else _format_exc # type: str

                    # Log what happened
                    logger.info(
                        'Caught an exception, cid:`%s`, status_code:`%s`, `%s`', cid, status_code, _exc_string)

                try:
                    error_wrapper = get_client_error_wrapper(channel_item['transport'], channel_item['data_format'])
                except KeyError:
                    # It is not a data format that we have a wrapper for.
                    if logger.isEnabledFor(TRACE1):
                        msg = 'No client error wrapper for transport:`{}`, data_format:`{}`'.format(
                            channel_item.get('transport'), channel_item.get('data_format'))
                        logger.log(TRACE1, msg)
                else:
                    response = error_wrapper(cid, response)

                wsgi_environ['zato.http.response.status'] = status
                return response

            finally:
                # No matter if we had an exception or not, we can add the headers that the service potentially produced.
                if zato_response_headers_container:
                    wsgi_environ['zato.http.response.headers'].update(zato_response_headers_container)

        # This is 404, no such URL path.
        else:

            # Indicate HTTP 404
            wsgi_environ['zato.http.response.status'] = _status_not_found

            # This is returned to the caller - note that it does not echo back the URL requested ..
            response = response_404.format(cid)

            # .. this goes to logs and it includes the URL sent by the client.
            logger.warning(response_404_log, path_info, wsgi_environ.get('REQUEST_METHOD'), wsgi_environ.get('HTTP_ACCEPT'), cid)

            # This is the payload for the caller
            return response

# ################################################################################################################################

    def check_security_via_groups(
        self,
        cid:'str',
        channel_name:'str',
        security_groups_ctx:'SecurityGroupsCtx',
        wsgi_environ:'stranydict'
    ) -> 'None':
        """ Validates incoming credentials against security groups assigned to a channel.
        Exactly one of Basic Auth or an API key must be provided - anything else raises
        BadRequest or Forbidden. On success, enriches wsgi_environ with security data.
        """
        # Local variables
        sec_def = None

        # Extract Basic Auth information from input ..
        basic_auth_info = wsgi_environ.get('HTTP_AUTHORIZATION')

        # .. extract API key information too ..
        apikey_header_value = wsgi_environ.get(self.server.api_key_header_wsgi)

        # .. we cannot have both on input ..
        if basic_auth_info and apikey_header_value:
            logger.warning('Received both Basic Auth and API key (groups)')
            raise BadRequest(cid)

        # Handle Basic Auth via groups ..
        if basic_auth_info:

            # .. extract credentials ..
            username, password = extract_basic_auth(cid, basic_auth_info)

            # .. run the validation now ..
            if security_id := security_groups_ctx.check_security_basic_auth(cid, channel_name, username, password):
                sec_def = self.url_data.basic_auth_get_by_id(security_id)
            else:
                logger.warning('Invalid Basic Auth credentials (groups)')
                raise Forbidden(cid)

        # Handle API keys via groups ..
        elif apikey_header_value:

            # .. run the validation now ..
            if security_id := security_groups_ctx.check_security_apikey(cid, channel_name, apikey_header_value):
                sec_def = self.url_data.apikey_get_by_id(security_id)
            else:
                logger.warning('Invalid API key (groups)')
                raise Forbidden(cid)

        else:
            logger.warning('Received neither Basic Auth nor API key (groups)')
            raise Forbidden(cid)

        # Now we can enrich the WSGI environment with information
        # that will become self.channel.security for services.
        if sec_def:
            enrich_with_sec_data(wsgi_environ, sec_def, sec_def['sec_type'])

# ################################################################################################################################

    def _on_rate_limiting_exception(self, e:'Exception') -> 'anytuple':
        """ Maps a rate-limiting exception to a (response, status_code, status) triple.
        """
        if isinstance(e, RateLimitReached):
            status_code = TOO_MANY_REQUESTS
            status = _status_too_many_requests

        elif isinstance(e, AddressNotAllowed):
            status_code = FORBIDDEN
            status = _status_forbidden

        else:
            status_code = BAD_REQUEST
            status = _status_bad_request

        return 'Error {}'.format(status), status_code, status

# ################################################################################################################################

    def _on_hl7_exception(self, e:'Exception', channel_item:'stranydict') -> 'anytuple':
        """ Maps an HL7-channel exception to a (response, status_code, status) triple;
        error details are included only if the channel is configured to return them.
        """
        if channel_item['should_return_errors'] and isinstance(e, HL7Exception):
            details = '`{}`; data:`{}`'.format(e.args[0], e.data)
        else:
            details = ''

        return details, BAD_REQUEST, _status_bad_request
# ################################################################################################################################
class RequestHandler:
""" Handles individual HTTP requests to a given service.
"""
def __init__(self, server:'ParallelServer') -> 'None':
    # The server this handler belongs to - used below for service instantiation and caching
    self.server = server
# ################################################################################################################################
def _set_response_data(self, service:'Service', **kwargs:'any_'):
    """ A callback invoked by a service once it has produced its response -
    serializes the payload and assigns the content type.
    """
    out_format = kwargs.get('data_format', '')
    out_transport = kwargs.get('transport', '')

    # First, turn whatever the service produced into a serialized payload ..
    self.set_payload(service.response, out_format, out_transport, service)

    # .. then make sure the response carries an appropriate content type.
    self.set_content_type(service.response, out_format)

    return service.response
# ################################################################################################################################
def _get_flattened(self, params:'str') -> 'anydict':
    """ Returns a dict of query-string parameters in which single-element lists
    are replaced with their only element.
    """
    out = {} # type: anydict

    if params:
        for name, values in QueryDict(params, encoding='utf-8').lists():
            out[name] = values if len(values) > 1 else values[0]

    return out
# ################################################################################################################################
def create_channel_params(
    self,
    path_params:'strstrdict',
    channel_item:'any_',
    wsgi_environ:'stranydict',
    raw_request:'str',
    post_data:'dictnone'=None,
) -> 'strstrdict':
    """ Collects parameters specific to this channel (HTTP) and updates wsgi_environ
    with HTTP-specific data.
    """
    qs_params = self._get_flattened(wsgi_environ.get('QUERY_STRING', ''))

    # POST data was parsed by our caller already - otherwise we parse the raw request
    # ourselves, but only if no explicit data format was configured for the channel.
    if post_data:
        post = post_data
    else:
        post = {} if channel_item.data_format else self._get_flattened(raw_request)

    if channel_item.url_params_pri == URL_PARAMS_PRIORITY.QS_OVER_PATH:

        # Query string wins - overlay it onto the path parameters in place
        if qs_params:
            path_params.update(qs_params.items())
        channel_params = qs_params and path_params or path_params

    else:
        # Path parameters win - start from the query string and overlay path ones
        channel_params = dict(qs_params) if qs_params else {}
        channel_params.update(path_params)

    wsgi_environ['zato.http.GET'] = qs_params
    wsgi_environ['zato.http.POST'] = post

    return channel_params
# ################################################################################################################################
def get_response_from_cache(
    self,
    service:'Service',
    raw_request:'str',
    channel_item:'any_',
    channel_params:'stranydict',
    wsgi_environ:'stranydict'
) -> 'anytuple':
    """ Returns a cached response for the incoming request, or None if nothing is cached for it.

    By default, the request hash is sha256 over the concatenation of the WSGI
    REQUEST_METHOD, PATH_INFO, the sorted query-string parameters and the raw
    payload bytes. Sorting means ?foo=123&bar=456 equals ?bar=456&foo=123 -
    parameter order in the query string does not matter.
    """
    # A service may provide its own hashing routine ..
    if service.get_request_hash:# type: ignore
        hash_value = service.get_request_hash(
            _HashCtx(raw_request, channel_item, channel_params, wsgi_environ) # type: ignore
        )

    # .. otherwise, compute the default sha256-based hash.
    else:
        sorted_qs = str(sorted(channel_params.items()))
        to_hash = '%s%s%s%s' % (wsgi_environ['REQUEST_METHOD'], wsgi_environ['PATH_INFO'], sorted_qs, raw_request)
        hash_value = sha256(to_hash.encode('utf8')).hexdigest()
        hash_value = '-'.join(split_re(hash_value)) # type: ignore

    # No matter how the hash was obtained, the key always embeds the channel's type and ID
    cache_key = 'http-channel-%s-%s' % (channel_item['id'], hash_value)

    # Look the key up in the cache configured for this channel
    cached = self.server.get_from_cache(channel_item['cache_type'], channel_item['cache_name'], cache_key)

    # If anything was found, deserialize it to the wrapper object that our callers expect
    if cached:
        as_dict = loads(cached)
        response = _CachedResponse(as_dict['payload'], as_dict['content_type'], as_dict['headers'],
            as_dict['status_code'])
    else:
        response = cached

    return cache_key, response
# ################################################################################################################################
def set_response_in_cache(self, channel_item:'any_', key:'str', response:'any_'):
    """ Stores a serialized response in this channel's cache, for as long as the cache is configured to keep it.
    """
    serialized = dumps({
        'payload': response.payload,
        'content_type': response.content_type,
        'headers': response.headers,
        'status_code': response.status_code,
    })
    self.server.set_in_cache(channel_item['cache_type'], channel_item['cache_name'], key, serialized)
# ################################################################################################################################
def handle(
    self,
    cid:'str',
    url_match:'any_',
    channel_item:'any_',
    wsgi_environ:'stranydict',
    raw_request:'str',
    worker_store:'WorkerStore',
    simple_io_config:'stranydict',
    post_data:'dictnone',
    path_info:'str',
    channel_params:'stranydict',
    zato_response_headers_container:'stranydict',
) -> 'any_':
    """ Create a new instance of a service and invoke it.

    Raises NotFound if the underlying service is inactive. May short-circuit and
    return a cached response without invoking the service if caching is configured
    for this channel.
    """
    service, is_active = self.server.service_store.new_instance(channel_item.service_impl_name)
    if not is_active:
        logger.warning('Could not invoke an inactive service:`%s`, cid:`%s`', service.get_name(), cid)

        # Fixed: response_404 has a single placeholder (the CID) - previously it was
        # formatted with four arguments, embedding path_info where the CID belongs.
        # Request details go to the log message above instead.
        raise NotFound(cid, response_404.format(cid))

    # This is needed for type checking to make sure the name is bound
    cache_key = ''

    # If caching is configured for this channel, we need to first check if there is no response already
    if channel_item['cache_type']:
        cache_key, response = self.get_response_from_cache(service, raw_request, channel_item, channel_params, wsgi_environ)
        if response:
            return response

    # Add any path params matched to WSGI environment so it can be easily accessible later on
    wsgi_environ['zato.http.path_params'] = url_match

    # If this is a POST / form submission then it becomes our payload
    if channel_item['data_format'] == ModuleCtx.SIO_FORM_DATA:
        wsgi_environ['zato.request.payload'] = post_data

    # No cache for this channel or no cached response, invoke the service then.
    response = service.update_handle(self._set_response_data, service, raw_request,
        CHANNEL.HTTP_SOAP, channel_item.data_format, channel_item.transport, self.server,
        cast_('BrokerClient', worker_store.broker_client),
        worker_store, cid, simple_io_config, wsgi_environ=wsgi_environ,
        url_match=url_match, channel_item=channel_item, channel_params=channel_params,
        merge_channel_params=channel_item.merge_url_params_req,
        params_priority=channel_item.params_pri,
        zato_response_headers_container=zato_response_headers_container)

    # Cache the response if needed (cache_key was already created on return from get_response_from_cache)
    if channel_item['cache_type']:
        self.set_response_in_cache(channel_item, cache_key, response)

    # Having used the cache or not, we can return the response now
    return response
# ################################################################################################################################
def _needs_admin_response(
self,
service_instance:'Service',
service_invoker_name:'str'=ServiceConst.ServiceInvokerName
) -> 'bool':
return isinstance(service_instance, AdminService) and service_instance.name != service_invoker_name
# ################################################################################################################################
    def set_payload(
        self,
        response:'any_',
        data_format:'str',
        transport:'str',
        service_instance:'Service'
    ) -> 'None':
        """ Sets the actual payload to represent the service's response out of what the service produced.
        This includes converting dictionaries into JSON or adding Zato metadata.

        Note that `transport` is accepted for interface compatibility but is not read here.
        """
        # Admin services (other than the service invoker) get a zato_env envelope merged into a JSON response ..
        if self._needs_admin_response(service_instance):
            if data_format in {ModuleCtx.SIO_JSON, ModuleCtx.SIO_FORM_DATA}:
                zato_env = {'zato_env':{'result':response.result, 'cid':service_instance.cid, 'details':response.result_details}}
                is_not_str = not isinstance(response.payload, str)
                if is_not_str and response.payload:
                    # A non-string payload object - turn it into a dict and merge the envelope in
                    payload = response.payload.getvalue(False)
                    payload.update(zato_env)
                else:
                    # No payload produced - the envelope alone becomes the whole response
                    payload = zato_env
                response.payload = dumps(payload)
        # .. whereas regular services only need serialization if the payload is not a string already.
        else:
            if not isinstance(response.payload, str):
                if isinstance(response.payload, dict) and data_format in ModuleCtx.Dict_Like:
                    # Plain dicts are serialized directly for dict-like data formats
                    response.payload = dumps(response.payload)
                else:
                    if response.payload:
                        if isinstance(response.payload, Model):
                            # Model objects serialize themselves
                            value = response.payload.to_json()
                        else:
                            if hasattr(response.payload, 'getvalue'):
                                # E.g. a SimpleIO payload wrapper
                                value = response.payload.getvalue() # type: ignore
                            else:
                                value = dumps(response.payload)
                    else:
                        # Falsy, non-string payload becomes an empty response
                        value = ''
                    response.payload = value
# ################################################################################################################################
def set_content_type(
self,
response:'any_',
data_format:'str'
) -> 'None':
""" Sets a response's content type if one hasn't been supplied by the user.
"""
# A user provided his or her own content type ..
if response.content_type_changed:
content_type = response.content_type
else:
# .. or they did not so let's find out if we're using SimpleIO ..
if data_format == SIMPLE_IO.FORMAT.JSON:
content_type = self.server.json_content_type
# .. alright, let's use the default value after all.
else:
content_type = response.content_type
response.content_type = content_type
# ################################################################################################################################
| 41,583
|
Python
|
.py
| 730
| 44.49863
| 130
| 0.539907
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,105
|
url_data.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/http_soap/url_data.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from base64 import b64encode
from operator import itemgetter
from threading import RLock
from traceback import format_exc
from uuid import uuid4
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems, iterkeys
from zato.common.py23_.past.builtins import unicode
from six import PY2
# Zato
from zato.bunch import Bunch
from zato.common.api import CHANNEL, CONNECTION, MISC, RATE_LIMIT, SEC_DEF_TYPE, URL_TYPE, ZATO_NONE
from zato.common.vault_ import VAULT
from zato.common.broker_message import code_to_name, SECURITY, VAULT as VAULT_BROKER_MSG
from zato.common.dispatch import dispatcher
from zato.common.util.api import parse_tls_channel_security_definition, update_apikey_username_to_channel, wait_for_dict_key
from zato.common.util.auth import enrich_with_sec_data, on_basic_auth
from zato.common.util.url_dispatcher import get_match_target
from zato.server.connection.http_soap import Forbidden, Unauthorized
from zato.server.jwt_ import JWT
from zato.url_dispatcher import CyURLData, Matcher
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
from zato.server.base.worker import WorkerStore
# For pyflakes
WorkerStore = WorkerStore
# ################################################################################################################################
if PY2:
    # Under Python 2 the actual `oauth` package is available and used
    from oauth.oauth import OAuthDataStore, OAuthConsumer, OAuthRequest, OAuthServer, OAuthSignatureMethod_HMAC_SHA1, \
        OAuthSignatureMethod_PLAINTEXT, OAuthToken
else:
    # Under Python 3 the `oauth` package is not imported - inert stand-ins are provided instead
    # so that the class definitions and attribute references below still resolve.
    class _Placeholder:

        def __init__(self, *ignored_args, **ignored_kwargs):
            pass

        def _placeholder(self, *ignored_args, **ignored_kwargs):
            pass

        # Needed because URLData.__init__ calls _oauth_server.add_signature_method
        add_signature_method = _placeholder

    OAuthDataStore = OAuthConsumer = OAuthRequest = OAuthServer = OAuthSignatureMethod_HMAC_SHA1 = \
        OAuthSignatureMethod_PLAINTEXT = OAuthToken = _Placeholder
# ################################################################################################################################
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class URLData(CyURLData, OAuthDataStore):
""" Performs URL matching and security checks.
"""
def __init__(self, worker, channel_data=None, url_sec=None, basic_auth_config=None, jwt_config=None, ntlm_config=None, \
oauth_config=None, apikey_config=None, aws_config=None, \
tls_channel_sec_config=None, tls_key_cert_config=None, \
vault_conn_sec_config=None, kvdb=None, broker_client=None, odb=None, jwt_secret=None, vault_conn_api=None):
super(URLData, self).__init__(channel_data)
self.worker = worker # type: WorkerStore
self.url_sec = url_sec
self.basic_auth_config = basic_auth_config # type: dict
self.jwt_config = jwt_config # type: dict
self.ntlm_config = ntlm_config # type: dict
self.oauth_config = oauth_config # type: dict
self.apikey_config = apikey_config # type: dict
self.aws_config = aws_config # type: dict
self.tls_channel_sec_config = tls_channel_sec_config # type: dict
self.tls_key_cert_config = tls_key_cert_config # type: dict
self.vault_conn_sec_config = vault_conn_sec_config # type: dict
self.kvdb = kvdb
self.broker_client = broker_client
self.odb = odb
self.jwt_secret = jwt_secret
self.vault_conn_api = vault_conn_api
self.rbac_auth_type_hooks = self.worker.server.fs_server_config.rbac.auth_type_hook
self.sec_config_getter = Bunch()
self.sec_config_getter[SEC_DEF_TYPE.BASIC_AUTH] = self.basic_auth_get
self.sec_config_getter[SEC_DEF_TYPE.APIKEY] = self.apikey_get
self.sec_config_getter[SEC_DEF_TYPE.JWT] = self.jwt_get
self.url_sec_lock = RLock()
self.update_lock = RLock()
self._target_separator = MISC.SEPARATOR
self._oauth_server = OAuthServer(self)
self._oauth_server.add_signature_method(OAuthSignatureMethod_HMAC_SHA1())
self._oauth_server.add_signature_method(OAuthSignatureMethod_PLAINTEXT())
dispatcher.listen_for_updates(SECURITY, self.dispatcher_callback)
dispatcher.listen_for_updates(VAULT_BROKER_MSG, self.dispatcher_callback)
# Needs always to be sorted by name in case of conflicts in paths resolution
self.sort_channel_data()
# Set up audit log
for channel_item in channel_data:
self._set_up_audit_log(channel_item, False)
# ################################################################################################################################
def set_security_objects(self, *, url_sec, basic_auth_config, jwt_config, ntlm_config,
oauth_config, apikey_config, aws_config, tls_channel_sec_config, tls_key_cert_config, vault_conn_sec_config):
self.url_sec = url_sec
self.basic_auth_config = basic_auth_config
self.jwt_config = jwt_config
self.ntlm_config = ntlm_config
self.oauth_config = oauth_config
self.apikey_config = apikey_config
self.aws_config = aws_config
self.tls_channel_sec_config = tls_channel_sec_config
self.tls_key_cert_config = tls_key_cert_config
self.vault_conn_sec_config = vault_conn_sec_config
# ################################################################################################################################
def dispatcher_callback(self, event, ctx, **opaque):
getattr(self, 'on_broker_msg_{}'.format(code_to_name[event]))(ctx)
# ################################################################################################################################
# OAuth data store API
def _lookup_oauth(self, username, class_):
# usernames are unique so we know the first match is ours
for sec_config in self.oauth_config.values():
if sec_config.config.username == username:
return class_(sec_config.config.username, sec_config.config.password)
    def lookup_consumer(self, key):
        # OAuthDataStore API - consumers are looked up by username
        return self._lookup_oauth(key, OAuthConsumer)
    def lookup_token(self, token_type, token_field):
        # OAuthDataStore API - token_type is unused here, tokens are looked up by username
        return self._lookup_oauth(token_field, OAuthToken)
    def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
        # OAuthDataStore API - returns the nonce if it was seen before,
        # otherwise records it in the KVDB and implicitly returns None.
        for sec_config in self.oauth_config.values():
            if sec_config.config.username == oauth_consumer.key:

                # The nonce was reused
                existing_nonce = self.kvdb.has_oauth_nonce(oauth_consumer.key, nonce)
                if existing_nonce:
                    return nonce
                else:
                    # No such nonce so we add it to the store
                    self.kvdb.add_oauth_nonce(
                        oauth_consumer.key, nonce, sec_config.config.max_nonce_log)
    def fetch_request_token(self, oauth_consumer, oauth_callback):
        """-> OAuthToken. Part of the OAuthDataStore API - not supported by this store.
        """
        raise NotImplementedError
    def fetch_access_token(self, oauth_consumer, oauth_token, oauth_verifier):
        """-> OAuthToken. Part of the OAuthDataStore API - not supported by this store.
        """
        raise NotImplementedError
    def authorize_request_token(self, oauth_token, user):
        """-> OAuthToken. Part of the OAuthDataStore API - not supported by this store.
        """
        raise NotImplementedError
# ################################################################################################################################
    def authenticate_web_socket(self, cid, sec_def_type, auth, sec_name, vault_conn_default_auth_method,
        initial_http_wsgi_environ, initial_headers=None, _basic_auth=SEC_DEF_TYPE.BASIC_AUTH, _jwt=SEC_DEF_TYPE.JWT,
        _vault_sec_def_type=SEC_DEF_TYPE.VAULT,
        _vault_ws=VAULT.WEB_SOCKET):
        """ Authenticates a WebSocket-based connection using HTTP Basic Auth or JWT credentials
        by building a synthetic HTTP Authorization header and delegating to the matching handler.
        Returns the handler's result; never raises 401 itself (enforce_auth=False).
        """
        headers = initial_headers if initial_headers is not None else {}
        headers['zato.ws.initial_http_wsgi_environ'] = initial_http_wsgi_environ

        if sec_def_type == _basic_auth:
            auth_func = self._handle_security_basic_auth
            get_func = self.basic_auth_get

            # Substitute non-empty placeholders for missing credentials so the check below fails cleanly
            username = auth['username'] or 'url_data.wsx.invalid.username'
            secret = auth['secret'] or 'url_data.wsx.invalid.password.{}'.format(uuid4().hex)

            # Credentials may arrive as bytes - normalize to text before combining them
            username = username if isinstance(username, unicode) else username.decode('utf8')
            secret = secret if isinstance(secret, unicode) else secret.decode('utf8')

            auth_info = '{}:{}'.format(username, secret)
            auth_info = auth_info.encode('utf8')
            auth = b64encode(auth_info)

            headers['HTTP_AUTHORIZATION'] = 'Basic {}'.format(auth.decode('utf8'))

        elif sec_def_type == _jwt:
            auth_func = self._handle_security_jwt
            get_func = self.jwt_get
            headers['HTTP_AUTHORIZATION'] ='Bearer {}'.format(auth['secret'])

        else:
            raise ValueError('Unrecognized sec_def_type:`{}`'.format(sec_def_type))

        return auth_func(cid, get_func(sec_name)['config'], None, None, headers, enforce_auth=False)
# ################################################################################################################################
def _handle_security_apikey(self, cid, sec_def, path_info, body, wsgi_environ, ignored_post_data=None, enforce_auth=True):
""" Performs the authentication against an API key in a specified HTTP header.
"""
# Find out if the header was provided at all
if sec_def['header'] not in wsgi_environ:
if enforce_auth:
msg = '401 Unauthorized path_info:`{}`, cid:`{}`'.format(path_info, cid)
error_msg = '401 Unauthorized'
logger.error(msg + ' (No header)')
raise Unauthorized(cid, error_msg, None)
else:
return False
expected_key = sec_def.get('password', '')
# Passwords are not required
if expected_key and wsgi_environ[sec_def['header']] != expected_key:
if enforce_auth:
msg = '401 Unauthorized path_info:`{}`, cid:`{}`'.format(path_info, cid)
error_msg = '401 Unauthorized'
logger.error(msg + ' (Password)')
raise Unauthorized(cid, error_msg, None)
else:
return False
return True
# ################################################################################################################################
def _handle_security_basic_auth(self, cid, sec_def, path_info, body, wsgi_environ, ignored_post_data=None,
enforce_auth=True):
""" Performs the authentication using HTTP Basic Auth.
"""
env = {'HTTP_AUTHORIZATION':wsgi_environ.get('HTTP_AUTHORIZATION')}
url_config = {'basic-auth-username':sec_def.username, 'basic-auth-password':sec_def.password}
result = on_basic_auth(cid, env, url_config, False)
if not result:
if enforce_auth:
msg_log = 'Unauthorized; path_info:`{}`, cid:`{}`, sec-wall code:`{}`, description:`{}`\n'.format(
path_info, cid, result.code, result.description)
msg_exc = 'Unauthorized; cid={}'.format(cid)
logger.error(msg_log)
raise Unauthorized(cid, msg_exc, 'Basic realm="{}"'.format(sec_def.realm))
else:
return False
return True
# ################################################################################################################################
def _handle_security_jwt(self, cid, sec_def, path_info, body, wsgi_environ, ignored_post_data=None, enforce_auth=True):
""" Performs the authentication using a JavaScript Web Token (JWT).
"""
authorization = wsgi_environ.get('HTTP_AUTHORIZATION')
if not authorization:
if enforce_auth:
msg = 'UNAUTHORIZED path_info:`{}`, cid:`{}`'.format(path_info, cid)
logger.error(msg)
raise Unauthorized(cid, msg, 'JWT')
else:
return False
if not authorization.startswith('Bearer '):
if enforce_auth:
msg = 'UNAUTHORIZED path_info:`{}`, cid:`{}`'.format(path_info, cid)
logger.error(msg)
raise Unauthorized(cid, msg, 'JWT')
else:
return False
token = authorization.split('Bearer ', 1)[1]
result = JWT(self.odb, self.worker.server.decrypt, self.jwt_secret).validate(
sec_def.username, token.encode('utf8'))
if not result.valid:
if enforce_auth:
msg = 'UNAUTHORIZED path_info:`{}`, cid:`{}`'.format(path_info, cid)
logger.error(msg)
raise Unauthorized(cid, msg, 'JWT')
else:
return False
return result
# ################################################################################################################################
def _handle_security_oauth(self, cid, sec_def, path_info, body, wsgi_environ, post_data, enforce_auth=True):
""" Performs the authentication using OAuth.
"""
http_url = '{}://{}{}'.format(wsgi_environ['wsgi.url_scheme'],
wsgi_environ['HTTP_HOST'], wsgi_environ['RAW_URI'])
# The underlying library needs Authorization instead of HTTP_AUTHORIZATION
http_auth_header = wsgi_environ.get('HTTP_AUTHORIZATION')
if not http_auth_header:
if enforce_auth:
msg = 'No Authorization header in wsgi_environ:[%r]'
logger.error(msg, wsgi_environ)
raise Unauthorized(cid, 'No Authorization header found', 'OAuth')
else:
return False
wsgi_environ['Authorization'] = http_auth_header
oauth_request = OAuthRequest.from_request(
wsgi_environ['REQUEST_METHOD'], http_url, wsgi_environ, post_data.copy(),
wsgi_environ['QUERY_STRING'])
if oauth_request is None:
msg = 'No sig could be built using wsgi_environ:[%r], post_data:[%r]'
logger.error(msg, wsgi_environ, post_data)
if enforce_auth:
raise Unauthorized(cid, 'No parameters to build signature found', 'OAuth')
else:
return False
try:
self._oauth_server.verify_request(oauth_request)
except Exception as e:
if enforce_auth:
msg = 'Signature verification failed, wsgi_environ:`%r`, e:`%s`, e.message:`%s`'
logger.error(msg, wsgi_environ, format_exc(e), e.message)
raise Unauthorized(cid, 'Signature verification failed', 'OAuth')
else:
return False
else:
# Store for later use, custom channels may want to inspect it later on
wsgi_environ['zato.oauth.request'] = oauth_request
return True
# ################################################################################################################################
def _handle_security_tls_channel_sec(self, cid, sec_def, ignored_path_info, ignored_body, wsgi_environ,
ignored_post_data=None, enforce_auth=True):
user_msg = 'You are not allowed to access this resource'
for header, expected_value in sec_def.value.items():
given_value = wsgi_environ.get(header)
if expected_value != given_value:
if enforce_auth:
logger.error(
'%s, header:`%s`, expected:`%s`, given:`%s` (%s)', user_msg, header, expected_value, given_value, cid)
raise Unauthorized(cid, user_msg, 'zato-tls-channel-sec')
else:
return False
return True
# ################################################################################################################################
    def check_rbac_delegated_security(self, sec, cid, channel_item, path_info, payload, wsgi_environ, post_data, worker_store,
        sep=MISC.SEPARATOR, plain_http=URL_TYPE.PLAIN_HTTP, _empty_client_def=tuple()): # noqa: C408
        """ Authenticates a request against RBAC rules - tries every client definition assigned
        to a role allowed to invoke this channel's service with this HTTP method and accepts
        the first one whose underlying security definition matches the request's credentials.
        Raises Forbidden/Unauthorized if none does.
        """
        auth_result = False

        # The HTTP method must map to a known RBAC permission at all
        http_method = wsgi_environ.get('REQUEST_METHOD')
        http_method_permission_id = worker_store.rbac.http_permissions.get(http_method)

        if not http_method_permission_id:
            logger.error('Invalid HTTP method `%s`, cid:`%s`', http_method, cid)
            raise Forbidden(cid, 'You are not allowed to access this URL\n')

        for role_id, perm_id, resource_id in iterkeys(worker_store.rbac.registry._allowed):

            if auth_result:
                return auth_result

            # Only (permission, resource) pairs matching this method and this channel's service are relevant
            if perm_id == http_method_permission_id and resource_id == channel_item['service_id']:
                for client_def in worker_store.rbac.role_id_to_client_def.get(role_id, _empty_client_def):
                    _, sec_type, sec_name = client_def.split(sep)

                    # Build a synthetic security object pointing to the client's underlying definition
                    # and re-run the regular security check against it, without enforcing 401s.
                    _sec = Bunch()
                    _sec.is_active = True
                    _sec.transport = plain_http
                    _sec.sec_use_rbac = False
                    _sec.sec_def = self.sec_config_getter[sec_type](sec_name)['config']

                    auth_result = self.check_security(
                        _sec, cid, channel_item, path_info, payload, wsgi_environ, post_data, worker_store,
                        enforce_auth=False)

                    if auth_result:

                        # If input sec object is a dict/Bunch-like one, it means that we have just confirmed
                        # credentials of the underlying security definition behind an RBAC one,
                        # in which case we need to overwrite the sec object's sec_def attribute and make it
                        # point to the one that we have just found. Otherwise, it would still point to ZATO_NONE.
                        if hasattr(sec, 'keys'):
                            sec.sec_def = _sec['sec_def']

                        enrich_with_sec_data(wsgi_environ, _sec.sec_def, sec_type)
                        break

        if auth_result:
            return auth_result
        else:
            logger.warning('None of RBAC definitions allowed request in, cid:`%s`', cid)

            # We need to return 401 Unauthorized but we need to send a challenge, i.e. authentication type
            # that this channel can be accessed through so we as the last resort, we invoke a hook
            # service which decides what it should be. If there is no hook, we default to 'zato'.
            if channel_item['url_path'] in self.rbac_auth_type_hooks:
                service_name = self.rbac_auth_type_hooks[channel_item['url_path']]
                response = self.worker.invoke(service_name, {'channel_item':channel_item}, serialize=False)
                response = response.getvalue(serialize=False)
                auth_type = response['response']['auth_type']
            else:
                auth_type = 'zato'

            raise Unauthorized(cid, 'You are not allowed to access this resource', auth_type)
# ################################################################################################################################
    def check_security(self, sec, cid, channel_item, path_info, payload, wsgi_environ, post_data, worker_store, *,
        enforce_auth=True, _object_type=RATE_LIMIT.OBJECT_TYPE.SEC_DEF):
        """ Authenticates and authorizes a given request. Returns a truthy authentication result
        on success or False when authentication fails with enforce_auth disabled; with
        enforce_auth enabled, failures raise Unauthorized/Forbidden instead.
        """
        # RBAC-delegated channels are handled entirely by the delegation logic
        if sec.sec_use_rbac:
            return self.check_rbac_delegated_security(
                sec, cid, channel_item, path_info, payload, wsgi_environ, post_data, worker_store)

        # Dispatch to the per-type handler, e.g. 'basic-auth' -> _handle_security_basic_auth
        sec_def, sec_def_type = sec.sec_def, sec.sec_def['sec_type']
        handler_name = '_handle_security_%s' % sec_def_type.replace('-', '_')
        auth_result = getattr(self, handler_name)(cid, sec_def, path_info, payload, wsgi_environ, post_data, enforce_auth)
        if not auth_result:
            return False

        # Ok, we now know that the credentials are valid so we can check RBAC permissions if need be.
        if channel_item.get('has_rbac'):
            is_allowed = worker_store.rbac.is_http_client_allowed(
                'sec_def:::{}:::{}'.format(sec.sec_def['sec_type'], sec.sec_def['name']), wsgi_environ['REQUEST_METHOD'],
                channel_item.service_id)
            if not is_allowed:
                raise Forbidden(cid, 'You are not allowed to access this URL\n')

        # Per-definition rate limiting, checked only after authentication succeeded
        if sec_def.get('is_rate_limit_active'):
            self.worker.server.rate_limiting.check_limit(cid, _object_type, sec_def.name, wsgi_environ['zato.http.remote_addr'])

        enrich_with_sec_data(wsgi_environ, sec_def, sec_def_type)

        return auth_result
# ################################################################################################################################
def _update_url_sec(self, msg, sec_def_type, delete=False):
""" Updates URL security definitions that use the security configuration
of the name and type given in 'msg' so that existing definitions use
the new configuration or, optionally, deletes the URL security definition
altogether if 'delete' is True.
"""
items = list(iteritems(self.url_sec))
for target_match, url_info in items:
sec_def = url_info.get('sec_def')
if not sec_def:
if url_info.get('data_format') != 'xml':
self.logger.warn('Missing sec_def for url_info -> %s', url_info)
return
if sec_def != ZATO_NONE and sec_def.sec_type == sec_def_type:
name = msg.get('old_name') if msg.get('old_name') else msg.get('name')
if sec_def.name == name:
if delete:
del self.url_sec[target_match]
else:
for key, _ignored_new_value in msg.items():
if key in sec_def:
sec_def[key] = msg[key]
# ################################################################################################################################
def _delete_channel_data(self, sec_type, sec_name):
match_idx = ZATO_NONE
for item in self.channel_data:
if item.get('sec_type') == sec_type and item['security_name'] == sec_name:
match_idx = self.channel_data.index(item)
# No error, let's delete channel info
if match_idx != ZATO_NONE:
self.channel_data.pop(match_idx)
# ################################################################################################################################
    def _update_apikey(self, name, config):
        # Keep the original header name before it is rewritten to its WSGI form
        config.orig_header = config.header
        update_apikey_username_to_channel(config)
        # Stored as a Bunch with a 'config' key, matching the shape of all the other containers
        self.apikey_config[name] = Bunch()
        self.apikey_config[name].config = config
    def apikey_get(self, name):
        """ Returns the configuration of the API key of the given name.
        """
        # Wait until the definition has been deployed before reading it
        wait_for_dict_key(self.apikey_config, name)
        with self.url_sec_lock:
            return self.apikey_config.get(name)
    def apikey_get_by_id(self, def_id):
        """ Same as apikey_get but returns information by definition ID.
        """
        with self.url_sec_lock:
            return self._get_sec_def_by_id(self.apikey_config, def_id)
    def on_broker_msg_SECURITY_APIKEY_CREATE(self, msg, *args):
        """ Creates a new API key security definition.
        """
        with self.url_sec_lock:
            self._update_apikey(msg.name, msg)
    def on_broker_msg_SECURITY_APIKEY_EDIT(self, msg, *args):
        """ Updates an existing API key security definition.
        """
        # NOTE(review): unlike the Basic Auth/NTLM/OAuth edit handlers, the current password
        # is not carried over here - confirm that the edit message includes it.
        with self.url_sec_lock:
            # Rename-aware - remove under the old name, re-add under the new one,
            # then propagate the change to URL security definitions.
            del self.apikey_config[msg.old_name]
            self._update_apikey(msg.name, msg)
            self._update_url_sec(msg, SEC_DEF_TYPE.APIKEY)
    def on_broker_msg_SECURITY_APIKEY_DELETE(self, msg, *args):
        """ Deletes an API key security definition.
        """
        with self.url_sec_lock:
            # Remove the channel using this definition, the definition itself
            # and finally any URL security entries pointing to it.
            self._delete_channel_data('apikey', msg.name)
            del self.apikey_config[msg.name]
            self._update_url_sec(msg, SEC_DEF_TYPE.APIKEY, True)
    def on_broker_msg_SECURITY_APIKEY_CHANGE_PASSWORD(self, msg, *args):
        """ Changes password of an API key security definition.
        """
        # Wait until the definition exists - the change may arrive before the create message is processed
        wait_for_dict_key(self.apikey_config, msg.name)
        with self.url_sec_lock:
            self.apikey_config[msg.name]['config']['password'] = msg.password
            self._update_url_sec(msg, SEC_DEF_TYPE.APIKEY)
# ################################################################################################################################
def _update_aws(self, name, config):
self.aws_config[name] = Bunch()
self.aws_config[name].config = config
    def aws_get(self, name):
        """ Returns the configuration of the AWS security definition of the given name.
        """
        # Wait until the definition has been deployed before reading it
        wait_for_dict_key(self.aws_config, name)
        with self.url_sec_lock:
            return self.aws_config.get(name)
    def on_broker_msg_SECURITY_AWS_CREATE(self, msg, *args):
        """ Creates a new AWS security definition.
        """
        with self.url_sec_lock:
            self._update_aws(msg.name, msg)
    def on_broker_msg_SECURITY_AWS_EDIT(self, msg, *args):
        """ Updates an existing AWS security definition.
        """
        with self.url_sec_lock:
            # Rename-aware - remove under the old name, re-add under the new one
            del self.aws_config[msg.old_name]
            self._update_aws(msg.name, msg)
    def on_broker_msg_SECURITY_AWS_DELETE(self, msg, *args):
        """ Deletes an AWS security definition.
        """
        with self.url_sec_lock:
            # Remove the channel using this definition, then the definition itself
            self._delete_channel_data('aws', msg.name)
            del self.aws_config[msg.name]
    def on_broker_msg_SECURITY_AWS_CHANGE_PASSWORD(self, msg, *args):
        """ Changes password of an AWS security definition.
        """
        # Wait until the definition exists - the change may arrive before the create message is processed
        wait_for_dict_key(self.aws_config, msg.name)
        with self.url_sec_lock:
            self.aws_config[msg.name]['config']['password'] = msg.password
# ################################################################################################################################
def _get_sec_def_by_id(self, def_type, def_id):
with self.url_sec_lock:
for item in def_type.values():
if item.config['id'] == def_id:
return item.config
# ################################################################################################################################
    def _update_basic_auth(self, name, config):
        # Stored as a Bunch with a 'config' key, matching the shape of all the other containers
        self.basic_auth_config[name] = Bunch()
        self.basic_auth_config[name].config = config
    def basic_auth_get(self, name):
        """ Returns the configuration of the HTTP Basic Auth security definition of the given name.
        """
        # NOTE(review): this waits on self.basic_auth_config._impl whereas the sibling *_get
        # methods wait on the container itself - confirm the container type requires ._impl here.
        wait_for_dict_key(self.basic_auth_config._impl, name)
        with self.url_sec_lock:
            return self.basic_auth_config.get(name)
    def basic_auth_get_by_id(self, def_id):
        """ Same as basic_auth_get but returns information by definition ID.
        """
        with self.url_sec_lock:
            return self._get_sec_def_by_id(self.basic_auth_config, def_id)
    def on_broker_msg_SECURITY_BASIC_AUTH_CREATE(self, msg, *args):
        """ Creates a new HTTP Basic Auth security definition.
        """
        with self.url_sec_lock:
            self._update_basic_auth(msg.name, msg)
    def on_broker_msg_SECURITY_BASIC_AUTH_EDIT(self, msg, *args):
        """ Updates an existing HTTP Basic Auth security definition.
        """
        with self.url_sec_lock:
            # The edit message does not carry the password - preserve the current one across the update
            current_config = self.basic_auth_config[msg.old_name]
            msg.password = current_config.config.password
            # Rename-aware - remove under the old name, re-add under the new one,
            # then propagate the change to URL security definitions.
            del self.basic_auth_config[msg.old_name]
            self._update_basic_auth(msg.name, msg)
            self._update_url_sec(msg, SEC_DEF_TYPE.BASIC_AUTH)
    def on_broker_msg_SECURITY_BASIC_AUTH_DELETE(self, msg, *args):
        """ Deletes an HTTP Basic Auth security definition.
        """
        with self.url_sec_lock:
            # Remove the channel using this definition, the definition itself
            # and finally any URL security entries pointing to it.
            self._delete_channel_data('basic_auth', msg.name)
            del self.basic_auth_config[msg.name]
            self._update_url_sec(msg, SEC_DEF_TYPE.BASIC_AUTH, True)

            # This will delete a link from this account an SSO user,
            # assuming that SSO is enabled (in which case it is not None).
            if self.worker.server.sso_api:
                self.worker.server.sso_api.user.on_broker_msg_SSO_LINK_AUTH_DELETE(SEC_DEF_TYPE.BASIC_AUTH, msg.id)
    def on_broker_msg_SECURITY_BASIC_AUTH_CHANGE_PASSWORD(self, msg, *args):
        """ Changes password of an HTTP Basic Auth security definition.
        """
        # Wait until the definition exists - the change may arrive before the create message is processed
        wait_for_dict_key(self.basic_auth_config, msg.name)
        with self.url_sec_lock:
            self.basic_auth_config[msg.name]['config']['password'] = msg.password
            self._update_url_sec(msg, SEC_DEF_TYPE.BASIC_AUTH)
# ################################################################################################################################
def _update_jwt(self, name, config):
self.jwt_config[name] = Bunch()
self.jwt_config[name].config = config
    def jwt_get(self, name):
        """ Returns configuration of a JWT security definition of the given name.
        """
        # Wait until the definition has been deployed before reading it
        wait_for_dict_key(self.jwt_config, name)
        with self.url_sec_lock:
            return self.jwt_config.get(name)
    def jwt_get_by_id(self, def_id):
        """ Same as jwt_get but returns information by definition ID.
        """
        with self.url_sec_lock:
            return self._get_sec_def_by_id(self.jwt_config, def_id)
    def on_broker_msg_SECURITY_JWT_CREATE(self, msg, *args):
        """ Creates a new JWT security definition.
        """
        with self.url_sec_lock:
            self._update_jwt(msg.name, msg)
    def on_broker_msg_SECURITY_JWT_EDIT(self, msg, *args):
        """ Updates an existing JWT security definition.
        """
        # NOTE(review): unlike the Basic Auth/NTLM/OAuth edit handlers, the current password
        # is not carried over here - confirm that the edit message includes it.
        with self.url_sec_lock:
            # Rename-aware - remove under the old name, re-add under the new one,
            # then propagate the change to URL security definitions.
            del self.jwt_config[msg.old_name]
            self._update_jwt(msg.name, msg)
            self._update_url_sec(msg, SEC_DEF_TYPE.JWT)
    def on_broker_msg_SECURITY_JWT_DELETE(self, msg, *args):
        """ Deletes a JWT security definition.
        """
        with self.url_sec_lock:
            # Remove the channel using this definition, the definition itself
            # and finally any URL security entries pointing to it.
            self._delete_channel_data('jwt', msg.name)
            del self.jwt_config[msg.name]
            self._update_url_sec(msg, SEC_DEF_TYPE.JWT, True)

            # This will delete a link from this account an SSO user,
            # assuming that SSO is enabled (in which case it is not None).
            if self.worker.server.sso_api:
                self.worker.server.sso_api.user.on_broker_msg_SSO_LINK_AUTH_DELETE(SEC_DEF_TYPE.JWT, msg.id)
    def on_broker_msg_SECURITY_JWT_CHANGE_PASSWORD(self, msg, *args):
        """ Changes password of a JWT security definition.
        """
        # Wait until the definition exists - the change may arrive before the create message is processed
        wait_for_dict_key(self.jwt_config, msg.name)
        with self.url_sec_lock:
            self.jwt_config[msg.name]['config']['password'] = msg.password
            self._update_url_sec(msg, SEC_DEF_TYPE.JWT)
# ################################################################################################################################
def _update_ntlm(self, name, config):
self.ntlm_config[name] = Bunch()
self.ntlm_config[name].config = config
    def ntlm_get(self, name):
        """ Returns the configuration of the NTLM security definition of the given name.
        """
        # Wait until the definition has been deployed before reading it
        wait_for_dict_key(self.ntlm_config, name)
        with self.url_sec_lock:
            return self.ntlm_config.get(name)
    def on_broker_msg_SECURITY_NTLM_CREATE(self, msg, *args):
        """ Creates a new NTLM security definition.
        """
        with self.url_sec_lock:
            self._update_ntlm(msg.name, msg)
    def on_broker_msg_SECURITY_NTLM_EDIT(self, msg, *args):
        """ Updates an existing NTLM security definition.
        """
        with self.url_sec_lock:
            # The edit message does not carry the password - preserve the current one across the update
            current_config = self.ntlm_config[msg.old_name]
            msg.password = current_config.config.password
            # Rename-aware - remove under the old name, re-add under the new one,
            # then propagate the change to URL security definitions.
            del self.ntlm_config[msg.old_name]
            self._update_ntlm(msg.name, msg)
            self._update_url_sec(msg, SEC_DEF_TYPE.NTLM)
    def on_broker_msg_SECURITY_NTLM_DELETE(self, msg, *args):
        """ Deletes an NTLM security definition.
        """
        with self.url_sec_lock:
            # Remove the channel using this definition, the definition itself
            # and finally any URL security entries pointing to it.
            self._delete_channel_data('ntlm', msg.name)
            del self.ntlm_config[msg.name]
            self._update_url_sec(msg, SEC_DEF_TYPE.NTLM, True)
    def on_broker_msg_SECURITY_NTLM_CHANGE_PASSWORD(self, msg, *args):
        """ Changes password of an NTLM security definition.
        """
        # Wait until the definition exists - the change may arrive before the create message is processed
        wait_for_dict_key(self.ntlm_config, msg.name)
        with self.url_sec_lock:
            self.ntlm_config[msg.name]['config']['password'] = msg.password
            self._update_url_sec(msg, SEC_DEF_TYPE.NTLM)
# ################################################################################################################################
def _update_oauth(self, name, config):
self.oauth_config[name] = Bunch()
self.oauth_config[name].config = config
    def oauth_get(self, name):
        """ Returns the configuration of the OAuth account of the given name.
        """
        # Wait until the definition has been deployed before reading it
        wait_for_dict_key(self.oauth_config, name)
        with self.url_sec_lock:
            return self.oauth_config.get(name)
    def oauth_get_by_id(self, def_id):
        """ Same as oauth_get but returns information by definition ID.
        """
        with self.url_sec_lock:
            return self._get_sec_def_by_id(self.oauth_config, def_id)
    def on_broker_msg_SECURITY_OAUTH_CREATE(self, msg, *args):
        """ Creates a new OAuth account.
        """
        with self.url_sec_lock:
            self._update_oauth(msg.name, msg)
    def on_broker_msg_SECURITY_OAUTH_EDIT(self, msg, *args):
        """ Updates an existing OAuth account.
        """
        with self.url_sec_lock:
            # The edit message does not carry the password - preserve the current one across the update
            current_config = self.oauth_config[msg.old_name]
            msg.password = current_config.config.password
            # Rename-aware - remove under the old name, re-add under the new one,
            # then propagate the change to URL security definitions.
            del self.oauth_config[msg.old_name]
            self._update_oauth(msg.name, msg)
            self._update_url_sec(msg, SEC_DEF_TYPE.OAUTH)
    def on_broker_msg_SECURITY_OAUTH_DELETE(self, msg, *args):
        """ Deletes an OAuth account.
        """
        with self.url_sec_lock:
            # Remove the channel using this definition, the definition itself
            # and finally any URL security entries pointing to it.
            self._delete_channel_data('oauth', msg.name)
            del self.oauth_config[msg.name]
            self._update_url_sec(msg, SEC_DEF_TYPE.OAUTH, True)
def on_broker_msg_SECURITY_OAUTH_CHANGE_PASSWORD(self, msg, *args):
""" Changes the password of an OAuth account.
"""
wait_for_dict_key(self.oauth_config, msg.name)
with self.url_sec_lock:
self.oauth_config[msg.name]['config']['password'] = msg.password
self._update_url_sec(msg, SEC_DEF_TYPE.OAUTH)
# ################################################################################################################################
def _update_tls_channel_sec(self, name, config):
    # Stores the definition, parsing its string 'value' into a dict of TLS attributes
    self.tls_channel_sec_config[name] = Bunch()
    self.tls_channel_sec_config[name].config = config
    self.tls_channel_sec_config[name].config.value = dict(parse_tls_channel_security_definition(config.value))

def tls_channel_security_get(self, name):
    # Returns the TLS channel security definition of the given name (may be None),
    # waiting first in case its creation is still in progress.
    wait_for_dict_key(self.tls_channel_sec_config, name)
    with self.url_sec_lock:
        return self.tls_channel_sec_config.get(name)

def on_broker_msg_SECURITY_TLS_CHANNEL_SEC_CREATE(self, msg, *args):
    """ Creates a new security definition based on TLS certificates.
    """
    with self.url_sec_lock:
        self._update_tls_channel_sec(msg.name, msg)

def on_broker_msg_SECURITY_TLS_CHANNEL_SEC_EDIT(self, msg, *args):
    """ Updates an existing security definition based on TLS certificates.
    """
    with self.url_sec_lock:
        # Re-create the definition under its (possibly new) name
        del self.tls_channel_sec_config[msg.old_name]
        self._update_tls_channel_sec(msg.name, msg)
        self._update_url_sec(msg, SEC_DEF_TYPE.TLS_CHANNEL_SEC)

def on_broker_msg_SECURITY_TLS_CHANNEL_SEC_DELETE(self, msg, *args):
    """ Deletes a security definition based on TLS certificates.
    """
    with self.url_sec_lock:
        del self.tls_channel_sec_config[msg.name]
        # The trailing True indicates deletion of the URL security info
        self._update_url_sec(msg, SEC_DEF_TYPE.TLS_CHANNEL_SEC, True)
# ################################################################################################################################
def _update_tls_key_cert(self, name, config):
    # Stores (or overwrites) the TLS key/cert definition under its name
    self.tls_key_cert_config[name] = Bunch()
    self.tls_key_cert_config[name].config = config

# ################################################################################################################################

def tls_key_cert_get(self, name):
    # Returns the TLS key/cert definition of the given name (may be None),
    # waiting first in case its creation is still in progress.
    wait_for_dict_key(self.tls_key_cert_config, name)
    with self.url_sec_lock:
        return self.tls_key_cert_config.get(name)

# ################################################################################################################################

def on_broker_msg_SECURITY_TLS_KEY_CERT_CREATE(self, msg, *args):
    """ Creates a new TLS key/cert security definition.
    """
    with self.url_sec_lock:
        self._update_tls_key_cert(msg.name, msg)

# ################################################################################################################################

def on_broker_msg_SECURITY_TLS_KEY_CERT_EDIT(self, msg, *args):
    """ Updates an existing TLS key/cert security definition.
    """
    with self.url_sec_lock:
        # Re-create the definition under its (possibly new) name
        del self.tls_key_cert_config[msg.old_name]
        self._update_tls_key_cert(msg.name, msg)
        self._update_url_sec(msg, SEC_DEF_TYPE.TLS_KEY_CERT)

# ################################################################################################################################

def on_broker_msg_SECURITY_TLS_KEY_CERT_DELETE(self, msg, *args):
    """ Deletes an TLS key/cert security definition.
    """
    with self.url_sec_lock:
        del self.tls_key_cert_config[msg.name]
        # The trailing True indicates deletion of the URL security info
        self._update_url_sec(msg, SEC_DEF_TYPE.TLS_KEY_CERT, True)
# ################################################################################################################################
def get_channel_by_name(self, name, _channel=CONNECTION.CHANNEL):
    # type: (unicode, unicode) -> dict
    """ Returns the first channel-type entry of the given name, or None if there is no match.
    """
    for channel_info in self.channel_data:
        if channel_info['connection'] == _channel and channel_info['name'] == name:
            return channel_info
# ################################################################################################################################
def sort_channel_data(self):
    """ Sorts channel items by name and then re-arranges the result so that user-facing services are closer to the begining
    of the list which makes it faster to look them up - searches in the list are O(n).
    """
    by_name = itemgetter('name')

    # Partition the channels into user-facing and internal ones
    user_facing = [elem for elem in self.channel_data if not elem['is_internal']]
    internal = [elem for elem in self.channel_data if elem['is_internal']]

    user_facing.sort(key=by_name)
    internal.sort(key=by_name) # Internal services will never conflict in names but let's do it anyway

    # User-facing channels go first - assign in place so existing references to the list stay valid
    self.channel_data[:] = user_facing + internal
# ################################################################################################################################
def _channel_item_from_msg(self, msg, match_target, old_data=None):
    """ Creates a channel info bunch out of an incoming CREATE_EDIT message.
    """
    old_data = old_data or {}
    channel_item = {}

    # Copy over all directly-mapped attributes from the message (missing ones become None)
    for name in('connection', 'content_type', 'data_format', 'host', 'id', 'has_rbac', 'impl_name', 'is_active',
        'is_internal', 'merge_url_params_req', 'method', 'name', 'params_pri', 'ping_method', 'pool_size', 'service_id',
        'service_name', 'soap_action', 'soap_version', 'transport', 'url_params_pri', 'url_path', 'sec_use_rbac',
        'cache_type', 'cache_id', 'cache_name', 'cache_expiry', 'content_encoding', 'match_slash', 'hl7_version',
        'json_path', 'should_parse_on_input', 'should_validate', 'should_return_errors', 'data_encoding',
        'is_audit_log_sent_active', 'is_audit_log_received_active', 'max_len_messages_sent', 'max_len_messages_received',
        'max_bytes_per_message_sent', 'max_bytes_per_message_received', 'security_groups', 'security_groups_ctx'):
        channel_item[name] = msg.get(name)

    # If a security definition is assigned, attach its details too
    if msg.get('security_id'):
        channel_item['sec_type'] = msg['sec_type']
        channel_item['security_id'] = msg['security_id']
        channel_item['security_name'] = msg['security_name']

        # Build a security groups context if any groups are assigned to the channel
        if security_groups := msg.get('security_groups'):
            channel_item['security_groups'] = security_groups
            self.worker.server.security_groups_ctx_builder.populate_members()
            security_groups_ctx = self.worker.server.security_groups_ctx_builder.build_ctx(channel_item['id'], security_groups)
            channel_item['security_groups_ctx'] = security_groups_ctx

    # For JSON-RPC
    channel_item['service_whitelist'] = msg.get('service_whitelist', [])

    channel_item['service_impl_name'] = msg['impl_name']
    channel_item['match_target'] = match_target

    # Pre-compile the matcher so URL matching at runtime does not have to parse the target each time
    channel_item['match_target_compiled'] = Matcher(channel_item['match_target'], channel_item['match_slash'])

    # For rate limiting
    for name in('is_rate_limit_active', 'rate_limit_def', 'rate_limit_type', 'rate_limit_check_parent_def'):
        channel_item[name] = msg.get(name)

    return channel_item
# ################################################################################################################################
def _sec_info_from_msg(self, msg):
    """ Creates a security info bunch out of an incoming CREATE_EDIT message.
    """
    sec_info = Bunch()
    sec_info.id = msg.id
    sec_info.is_active = msg.is_active
    sec_info.data_format = msg.data_format
    sec_info.transport = msg.transport
    sec_info.sec_use_rbac = msg.sec_use_rbac

    if msg.get('security_name'):
        # Copy the underlying security definition's config - the per-type config dict
        # is looked up by attribute name, e.g. 'basic_auth' -> self.basic_auth_config.
        sec_info.sec_def = Bunch()
        sec_config = getattr(self, '{}_config'.format(msg['sec_type']))
        config_item = sec_config[msg['security_name']]

        for k, _v in iteritems(config_item['config']):
            sec_info.sec_def[k] = config_item['config'][k]
    else:
        # No security definition is attached to this channel
        sec_info.sec_def = ZATO_NONE

    return sec_info
# ################################################################################################################################
def _set_up_audit_log(self, channel_item, is_edit):
    # type: (dict, bool)

    # Set up audit log if it is enabled in either direction (messages sent or received)
    if channel_item.get('is_audit_log_sent_active') or channel_item.get('is_audit_log_received_active'):
        self.worker.server.set_up_object_audit_log(
            CHANNEL.HTTP_SOAP, channel_item['id'], channel_item, is_edit)
# ################################################################################################################################
def _create_channel(self, msg, old_data):
    """ Creates a new channel, both its core data and the related security definition.
    Clears out URL cache for that entry, if it existed at all.
    """
    # If we are editing an object, old_data will be populated, otherwise, it is an empty dict
    is_edit = bool(old_data)

    match_target = get_match_target(msg, http_methods_allowed_re=self.worker.server.http_methods_allowed_re)
    channel_item = self._channel_item_from_msg(msg, match_target, old_data)
    self.channel_data.append(channel_item)
    self.url_sec[match_target] = self._sec_info_from_msg(msg)

    # Any stale cache entry for this match target must go away
    self._remove_from_cache(match_target)
    self.sort_channel_data()

    # Set up rate limiting, if it is enabled
    if channel_item.get('is_rate_limit_active'):
        self.worker.server.set_up_object_rate_limiting(
            RATE_LIMIT.OBJECT_TYPE.HTTP_SOAP, channel_item['name'], config_=channel_item)

    # Set up audit log
    self._set_up_audit_log(channel_item, is_edit)
# ################################################################################################################################
def _delete_channel(self, msg):
    """ Deletes a channel, both its core data and the related security definition. Clears relevant
    entry in URL cache. Returns the deleted data.
    """
    # The match target is rebuilt from the channel's previous attributes
    old_match_target = get_match_target({
        'http_method': msg.get('old_http_method'),
        'http_accept': msg.get('old_http_accept'),
        'soap_action': msg.get('old_soap_action'),
        'url_path': msg.get('old_url_path'),
    }, http_methods_allowed_re=self.worker.server.http_methods_allowed_re)

    # Delete from URL cache
    self._remove_from_cache(old_match_target)

    # In case of an internal error, we won't have the match at all
    match_idx = ZATO_NONE

    # Find the index of the channel to delete. Previously this called list.index inside
    # the loop, which made the search O(n^2) - enumerate gives the index in one O(n) pass.
    for idx, item in enumerate(self.channel_data):
        if item['match_target'] == old_match_target:
            match_idx = idx
            break

    # No error, let's delete channel info
    if match_idx != ZATO_NONE:
        old_data = self.channel_data.pop(match_idx)
    else:
        old_data = {}

    # Channel's security now
    del self.url_sec[old_match_target]

    # Re-sort all elements to match against
    self.sort_channel_data()

    # Delete rate limiting configuration
    self.worker.server.delete_object_rate_limiting(RATE_LIMIT.OBJECT_TYPE.HTTP_SOAP, msg.name)

    # Delete audit log configuration
    self.worker.server.audit_log.delete_container(CHANNEL.HTTP_SOAP, msg.id)

    return old_data
# ################################################################################################################################
def on_broker_msg_CHANNEL_HTTP_SOAP_CREATE_EDIT(self, msg, *args):
    """ Creates or updates an HTTP/SOAP channel.
    """
    with self.url_sec_lock:

        # Only edits have 'old_name', creates don't. So for edits we delete
        # the channel and later recreate it while create actions do not have anything to delete.
        if msg.get('old_name'):
            old_data = self._delete_channel(msg)
        else:
            old_data = {}

        self._create_channel(msg, old_data)

def on_broker_msg_CHANNEL_HTTP_SOAP_DELETE(self, msg, *args):
    """ Deletes an HTTP channel.
    """
    with self.url_sec_lock:
        self._delete_channel(msg)
# ################################################################################################################################
def on_broker_msg_MSG_JSON_POINTER_EDIT(self, msg):
    # JSON Pointer edits require no action in this store
    pass

def on_broker_msg_MSG_JSON_POINTER_DELETE(self, msg):
    # JSON Pointer deletions require no action in this store
    pass

# ################################################################################################################################

def on_broker_msg_SECURITY_TLS_CA_CERT_CREATE(self, msg):
    # Ignored, does nothing.
    pass

# Edit and delete events for CA certs are no-ops too - reuse the same handler
on_broker_msg_SECURITY_TLS_CA_CERT_DELETE = on_broker_msg_SECURITY_TLS_CA_CERT_EDIT = on_broker_msg_SECURITY_TLS_CA_CERT_CREATE
# ################################################################################################################################
# ################################################################################################################################
| 49,240
|
Python
|
.py
| 872
| 46.170872
| 131
| 0.546794
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,106
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/http_soap/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common import exception
# Backward compatibility - in 3.0 these exceptions were moved from here to zato.common.exception.
# The module-level aliases below keep imports of the old locations working.
ClientHTTPError = exception.ClientHTTPError
BadRequest = exception.BadRequest
Conflict = exception.Conflict
Forbidden = exception.Forbidden
MethodNotAllowed = exception.MethodNotAllowed
NotFound = exception.NotFound
Unauthorized = exception.Unauthorized
TooManyRequests = exception.TooManyRequests
| 677
|
Python
|
.py
| 17
| 38.470588
| 96
| 0.819572
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,107
|
outgoing.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/http_soap/outgoing.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from copy import deepcopy
from datetime import datetime
from http.client import OK
from io import StringIO
from logging import DEBUG, getLogger
from traceback import format_exc
from urllib.parse import urlencode
# gevent
from gevent.lock import RLock
# requests
from requests import Response as _RequestsResponse
from requests.adapters import HTTPAdapter
from requests.exceptions import Timeout as RequestsTimeout
from requests.sessions import Session as RequestsSession
# requests-ntlm
from requests_ntlm import HttpNtlmAuth
# requests-toolbelt
from requests_toolbelt import MultipartEncoder
# Zato
from zato.common.api import ContentType, CONTENT_TYPE, DATA_FORMAT, NotGiven, SEC_DEF_TYPE, URL_TYPE
from zato.common.exception import Inactive, TimeoutException
from zato.common.json_ import dumps, loads
from zato.common.marshal_.api import extract_model_class, is_list, Model
from zato.common.typing_ import cast_
from zato.common.util.api import get_component_name
from zato.common.util.config import extract_param_placeholders
from zato.common.util.open_ import open_rb
from zato.server.connection.queue import ConnectionQueue
# ################################################################################################################################
# ################################################################################################################################
if 0:
from sqlalchemy.orm.session import Session as SASession
from zato.common.bearer_token import BearerTokenInfoResult
from zato.common.typing_ import any_, callnone, dictnone, list_, stranydict, strdictnone, strstrdict, type_
from zato.server.base.parallel import ParallelServer
from zato.server.config import ConfigDict
ConfigDict = ConfigDict
ParallelServer = ParallelServer
# ################################################################################################################################
# ################################################################################################################################
# Dedicated logger for outgoing REST connections
logger = getLogger('zato_rest')

# Cached flag so hot paths do not need to query the logger each time
has_debug = logger.isEnabledFor(DEBUG)

# ################################################################################################################################
# ################################################################################################################################

# SOAP envelope namespaces, per SOAP version
soapenv11_namespace = 'http://schemas.xmlsoap.org/soap/envelope/'
soapenv12_namespace = 'http://www.w3.org/2003/05/soap-envelope'

# ################################################################################################################################
# ################################################################################################################################

# Short aliases for the security definition types used throughout this module
_API_Key = SEC_DEF_TYPE.APIKEY
_Basic_Auth = SEC_DEF_TYPE.BASIC_AUTH
_NTLM = SEC_DEF_TYPE.NTLM
_OAuth = SEC_DEF_TYPE.OAUTH
_TLS_Key_Cert = SEC_DEF_TYPE.TLS_KEY_CERT
_WSS = SEC_DEF_TYPE.WSS
# ################################################################################################################################
# ################################################################################################################################
class Response(_RequestsResponse):
    """ A requests Response subclass extended with a .data attribute.
    """
    # Presumably holds the deserialized response payload - assigned by callers; TODO confirm
    data: 'strdictnone'
# ################################################################################################################################
# ################################################################################################################################
class HTTPSAdapter(HTTPAdapter):
    """ An adapter which exposes a method for clearing out the underlying pool. Useful with HTTPS as it allows to update TLS
    material on the fly.
    """
    def clear_pool(self):
        # Discard all pooled connections so new ones pick up the current TLS configuration
        self.poolmanager.clear()
# ################################################################################################################################
# ################################################################################################################################
class BaseHTTPSOAPWrapper:
    """ Base class for HTTP/SOAP connection wrappers.
    """
    def __init__(
        self,
        config, # type: stranydict
        _requests_session=None, # type: SASession | None
        server=None # type: ParallelServer | None
    ) -> 'None':
        self.config = config

        # Timeout is normalized to a float, with a falsy value meaning no timeout (0)
        self.config['timeout'] = float(self.config['timeout']) if self.config['timeout'] else 0

        # A copy of the config that is safe to log - the password is masked
        self.config_no_sensitive = deepcopy(self.config)
        self.config_no_sensitive['password'] = '***'

        # NOTE(review): 'RequestsSession or _requests_session' always evaluates to the imported
        # RequestsSession class because a class object is truthy, so the injected
        # _requests_session is effectively ignored here - confirm whether the operands
        # should be reversed for dependency injection to work.
        self.RequestsSession = RequestsSession or _requests_session
        self.server = cast_('ParallelServer', server)
        self.session = RequestsSession()

        # Mount an adapter whose pool can be cleared at runtime, e.g. after TLS material changes
        self.https_adapter = HTTPSAdapter()
        self.session.mount('https://', self.https_adapter)

        self._component_name = get_component_name()
        self.default_content_type = self.get_default_content_type()
        self.address = ''
        self.path_params = []
        self.base_headers = {}
        self.sec_type = self.config['sec_type']

        # Per-version SOAP content types and envelope/header templates
        self.soap = {}

        self.soap['1.1'] = {}
        self.soap['1.1']['content_type'] = 'text/xml; charset=utf-8'
        self.soap['1.1']['message'] = """<?xml version="1.0" encoding="utf-8"?>
<s11:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:s11="%s">
{header}
<s11:Body>{data}</s11:Body>
</s11:Envelope>""" % (soapenv11_namespace,)

        self.soap['1.1']['header_template'] = """<s11:Header xmlns:wsse="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd" >
<wsse:Security>
<wsse:UsernameToken>
<wsse:Username>{Username}</wsse:Username>
<wsse:Password Type="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText">{Password}</wsse:Password>
</wsse:UsernameToken>
</wsse:Security>
</s11:Header>
"""

        self.soap['1.2'] = {}
        self.soap['1.2']['content_type'] = 'application/soap+xml; charset=utf-8'
        self.soap['1.2']['message'] = """<?xml version="1.0" encoding="utf-8"?>
<s12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:s12="%s">{header}
<s12:Body>{data}</s12:Body>
</s12:Envelope>""" % (soapenv12_namespace,)

        self.soap['1.2']['header_template'] = """<s12:Header xmlns:wsse="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd" >
<wsse:Security>
<wsse:UsernameToken>
<wsse:Username>{Username}</wsse:Username>
<wsse:Password Type="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText">{Password}</wsse:Password>
</wsse:UsernameToken>
</wsse:Security>
</s12:Header>
"""

        self.set_address_data()
        self.set_auth()

# ################################################################################################################################

    def set_auth(self) -> 'None':
        """ Configures authentication details based on the connection's security type. """

        # Local variables
        self.requests_auth = None
        self.username = None

        # #######################################
        #
        # API Keys - the configured username is the header name, the password its value
        #
        # #######################################
        if self.sec_type == _API_Key:
            username = self.config.get('orig_username')
            if not username:
                username = self.config['username']
            self.base_headers[username] = self.config['password']

        # #######################################
        #
        # HTTP Basic Auth
        #
        # #######################################
        elif self.sec_type in {_Basic_Auth}:
            self.requests_auth = self.auth
            self.username = self.requests_auth[0]

        # #######################################
        #
        # NTLM
        #
        # #######################################
        elif self.sec_type == _NTLM:
            _username, _password = self.auth
            _requests_auth = HttpNtlmAuth(_username, _password)
            self.requests_auth = _requests_auth
            self.username = _username

        # #######################################
        #
        # WS-Security - the credentials go into the SOAP header template
        #
        # #######################################
        elif self.sec_type == _WSS:
            self.soap[self.config['soap_version']]['header'] = \
                self.soap[self.config['soap_version']]['header_template'].format(
                    Username=self.config['username'], Password=self.config['password'])

# ################################################################################################################################

    def _get_auth(self) -> 'any_':
        """ Returns a username and password pair or None, if no security definition has been attached.
        """
        if self.sec_type in {_Basic_Auth, _NTLM}:
            auth = (self.config['username'], self.config['password'])
        else:
            auth = None

        return auth

    auth = property(fget=_get_auth, doc=_get_auth.__doc__)

# ################################################################################################################################

    def invoke_http(
        self,
        cid:'str',
        method:'str',
        address:'str',
        data:'str',
        headers:'strstrdict',
        hooks:'any_',
        *args:'any_',
        **kwargs:'any_'
    ) -> '_RequestsResponse':
        """ Sends an HTTP request, handling TLS, authentication (incl. Bearer tokens) and logging.
        """

        # Local variables
        params = kwargs.get('params')
        json = kwargs.pop('json', None)

        # A client certificate is attached only for key/cert-secured connections
        cert = self.config['tls_key_cert_full_path'] if self.sec_type == _TLS_Key_Cert else None

        # TLS verification can be disabled globally through environment variables
        if ('ZATO_SKIP_TLS_VERIFY' in os.environ) or ('Zato_Skip_TLS_Verify' in os.environ):
            tls_verify = False
        else:
            tls_verify = self.config.get('tls_verify', True)
            tls_verify = tls_verify if isinstance(tls_verify, bool) else tls_verify.encode('utf-8')

        # This is optional and, if not given, we will use the security configuration from self.config
        sec_def_name = kwargs.pop('sec_def_name', NotGiven)

        # If we have a security definition name on input, it must be a Bearer token (OAuth)
        if sec_def_name is not NotGiven:
            _sec_type = _OAuth
        else:
            sec_def_name = self.config['security_name']
            _sec_type = self.sec_type

        # Force type hints
        sec_def_name = cast_('str', sec_def_name)

        # Reusable
        is_bearer_token = _sec_type == _OAuth

        # OAuth scopes can be provided on input even if we do not have a Bearer token definition attached,
        # which is why we .pop them here, to make sure they do not propagate to the requests library.
        scopes = kwargs.pop('auth_scopes', '')

        try:
            # Bearer tokens are obtained dynamically ..
            if is_bearer_token:

                # .. this is reusable ..
                sec_def = self.server.security_facade.get_bearer_token_by_name(sec_def_name)

                # .. each OAuth definition will use a specific data format ..
                data_format = sec_def['data_format']

                # .. otherwise, we can check if they are provided in the security definition itself ..
                if not scopes:
                    scopes = sec_def.get('scopes') or ''
                    scopes = scopes.splitlines()
                    scopes = ' '.join(scopes)

                # .. get a Bearer token ..
                result = self._get_bearer_token_auth(sec_def_name, scopes, data_format)

                # .. populate headers ..
                headers['Authorization'] = f'Bearer {result.info.token}'

                # .. this is needed for later use ..
                token_expires_in_sec = result.cache_expiry
                token_is_cache_hit = result.is_cache_hit
                token_cache_hits = result.cache_hits

                # This is needed by request
                auth = None

            # .. we enter here if this is not a Bearer token definition ..
            else:

                # .. otherwise, the credentials will have been already obtained
                # .. but note that Suds connections don't have requests_auth, hence the getattr call ..
                auth = getattr(self, 'requests_auth', None)

                # .. we have no token to report about.
                token_expires_in_sec = None
                token_is_cache_hit = None
                token_cache_hits = None

            # .. basic details about what we are sending what we are sending ..
            msg = f'REST out → cid={cid}; {method} {address}; name:{self.config["name"]}; params={params}; len={len(data)}' + \
                f'; sec={sec_def_name} ({_sec_type})'

            # .. optionally, log details of the Bearer token ..
            if is_bearer_token:
                msg += f'; expiry={token_expires_in_sec}; tok-from-cache={token_is_cache_hit}; tok-cache-hits={token_cache_hits}'

            # .. log the information about our request ..
            logger.info(msg)

            # .. do send it ..
            response = self.session.request(
                method, address, data=data, json=json, auth=auth, headers=headers, hooks=hooks,
                cert=cert, verify=tls_verify, timeout=self.config['timeout'], *args, **kwargs)

            # .. log what we received ..
            msg = f'REST out ← cid={cid}; {response.status_code} time={response.elapsed}; len={len(response.text)}'
            logger.info(msg)

            # .. and return it.
            return response

        except RequestsTimeout:
            # Preserve the traceback while reporting the timeout under this request's CID
            raise TimeoutException(cid, format_exc())

# ################################################################################################################################

    def _get_bearer_token_auth(self, sec_def_name:'str', scopes:'str', data_format:'str') -> 'BearerTokenInfoResult':

        # This will get the token from cache or from the remote auth. server ..
        result = self.server.bearer_token_manager.get_bearer_token_info_by_sec_def_name(sec_def_name, scopes, data_format)

        # .. which we can return to our caller.
        return result

# ################################################################################################################################

    def ping(self, cid:'str', return_response:'bool'=False, log_verbose:'bool'=False, *, ping_path:'str'='/') -> 'any_':
        """ Pings a given HTTP/SOAP resource
        """
        logger.info('Pinging:`%s`', self.config_no_sensitive)

        # Session object will write some info to it ..
        verbose = StringIO()

        start = datetime.utcnow()

        # HEAD is the default method unless the configuration says otherwise
        ping_method = self.config['ping_method'] or 'HEAD'

        def zato_pre_request_hook(hook_data:'stranydict', *args:'any_', **kwargs:'any_') -> 'None':
            # Records the timestamp, method and URL of the outgoing ping request
            entry = '{} (UTC)\n{} {}\n'.format(datetime.utcnow().isoformat(),
                ping_method, hook_data['request'].url)
            _ = verbose.write(entry)

        # .. potential wrapper paths must be replaced ..
        ping_path = ping_path or '/'
        address = self.address.replace(r'{_zato_path}', ping_path)

        # .. invoke the other end ..
        response = self.invoke_http(cid, ping_method, address, '', self._create_headers(cid, {}),
            {'zato_pre_request':zato_pre_request_hook})

        # .. store additional info, get and close the stream.
        _ = verbose.write('Code: {}'.format(response.status_code))
        _ = verbose.write('\nResponse time: {}'.format(datetime.utcnow() - start))
        value = verbose.getvalue()
        verbose.close()

        if log_verbose:
            func = logger.info if response.status_code == OK else logger.warning
            func(value)

        return response if return_response else value

# ################################################################################################################################

    def get_default_content_type(self) -> 'str':
        """ Returns the content type to use by default, based on transport and data format. """

        # An explicitly configured content type always wins
        if self.config['content_type']:
            return self.config['content_type']

        # For requests other than SOAP, set content type only if we know the data format
        if self.config['data_format']:

            # Not SOAP
            if self.config['transport'] == URL_TYPE.PLAIN_HTTP:

                # JSON
                return CONTENT_TYPE.JSON # type: ignore

            # SOAP
            elif self.config['transport'] == URL_TYPE.SOAP:

                # SOAP 1.1
                if self.config['soap_version'] == '1.1':
                    return CONTENT_TYPE.SOAP11 # type: ignore

                # SOAP 1.2
                else:
                    return CONTENT_TYPE.SOAP12 # type: ignore

        # If we are here, assume it is regular text by default
        return 'text/plain'

# ################################################################################################################################

    def _create_headers(self, cid:'str', user_headers:'strstrdict', now:'str'='') -> 'strstrdict':
        """ Builds the headers for an outgoing request - base headers plus Zato metadata plus user-provided ones. """
        headers = deepcopy(self.base_headers)
        headers.update({
            'X-Zato-CID': cid,
            'X-Zato-Component': self._component_name,
            'X-Zato-Msg-TS': now or datetime.utcnow().isoformat(),
        })

        if self.config.get('transport') == URL_TYPE.SOAP:
            headers['SOAPAction'] = self.config.get('soap_action')

        # A user-provided Content-Type takes precedence over the default one
        content_type = user_headers.pop('Content-Type', self.default_content_type)
        if content_type:
            headers['Content-Type'] = content_type

        # Remaining user headers are copied verbatim
        headers.update(user_headers)

        return headers

# ################################################################################################################################

    def set_address_data(self) -> 'None':
        """Sets the full address to invoke and parses input URL's configuration,
        to extract any named parameters that will have to be passed in by users
        during actual calls to the resource.
        """
        # Set the full adddress ..
        self.address = '{}{}'.format(self.config['address_host'], self.config['address_url_path'])

        # .. and parse out placeholders for path parameters.
        for param_name in extract_param_placeholders(self.config['address_url_path']):
            # Strip the surrounding braces from each placeholder name
            self.path_params.append(param_name[1:-1])
# ################################################################################################################################
# ################################################################################################################################
class HTTPSOAPWrapper(BaseHTTPSOAPWrapper):
""" A thin wrapper around the API exposed by the 'requests' package.
"""
def __init__(
    self,
    server, # type: ParallelServer
    config, # type: stranydict
    requests_module=None # type: any_
) -> 'None':
    """ Initializes the wrapper with the server it runs on and this connection's configuration. """
    super(HTTPSOAPWrapper, self).__init__(config, requests_module, server)
    self.server = server
# ################################################################################################################################
def __str__(self) -> 'str':
    # Includes the object's id() and its configuration with sensitive fields masked
    return '<{} at {}, config:[{}]>'.format(self.__class__.__name__, hex(id(self)), self.config_no_sensitive)

__repr__ = __str__
# ################################################################################################################################
def format_address(self, cid:'str', params:'stranydict') -> 'tuple[str, stranydict]':
    """ Formats a URL path to an external resource. Note that exceptions raised
    do not contain anything except for CID. This is in order to keep any potentially
    sensitive data from leaking to clients.
    """
    if not params:
        logger.warning('CID:`%s` No parameters given for URL path:`%r`', cid, self.config['address_url_path'])
        raise ValueError('CID:`{}` No parameters given for URL path'.format(cid))

    try:
        # Pop each expected path parameter out of the input dict - whatever remains
        # after the substitutions is returned as query-string parameters.
        substitutions = {key: params.pop(key) for key in self.path_params} # type: ignore
        return (self.address.format(**substitutions), dict(params))
    except(KeyError, ValueError):
        logger.warning('CID:`%s` Could not build URL address `%r` path:`%r` with params:`%r`, e:`%s`',
            cid, self.address, self.config['address_url_path'], params, format_exc())

        raise ValueError('CID:`{}` Could not build URL path'.format(cid))
# ################################################################################################################################
def _impl(self) -> 'RequestsSession':
    """ Returns the self.session object through which access to HTTP/SOAP resources is provided.
    """
    return self.session

# Exposed as a read-only property for callers that need the underlying session
impl = property(fget=_impl, doc=_impl.__doc__)
# ################################################################################################################################
def _enforce_is_active(self) -> 'None':
if not self.config['is_active']:
raise Inactive(self.config['name'])
# ################################################################################################################################
def _soap_data(self, data:'str | bytes', headers:'stranydict') -> 'tuple[any_, stranydict]':
""" Wraps the data in a SOAP-specific messages and adds the headers required.
"""
needs_soap_wrapper = False
soap_config:'strstrdict' = self.soap[self.config['soap_version']]
# The idea here is that even though there usually won't be the Content-Type
# header provided by the user, we shouldn't overwrite it if one has been
# actually passed in.
if not headers.get('Content-Type'):
headers['Content-Type'] = soap_config['content_type']
# We do not need an envelope if the data already has one ..
if isinstance(data, bytes):
if b':Envelope' in data:
return data, headers # type: ignore
else:
needs_soap_wrapper = True
else:
if ':Envelope' in data:
return data, headers # type: ignore
else:
needs_soap_wrapper = True
if needs_soap_wrapper:
return soap_config['message'].format(header='', data=data), headers
else:
return data, headers
# ################################################################################################################################
def http_request(
    self,
    method:'str',
    cid:'str',
    data:'any_'='',
    params:'dictnone'=None,
    *args:'any_',
    **kwargs:'any_'
) -> 'Response':
    """ Invokes the remote HTTP or SOAP endpoint with the given method and data.

    Serializes the input per the configured data format (JSON / form data),
    builds headers and the target address, invokes the connection and then
    parses the response - JSON responses are deserialized into response.data,
    optionally mapped onto a dataclass model given in kwargs['model'].

    Raises an exception if a JSON response cannot be parsed.
    """
    # First, make sure that the connection is active
    self._enforce_is_active()

    # Local variables
    _is_soap = self.config['transport'] == 'soap'

    # Pop it here for later use because we cannot pass it to the requests module
    model = kwargs.pop('model', None)

    # We do not serialize ourselves data based on this content type,
    # leaving it up to the underlying HTTP library to do it ..
    needs_serialize_based_on_content_type = self.config.get('content_type') != ContentType.FormURLEncoded

    # .. otherwise, our input data may need to be serialized ..
    if needs_serialize_based_on_content_type:

        # .. but we never serialize string objects,
        # .. assuming they already represent what ought to be sent as-is ..
        needs_request_serialize = not isinstance(data, str)

        # .. if we are here, we know check further if serialization is required ..
        if needs_request_serialize:

            # .. we are explicitly told to send JSON ..
            if self.config['data_format'] == DATA_FORMAT.JSON:

                # .. models need to be converted to dicts before they can be serialized ..
                if isinstance(data, Model):
                    data = data.to_dict()

                # .. do serialize to JSON now ..
                data = dumps(data)

            # .. we are explicitly told to submit form-like data ..
            elif self.config['data_format'] == DATA_FORMAT.FORM_DATA:
                data = urlencode(data)

    # .. check if we have custom headers on input ..
    headers = kwargs.pop('headers', None) or {}

    # .. build a default set of headers now ..
    headers = self._create_headers(cid, headers)

    # .. SOAP requests need to be specifically formatted now ..
    if _is_soap:
        data, headers = self._soap_data(data, headers)

    # .. check if we have custom query parameters ..
    params = params or {}

    # .. if the address is a template, format it with input parameters ..
    if self.path_params:
        address, qs_params = self.format_address(cid, params) # type: ignore
    else:
        address, qs_params = self.address, dict(params)

    # .. make sure that Unicode objects are turned into bytes ..
    if needs_serialize_based_on_content_type and (not _is_soap):
        if isinstance(data, str):
            data = data.encode('utf-8')

    # .. do invoke the connection ..
    response = self.invoke_http(cid, method, address, data, headers, {}, params=qs_params, *args, **kwargs)

    # .. by default, we have no parsed response at all, ..
    # .. which means that we can assume it will be the same as the raw, text response ..
    response.data = response.text # type: ignore

    # .. check if we are explicitly told that we handle JSON ..
    _has_data_format_json = self.config['data_format'] == DATA_FORMAT.JSON

    # .. check if we perhaps received JSON in the response ..
    _has_json_content_type = 'application/json' in (response.headers.get('Content-Type') or '') # type: ignore

    # .. are we actually handling JSON in this response .. ?
    _is_json:'bool' = _has_data_format_json or _has_json_content_type # type: ignore

    # .. if yes, try to parse the response accordingly ..
    if _is_json:
        try:
            response.data = loads(response.text or '""') # type: ignore
        except ValueError as e:
            raise Exception('Could not parse JSON response `{}`; e:`{}`'.format(response.text, e.args[0]))

    # .. if we have a model class on input, deserialize the received response into one ..
    if model:
        response.data = self.server.marshal_api.from_dict(None, response.data, model) # type: ignore

    # .. now, return the response to the caller.
    return cast_('Response', response)
# ################################################################################################################################
def get(self, cid:'str', params:'dictnone'=None, *args:'any_', **kwargs:'any_') -> 'Response':
    """ Issues an HTTP GET request - note that no body is ever sent.
    """
    return self.http_request('GET', cid, '', params, *args, **kwargs)

def delete(self, cid:'str', data:'any_'='', params:'dictnone'=None, *args:'any_', **kwargs:'any_') -> 'Response':
    """ Issues an HTTP DELETE request.
    """
    return self.http_request('DELETE', cid, data, params, *args, **kwargs)

def options(self, cid:'str', data:'any_'='', params:'dictnone'=None, *args:'any_', **kwargs:'any_') -> 'Response':
    """ Issues an HTTP OPTIONS request.
    """
    return self.http_request('OPTIONS', cid, data, params, *args, **kwargs)

def post(self, cid:'str', data:'any_'='', params:'dictnone'=None, *args:'any_', **kwargs:'any_') -> 'Response':
    """ Issues an HTTP POST request.
    """
    return self.http_request('POST', cid, data, params, *args, **kwargs)

# An alias - sending a message defaults to POST
send = post

def put(self, cid:'str', data:'str'='', params:'dictnone'=None, *args:'any_', **kwargs:'any_') -> 'Response':
    """ Issues an HTTP PUT request.
    """
    return self.http_request('PUT', cid, data, params, *args, **kwargs)

def patch(self, cid:'str', data:'str'='', params:'dictnone'=None, *args:'any_', **kwargs:'any_') -> 'Response':
    """ Issues an HTTP PATCH request.
    """
    return self.http_request('PATCH', cid, data, params, *args, **kwargs)
def upload(
    self,
    cid, # type: str
    item, # type: str
    field_name = 'data', # type: str
    mime_type = 'text/plain' # type: str
) -> 'Response':
    """ Uploads a file from the local file system to the remote endpoint
    as a streaming multi-part POST request.

    item is a path to an existing local file; field_name is the form field name
    under which the file is submitted. Raises an exception if the path does not
    exist or is not a regular file.
    """
    # Make sure such a file exists. Note that the path is interpolated with str.format -
    # previously a logging-style '%s' placeholder was passed as a separate Exception
    # argument, so the message was never actually formatted.
    if not os.path.exists(item):
        raise Exception('File to upload not found -> `{}`'.format(item))

    # Ensure that the path actually is a file
    if not os.path.isfile(item):
        raise Exception('Path is not a file -> `{}`'.format(item))

    # Extract the file name to be presented to the remote end
    file_name = os.path.basename(item)

    # At this point, we have collected everything needed to upload the file and we can proceed
    with open_rb(item) as file_to_upload:

        # Build a list of fields to be encoded as a multi-part upload
        fields = {
            field_name: (file_name, file_to_upload, mime_type)
        }

        # .. this is the object that builds a multi-part message out of the file ..
        encoder = MultipartEncoder(fields=fields)

        # .. build user headers based on what the encoder produced ..
        headers = {
            'Content-Type': encoder.content_type
        }

        # .. now, we can invoke the remote endpoint with our file on input.
        return self.post(cid, data=encoder, headers=headers)
# ################################################################################################################################
def rest_call(
    self,
    *,
    cid, # type: str
    data='', # type: ignore
    model=None, # type: type_[Model] | None
    callback, # type: callnone
    params=None, # type: strdictnone
    headers=None, # type: strdictnone
    method='', # type: str
    sec_def_name=None, # type: any_
    auth_scopes=None, # type: any_
    log_response=True, # type: bool
) -> 'any_':
    """ Invokes a REST endpoint and returns its response data, optionally mapped
    onto a dataclass model (or a list of models) and post-processed by a callback.

    Exceptions raised by the underlying HTTP call are logged and swallowed
    (best-effort semantics) - the function returns None in that case.
    Raises an exception if the endpoint replied with a non-OK status code.
    """
    # Invoke the system ..
    try:
        response:'Response' = self.http_request(
            method,
            cid,
            data=data,
            sec_def_name=sec_def_name,
            auth_scopes=auth_scopes,
            params=params,
            headers=headers,
        )
    except Exception as e:
        # Note that logger.warning is used - logger.warn is a deprecated alias
        # that was removed in Python 3.13.
        logger.warning('Caught an exception -> %s -> %s', e, format_exc())
    else:

        # .. optionally, log what we received ..
        if log_response:
            logger.info('REST call response received -> %s', response.text)

        if not response.ok:
            raise Exception(response.text)

        # .. extract the underlying data ..
        response_data = response.data # type: ignore

        # .. if we have a model, do make use of it here ..
        if model:

            # .. if this model is actually a list ..
            if is_list(model, True): # type: ignore

                # .. extract the underlying model ..
                model_class:'type_[Model]' = extract_model_class(model) # type: ignore

                # .. build a list that we will map the response to ..
                data:'list_[Model]' = [] # type: ignore

                # .. go through everything we had in the response ..
                for item in response_data: # type: ignore

                    # .. build an actual model instance ..
                    _item = model_class.from_dict(item)

                    # .. and append it to the data that we are producing ..
                    data.append(_item) # type: ignore

            else:
                data:'Model' = model.from_dict(response_data)

        # .. if there is no model, use the response as-is ..
        else:
            data = response_data # type: ignore

        # .. run our callback, if there is any ..
        if callback:
            # NOTE(review): id=id passes the *builtin* id function - there is no local
            # variable of that name in scope. Kept as-is for callback compatibility; confirm.
            data = callback(data, cid=cid, id=id, model=model, callback=callback)

        # .. and return the data to our caller ..
        return data
RESTWrapper = HTTPSOAPWrapper
# ################################################################################################################################
# ################################################################################################################################
class SudsSOAPWrapper(BaseHTTPSOAPWrapper):
    """ A thin wrapper around the suds SOAP library - maintains a queue of suds client
    objects, each configured per the connection's security definition.
    """
    def __init__(self, config:'stranydict') -> 'None':
        """ Stores the configuration, masks the password out of a log-safe copy
        and prepares a queue of suds client connections.
        """
        super(SudsSOAPWrapper, self).__init__(config)
        self.set_auth()
        self.update_lock = RLock()
        self.config = config
        self.config['timeout'] = float(self.config['timeout'])

        # A copy of the configuration that is safe to log - the password is masked out
        self.config_no_sensitive = deepcopy(self.config)
        self.config_no_sensitive['password'] = '***'
        self.address = '{}{}'.format(self.config['address_host'], self.config['address_url_path'])
        self.conn_type = 'Suds SOAP'

        # A queue of suds client objects, filled asynchronously via self.add_client
        self.client = ConnectionQueue(
            self.server,
            self.config['is_active'],
            self.config['pool_size'],
            self.config['queue_build_cap'],
            self.config['id'],
            self.config['name'],
            self.conn_type,
            self.address,
            self.add_client
        )

# ################################################################################################################################

    def set_auth(self) -> 'None':
        """ Configures the security for requests, if any is to be configured at all.
        """
        self.suds_auth = {'username':self.config['username'], 'password':self.config['password']}

# ################################################################################################################################

    def add_client(self) -> 'None':
        """ Builds a single suds client - configured per self.sec_type - and puts it
        in the connection queue. Any error is logged rather than propagated.
        """
        logger.info('About to add a client to `%s` (%s)', self.address, self.conn_type)

        try:

            # Lazy-imported here to make sure gevent monkey patches everything well in advance
            from suds.client import Client
            from suds.transport.https import HttpAuthenticated
            # from suds.transport.https import WindowsHttpAuthenticated
            from suds.wsse import Security, UsernameToken

            client = None
            transport = None

            if self.sec_type == _Basic_Auth:
                transport = HttpAuthenticated(**self.suds_auth)

            elif self.sec_type == _NTLM:

                # Suds
                from suds.transport.http import HttpTransport

                # A transport that authenticates with NTLM via the ntlm3 library
                class WindowsHttpAuthenticated(HttpAuthenticated):
                    def u2handlers(self):
                        from ntlm3 import HTTPNtlmAuthHandler
                        handlers = HttpTransport.u2handlers(self)
                        handlers.append(HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(self.pm))
                        return handlers

                transport = WindowsHttpAuthenticated(**self.suds_auth)

            elif self.sec_type == _WSS:
                security = Security()
                token = UsernameToken(self.suds_auth['username'], self.suds_auth['password'])
                security.tokens.append(token)
                client = Client(self.address, autoblend=True, wsse=security)

            if self.sec_type in {_Basic_Auth, _NTLM}:
                client = Client(self.address, autoblend=True, transport=transport)

            # Still could be either none at all or WSS
            if not self.sec_type:
                # NOTE(review): the timeout is applied only in this no-security branch -
                # confirm whether it should also be passed to the Basic Auth / NTLM / WSS
                # clients built above.
                client = Client(self.address, autoblend=True, timeout=self.config['timeout'])

            if client:
                _ = self.client.put_client(client)
            else:
                logger.warning('SOAP client to `%s` is None', self.address)

        except Exception:
            logger.warning('Error while adding a SOAP client to `%s` (%s) e:`%s`', self.address, self.conn_type, format_exc())

# ################################################################################################################################

    def build_client_queue(self) -> 'None':
        """ Builds the underlying queue of suds clients, serialized under the update lock.
        """
        with self.update_lock:
            self.client.build_queue()
# ################################################################################################################################
# ################################################################################################################################
| 36,951
|
Python
|
.py
| 678
| 44.390855
| 158
| 0.505144
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,108
|
object_.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/kvdb/object_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Zato
from zato.server.connection.kvdb.core import BaseRepo
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anylist
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class ObjectRepo(BaseRepo):
    """ Stores arbitrary objects as key/value pairs, in RAM only, without backing persistent storage.
    """

    # A sentinel that lets us distinguish between keys that are genuinely missing
    # and keys that map to falsy values such as 0, '' or None.
    _missing = object()

    def __init__(
        self,
        name='<ObjectRepo-name>', # type: str
        data_path='<ObjectRepo-data_path>' # type: str
    ) -> 'None':
        super().__init__(name, data_path)

# ################################################################################################################################

    def _get(self, object_id:'str', default:'any_'=None, raise_if_not_found:'bool'=False) -> 'any_':
        """ Returns the object stored under object_id. If there is no such key,
        either raises KeyError (when raise_if_not_found is True) or returns default.

        Note that default was previously ignored and that keys mapped to falsy
        values were treated as missing - both are fixed here.
        """
        value = self.in_ram_store.get(object_id, self._missing)
        if value is not self._missing:
            return value
        elif raise_if_not_found:
            raise KeyError('Object not found `{}`'.format(object_id))
        else:
            return default

# ################################################################################################################################

    def _set(self, object_id:'str', value:'any_') -> 'None':
        """ Stores the value under object_id and updates the repo's sync metadata.
        """
        self.in_ram_store[object_id] = value
        self.post_modify_state()

# ################################################################################################################################

    def _delete(self, object_id:'str') -> 'None':
        """ Deletes the object stored under object_id - it is not an error if it does not exist.
        """
        self.in_ram_store.pop(object_id, None)

# ################################################################################################################################

    def _remove_all(self) -> 'None':
        """ Removes all the objects from the repository.

        Note that the store is a dict, so it has to be emptied with .clear() -
        the previous slice assignment (self.in_ram_store[:] = []) would raise
        TypeError on a dict.
        """
        self.in_ram_store.clear()

# ################################################################################################################################

    def _get_size(self) -> 'int':
        """ Returns the number of objects currently stored.
        """
        return len(self.in_ram_store)

# ################################################################################################################################

    def _get_many(self, object_id_list:'anylist', add_object_id_key:'bool'=True) -> 'any_':
        """ Returns a dict mapping each input ID to its value; IDs without a truthy
        value are skipped. Each returned value also receives an 'object_id' key.
        """
        out = {}
        for object_id in object_id_list: # type: str
            value = self.in_ram_store.get(object_id)
            if value:
                value['object_id'] = object_id
                out[object_id] = value
        return out
# ################################################################################################################################
# ################################################################################################################################
| 3,702
|
Python
|
.py
| 63
| 52.714286
| 130
| 0.289787
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,109
|
api.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/kvdb/api.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.server.connection.kvdb.core import ObjectCtx, KVDB
from zato.server.connection.kvdb.list_ import ListRepo
from zato.server.connection.kvdb.number import IntData, NumberRepo
# Re-bind the imported names so that flake8 does not flag them as unused imports -
# this module exists only to re-export the KVDB API under one roof.
IntData = IntData
KVDB = KVDB # type: ignore
ListRepo = ListRepo
NumberRepo = NumberRepo
ObjectCtx = ObjectCtx
| 470
|
Python
|
.py
| 15
| 30.066667
| 66
| 0.784922
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,110
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/kvdb/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,111
|
number.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/kvdb/number.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import sys
from datetime import datetime
from logging import getLogger
from operator import add as op_add, gt as op_gt, lt as op_lt, sub as op_sub
# Zato
from zato.common.api import StatsKey
from zato.common.typing_ import dataclass
from zato.server.connection.kvdb.core import BaseRepo
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anydict, callable_, callnone
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
# A shortcut bound once at import time - used for timestamping counter updates below
utcnow = datetime.utcnow

# Local aliases to the StatsKey constants, bound once at import time for brevity
_stats_key_current_value = StatsKey.CurrentValue
_stats_key_per_key_min = StatsKey.PerKeyMin
_stats_key_per_key_max = StatsKey.PerKeyMax
_stats_key_per_key_mean = StatsKey.PerKeyMean
_stats_key_per_key_value = StatsKey.PerKeyValue
_stats_key_per_key_last_timestamp = StatsKey.PerKeyLastTimestamp
_stats_key_per_key_last_duration = StatsKey.PerKeyLastDuration

# The default upper limit for counters - the largest int the platform supports natively
max_value = sys.maxsize
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class IntData:

    # The counter's current value
    value: int

    # When the value was last updated - presumably an ISO-8601 string,
    # matching the utcnow().isoformat() calls in NumberRepo; confirm.
    timestamp: str

    # Duration of the last operation associated with this counter -
    # NOTE(review): units are not established by this module; confirm with callers.
    last_duration: int
# ################################################################################################################################
# ################################################################################################################################
class NumberRepo(BaseRepo):
    """ Stores integer counters for string labels, along with per-key min/max/mean
    statistics of observed durations.
    """
    def __init__(
        self,
        name, # type: str
        data_path, # type: str
        sync_threshold=120_000, # type: int
        sync_interval=120_000, # type: int
        max_value=max_value, # type: int
        allow_negative=True # type: bool
    ) -> 'None':
        super().__init__(name, data_path, sync_threshold, sync_interval)

        # We will never allow for a value to be greater than that
        self.max_value = max_value

        # If False, value will never be less than zero
        self.allow_negative = allow_negative

        # Main in-RAM database of objects
        self.in_ram_store = {
            _stats_key_current_value: {},
        } # type: anydict

        # A convenience reference pointing directly at the per-key counters dict above
        self.current_value = self.in_ram_store[_stats_key_current_value] # type: anydict

# ################################################################################################################################

    def _change_value(
        self,
        value_op, # type: callable_
        cmp_op, # type: callable_
        value_limit, # type: int
        key, # type: any_
        change_by, # type: int
        value_limit_condition=None, # type: callnone
        default_value=0 # type: int
    ) -> 'int':
        """ Applies value_op (add or sub) to the counter stored under key, clamping
        the result to value_limit unless value_limit_condition allows exceeding it.
        Returns the value that ends up stored.
        """
        # Get current value ..
        current_data = self.current_value.get(key) # type: any_

        # .. or set a default to 0, if nothing is found ..
        if not current_data:

            # .. zero out all the counters ..
            current_data = {
                _stats_key_per_key_value: default_value,
                _stats_key_per_key_last_timestamp: utcnow().isoformat(),
                _stats_key_per_key_last_duration: None,
                _stats_key_per_key_min: None,
                _stats_key_per_key_max: None,
                _stats_key_per_key_mean: None,
            }

            # .. and assign them to our key ..
            self.current_value[key] = current_data

        # .. get the new value ..
        current_data[_stats_key_per_key_value] = value_op(current_data[_stats_key_per_key_value], change_by)

        # .. does the new value exceed the limit? ..
        is_limit_exceeded = cmp_op(current_data[_stats_key_per_key_value], value_limit)

        # .. we may have a condition function that tells us whether to allow the new value beyond the limit ..
        if value_limit_condition and value_limit_condition():

            # Do nothing because we already have the new value
            # and we merely confirmed that it should not be changed
            # due to its reaching a limit.
            pass

        # .. otherwise, without such a function, we do not allow it ..
        else:
            if is_limit_exceeded:
                current_data[_stats_key_per_key_value] = value_limit

        # .. update the last used time as well ..
        current_data[_stats_key_per_key_last_timestamp] = utcnow().isoformat()

        # .. store the new value in RAM ..
        self.current_value[key] = current_data

        # .. update metadata ..
        self.post_modify_state()

        # .. finally, return the value set.
        return current_data[_stats_key_per_key_value]

# ################################################################################################################################

    def _is_negative_allowed(self) -> 'bool':
        """ Used as the limit condition for decrements - negative values are allowed if so configured.
        """
        return self.allow_negative

# ################################################################################################################################

    def _incr(self, key:'str', change_by:'int'=1) -> 'int':
        """ Increments the counter under key, clamped from above at self.max_value.
        """
        value_op = op_add
        cmp_op = op_gt
        value_limit = self.max_value

        return self._change_value(value_op, cmp_op, value_limit, key, change_by)

# ################################################################################################################################

    def _decr(self, key:'str', change_by:'int'=1) -> 'int':
        """ Decrements the counter under key, clamped from below at zero
        unless allow_negative is True.
        """
        value_op = op_sub
        cmp_op = op_lt
        value_limit = 0

        return self._change_value(value_op, cmp_op, value_limit, key, change_by, self._is_negative_allowed)

# ################################################################################################################################

    def _get(self, key:'str') -> 'anydict':
        """ Returns the full per-key statistics dict for the given key (or None if missing).
        """
        return self.current_value.get(key) # type: ignore

# ################################################################################################################################

    def _remove_all(self) -> 'None':
        """ Removes all the per-key counters while keeping the containing dict object intact.
        """
        self.current_value.clear()

# ################################################################################################################################

    def _clear(self):
        # type: () -> None
        """ Zeroes out every top-level key of the in-RAM store.

        NOTE(review): this replaces the dict stored under _stats_key_current_value
        with the integer 0, which also orphans the self.current_value reference -
        confirm whether clearing the nested dict was intended instead.
        """
        for key in self.in_ram_store: # type: str
            self.in_ram_store[key] = 0

# ################################################################################################################################

    def set_last_duration(self, key:'str', current_duration:'float') -> 'None':
        """ Records the duration of the key's most recent operation and refreshes
        the running min / max / mean statistics for that key.
        """
        # Numpy
        import numpy as np

        with self.update_lock:

            per_key_dict = self.current_value[key]
            previous_duration = per_key_dict[_stats_key_per_key_last_duration]

            if previous_duration:
                to_compare = [previous_duration, current_duration]
                new_min = min(to_compare)
                new_max = max(to_compare)
                new_mean = np.mean(to_compare)
            else:
                new_min = current_duration
                new_max = current_duration
                new_mean = current_duration

            # We need to check the exact class here instead of using isinstance(new_mean, float)
            # because numpy.float64 is a subclass of float. It is good when the mean
            # is used in computations but when it comes to JSON serialisation it really
            # needs to be a float rather than np.float64. That is why here we turn float64 into a real float.
            uses_numpy = new_mean.__class__ is np.float64
            new_mean = new_mean.item() if uses_numpy else new_mean # type: ignore

            per_key_dict[_stats_key_per_key_last_duration] = current_duration
            per_key_dict[_stats_key_per_key_min] = new_min
            per_key_dict[_stats_key_per_key_max] = new_max
            per_key_dict[_stats_key_per_key_mean] = new_mean
# ################################################################################################################################
# ################################################################################################################################
| 9,169
|
Python
|
.py
| 165
| 47.606061
| 130
| 0.433717
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,112
|
core.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/kvdb/core.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import os
from logging import getLogger
# gevent
from gevent.lock import RLock
# orjson
from orjson import dumps as json_dumps
# Zato
from zato.common.api import ZatoKVDB
from zato.common.in_ram import InRAMStore
from zato.common.ext.dataclasses import dataclass
from zato.common.util.json_ import json_loads
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anylist, stranydict, strnone
from zato.server.connection.kvdb.list_ import ListRepo
from zato.server.connection.kvdb.number import NumberRepo
from zato.server.connection.kvdb.object_ import ObjectRepo
ListRepo = ListRepo
NumberRepo = NumberRepo
ObjectRepo = ObjectRepo
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class ObjectCtx:

    # A unique identifier assigned to this event by Zato
    id: 'str'

    # A correlation ID assigned by Zato - multiple events may have the same CID
    cid: 'strnone' = None

    # Timestamp of this event, as assigned by Zato
    timestamp: 'strnone' = None

    # The actual business data
    data: 'any_' = None
# ################################################################################################################################
# ################################################################################################################################
class BaseRepo(InRAMStore):
def __init__(
self,
name, # type: str
data_path, # type: str
sync_threshold=ZatoKVDB.DefaultSyncThreshold, # type: int
sync_interval=ZatoKVDB.DefaultSyncInterval # type: int
) -> 'None':
super().__init__(sync_threshold, sync_interval)
# In-RAM database of objects
self.in_ram_store = {}
# Used to synchronise updates
self.lock = RLock()
# Our user-visible name
self.name = name
# Where we persist data on disk
self.data_path = data_path
# ################################################################################################################################
def _append(self, *args:'any_', **kwargs:'any_') -> 'ObjectCtx':
raise NotImplementedError('BaseRepo._append')
def _get(self, *args:'any_', **kwargs:'any_') -> 'ObjectCtx':
raise NotImplementedError('BaseRepo._get')
def _set(self, *args:'any_', **kwargs:'any_') -> 'None':
raise NotImplementedError('BaseRepo._set')
def _get_list(self, *args:'any_', **kwargs:'any_') -> 'list[ObjectCtx]':
raise NotImplementedError('BaseRepo._get_list')
def _delete(self, *args:'any_', **kwargs:'any_') -> 'list[ObjectCtx]':
raise NotImplementedError('BaseRepo._delete')
def _remove_all(self, *args:'any_', **kwargs:'any_') -> 'None':
raise NotImplementedError('BaseRepo._remove_all')
def _clear(self, *args:'any_', **kwargs:'any_') -> 'None':
raise NotImplementedError('BaseRepo._clear')
def _get_size(self, *args:'any_', **kwargs:'any_') -> 'int':
raise NotImplementedError('BaseRepo._get_size')
def _incr(self, *args:'any_', **kwargs:'any_') -> 'int':
raise NotImplementedError('BaseRepo._incr')
def _decr(self, *args:'any_', **kwargs:'any_') -> 'int':
raise NotImplementedError('BaseRepo._decr')
# ################################################################################################################################
def append(self, *args:'any_', **kwargs:'any_'):
with self.update_lock:
return self._append(*args, **kwargs)
# ################################################################################################################################
def get(self, *args:'any_', **kwargs:'any_'):
with self.update_lock:
return self._get(*args, **kwargs)
# ################################################################################################################################
def set(self, *args:'any_', **kwargs:'any_'):
with self.update_lock:
return self._set(*args, **kwargs)
# ################################################################################################################################
def get_list(self, *args:'any_', **kwargs:'any_'):
with self.update_lock:
return self._get_list(*args, **kwargs)
# ################################################################################################################################
def delete(self, *args:'any_', **kwargs:'any_'):
with self.update_lock:
return self._delete(*args, **kwargs)
# ################################################################################################################################
def remove_all(self, *args:'any_', **kwargs:'any_'):
with self.update_lock:
return self._remove_all(*args, **kwargs)
# ################################################################################################################################
def clear(self, *args:'any_', **kwargs:'any_'):
with self.update_lock:
return self._clear(*args, **kwargs)
# ################################################################################################################################
def get_size(self, *args:'any_', **kwargs:'any_'):
with self.update_lock:
return self._get_size(*args, **kwargs)
# ################################################################################################################################
def incr(self, key, *args:'any_', **kwargs:'any_'):
lock = self.get_lock(key)
with lock:
return self._incr(key, *args, **kwargs)
# ################################################################################################################################
def decr(self, key, *args:'any_', **kwargs:'any_'):
lock = self.get_lock(key)
with lock:
return self._decr(key, *args, **kwargs)
# ################################################################################################################################
def _loads(self, data:'bytes') -> 'None':
try:
data_ = json_loads(data) # type: dict
except Exception as e:
logger.info('KVDB load error (%s -> %s) -> %s', self.name, self.data_path, e)
else:
if data_:
# We may have already some pre-defined keys in RAM that we only need to update ..
if self.in_ram_store:
for key, value in data_.items():
self.in_ram_store[key].update(value)
# .. otherwise, we load all the data as is because we assume know there are no keys in RAM yet.
self.in_ram_store.update(data_)
# ################################################################################################################################
def loads(self, data:'bytes') -> 'None':
with self.update_lock:
return self._loads(data)
# ################################################################################################################################
def load_data(self) -> 'None':
with self.update_lock:
if os.path.exists(self.data_path):
with open(self.data_path, 'rb') as f:
data = f.read()
if data:
self._loads(data)
else:
logger.info('Skipping repo data path `%s` (%s)', self.data_path, self.name)
# ################################################################################################################################
def _dumps(self):
    # type: () -> bytes
    """ Serialises the in-RAM store to JSON bytes. Not synchronised by itself -
    callers are expected to hold update_lock (as dumps/save_data do).
    """
    return json_dumps(self.in_ram_store)
# ################################################################################################################################
def dumps(self):
    # type: () -> bytes
    """ Thread-safe serialisation of the in-RAM store to JSON bytes.
    """
    with self.update_lock:
        return self._dumps()
# ################################################################################################################################
def save_data(self) -> 'None':
    """ Serialises the in-RAM store and writes it out to self.data_path,
    overwriting any previous contents.
    """
    with self.update_lock:
        with open(self.data_path, 'wb') as f:
            f.write(self._dumps())
# ################################################################################################################################
def set_data_path(self, data_path:'str') -> 'None':
    """ Assigns the filesystem path that load_data/save_data will use.
    """
    self.data_path = data_path
# ################################################################################################################################
def sync_state(self) -> 'None':
    """ Persists the current in-RAM state to disk by delegating to save_data.
    """
    self.save_data()
# ################################################################################################################################
def _get_many(self, object_id_list, add_object_id_key=True):
# type: (list) -> list
out = {}
for object_id in object_id_list: # type: str
value = self.in_ram_store.get(object_id)
if value:
value['object_id'] = object_id
out[object_id] = value
return out
# ################################################################################################################################
def get_many(self, *args:'any_', **kwargs:'any_') -> 'anylist':
    """ Thread-safe wrapper around _get_many.

    NOTE(review): _get_many builds and returns a dict, while this annotation
    declares 'anylist' - confirm which is intended.
    """
    with self.update_lock:
        return self._get_many(*args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
class KVDB:
    """ Manages KVDB repositories - a registry mapping repository names to
    repository objects, with convenience accessors delegating to them.
    """
    def __init__(self):
        # Maps repository names (str) to repository objects
        self.repo = {} # type: stranydict

# ################################################################################################################################

    def internal_create_list_repo(
        self,
        repo_name,    # type: str
        data_path='', # type: str
        max_size=1000, # type: int
        page_size=50   # type: int
    ) -> 'ListRepo':
        """ Registers and returns a new ListRepo, or the existing one of that name.
        """
        # Zato
        from zato.server.connection.kvdb.list_ import ListRepo
        return self.repo.setdefault(repo_name, ListRepo(repo_name, data_path, max_size, page_size))

# ################################################################################################################################

    def internal_create_number_repo(
        self,
        repo_name,    # type: str
        data_path='', # type: str
        max_size=1000, # type: int
        page_size=50   # type: int
    ) -> 'NumberRepo':
        """ Registers and returns a new NumberRepo, or the existing one of that name.
        """
        # Zato
        from zato.server.connection.kvdb.number import NumberRepo
        return self.repo.setdefault(repo_name, NumberRepo(repo_name, data_path, max_size, page_size))

# ################################################################################################################################

    def internal_create_object_repo(
        self,
        repo_name,   # type: str
        data_path='' # type: str
    ) -> 'ObjectRepo':
        """ Registers and returns a new ObjectRepo, or the existing one of that name.
        """
        # Zato
        from zato.server.connection.kvdb.object_ import ObjectRepo
        return self.repo.setdefault(repo_name, ObjectRepo(repo_name, data_path))

# ################################################################################################################################

    def get(self, repo_name:'str') -> 'any_':
        """ Returns the repository of the given name or None if there is none.
        """
        return self.repo.get(repo_name)

# ################################################################################################################################

    def append(self, repo_name:'str', ctx:'ObjectCtx') -> 'None':
        """ Appends an object to the named list repository.
        """
        self.repo[repo_name].append(ctx)

# ################################################################################################################################

    def get_object(self, repo_name:'str', object_id:'str') -> 'ObjectCtx':
        """ Returns one object, by ID, from the named repository.
        """
        return self.repo[repo_name].get(object_id)

# ################################################################################################################################

    def get_list(self, repo_name:'str', cur_page:'int'=1, page_size:'int'=50) -> 'anylist':
        """ Returns one page of objects from the named repository.
        """
        return self.repo[repo_name].get_list(cur_page, page_size)

# ################################################################################################################################

    def delete(self, repo_name:'str', object_id:'str') -> 'any_':
        """ Deletes one object, by ID, from the named repository.
        """
        return self.repo[repo_name].delete(object_id)

# ################################################################################################################################

    def remove_all(self, repo_name:'str') -> 'None':
        """ Removes all objects from the named repository.
        """
        self.repo[repo_name].remove_all()

# ################################################################################################################################

    def get_size(self, repo_name:'str') -> 'int':
        """ Returns how many objects the named repository holds.
        """
        return self.repo[repo_name].get_size()
# ################################################################################################################################
# ################################################################################################################################
| 14,657
|
Python
|
.py
| 258
| 49.503876
| 130
| 0.360442
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,113
|
list_.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/kvdb/list_.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# gevent
from gevent.lock import RLock
# Zato
from zato.common.util.search import SearchResults
from zato.server.connection.kvdb.core import BaseRepo, ObjectCtx
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class ListRepo(BaseRepo):
    """ Stores arbitrary objects, as a list, in RAM only, without backing persistent storage.
    """
    def __init__(
        self,
        name='<ListRepo-name>',           # type: str
        data_path='<ListRepo-data_path>', # type: str
        max_size=1000, # type: int
        page_size=50   # type: int
    ) -> 'None':
        super().__init__(name, data_path)

        # How many objects we will keep at most
        self.max_size = max_size

        # How many objects to return at most in list responses
        self.page_size = page_size

        # In-RAM database of objects - newest entries are at the end of the list
        self.in_ram_store = [] # type: list[ObjectCtx]

        # Used to synchronise updates
        self.lock = RLock()

# ################################################################################################################################

    def _append(self, ctx:'ObjectCtx') -> 'ObjectCtx':
        """ Appends a new object, evicting the oldest entries once max_size is exceeded.
        """
        # Push new data ..
        self.in_ram_store.append(ctx)

        # .. and ensure our max_size is not exceeded ..
        if len(self.in_ram_store) > self.max_size:

            # .. we maintain a FIFO list, deleting the oldest entries first.
            # (Previously the tail was deleted instead, which discarded the
            # newest entries, including the one just appended above.)
            del self.in_ram_store[:len(self.in_ram_store) - self.max_size]

        return ctx

# ################################################################################################################################

    def _get(self, object_id:'str') -> 'any_':
        """ Returns the object of the given ID or raises KeyError if it does not exist.
        """
        for item in self.in_ram_store: # type: ObjectCtx
            if item.id == object_id:
                return item
        else:
            raise KeyError('Object not found `{}`'.format(object_id))

# ################################################################################################################################

    def _get_list(self, cur_page:'int'=1, page_size:'int'=50) -> 'dict':
        """ Returns one page of objects, as a dict produced by SearchResults.
        """
        search_results = SearchResults.from_list(self.in_ram_store, cur_page, page_size)
        return search_results.to_dict()

# ################################################################################################################################

    def _delete(self, object_id:'str') -> 'any_':
        """ Deletes and returns the object of the given ID; returns None if not found.
        """
        for item in self.in_ram_store: # type: ObjectCtx
            if item.id == object_id:
                self.in_ram_store.remove(item)
                return item

# ################################################################################################################################

    def _remove_all(self) -> 'None':
        """ Empties the repository in place.
        """
        self.in_ram_store[:] = []

# ################################################################################################################################

    def _get_size(self) -> 'int':
        """ Returns the number of objects currently stored.
        """
        return len(self.in_ram_store)
# ################################################################################################################################
# ################################################################################################################################
| 4,220
|
Python
|
.py
| 74
| 50.689189
| 130
| 0.33982
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,114
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/zmq_/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,115
|
outgoing.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/zmq_/outgoing.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
# Zato
from zato.common.api import NO_DEFAULT_VALUE
from zato.server.store import BaseAPI, BaseStore
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class ZMQFacade:
    """ A ZeroMQ message API for services - a thin facade over the outgoing
    ZeroMQ connections API object.
    """
    def __init__(self, zmq_out_api):
        # The API object that holds all the outgoing ZeroMQ connectors
        self.zmq_out_api = zmq_out_api

# ################################################################################################################################

    def __getitem__(self, name):
        """ Dict-like access to individual connectors, by name.
        """
        return self.zmq_out_api.connectors[name]

# ################################################################################################################################

    def send(self, msg, out_name, *args, **kwargs):
        """ Preserved for backwards-compatibility with Zato < 3.0.
        """
        if self.zmq_out_api == NO_DEFAULT_VALUE:
            raise ValueError('ZeroMQ connections are disabled - ensure that component_enabled.zeromq in server.conf is True')
        self.zmq_out_api.connectors[out_name].send(msg, *args, **kwargs)

# ################################################################################################################################

    def conn(self):
        """ Returns self. Added to make the facade look like other outgoing
        connection wrappers.
        """
        return self
# ################################################################################################################################
# ################################################################################################################################
class ZMQAPI(BaseAPI):
    """ API to obtain ZeroMQ connections through. The body is intentionally
    empty - all lookup behaviour is inherited from BaseAPI.
    """
# ################################################################################################################################
# ################################################################################################################################
class ZMQConnStore(BaseStore):
    """ Stores outgoing connections to ZeroMQ.
    """
    def create_impl(self, config, config_no_sensitive):
        # Deliberately a no-op - this store does not build a connection
        # object of its own when an outgoing connection is created.
        pass
# ################################################################################################################################
# ################################################################################################################################
| 3,008
|
Python
|
.py
| 51
| 54.588235
| 130
| 0.316076
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,116
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/jms_wmq/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,117
|
outgoing.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/jms_wmq/outgoing.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.exception import ConnectorClosedException, IBMMQException
# ################################################################################################################################
if 0:
from zato.server.service import Service
Service = Service
# ################################################################################################################################
class WMQFacade:
    """ An IBM MQ facade for services so they aren't aware that sending WMQ
    messages actually requires us to use the Zato broker underneath.
    """

# ################################################################################################################################

    def __init__(self, service):
        # Current service on whose behalf we execute
        self.service = service # type: Service

# ################################################################################################################################

    def send(self, msg, outconn_name, queue_name, correlation_id='', msg_id='', reply_to='', expiration=None, priority=None,
        delivery_mode=None, raise_on_error=True):
        """ Puts a message on an IBM MQ queue via the server's IBM MQ connector.
        If the connector is down, either raises IBMMQException or merely logs,
        depending on raise_on_error.
        """
        request = {
            'data': msg,
            'outconn_name': outconn_name,
            'queue_name': queue_name,
            'correlation_id': correlation_id,
            'msg_id': msg_id,
            'reply_to': reply_to,
            'expiration': expiration,
            'priority': priority,
            'delivery_mode': delivery_mode,
        }
        try:
            return self.service.server.connector_ibm_mq.send_wmq_message(request)
        except ConnectorClosedException as e:
            # A distinct name so as not to shadow the msg parameter
            error_msg = 'IBM MQ connector is unavailable ({} -> {}); `{}'.format(outconn_name, queue_name, e.inner_exc.args[0])
            if raise_on_error:
                raise IBMMQException(error_msg)
            else:
                self.service.logger.info(error_msg)

# ################################################################################################################################

    def conn(self):
        """ Returns self. Added to make the facade look like other outgoing connection wrappers.
        """
        return self
# ################################################################################################################################
| 2,648
|
Python
|
.py
| 50
| 44.68
| 130
| 0.427188
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,118
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/jms_wmq/jms/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# JMS constants

# Message delivery modes - values match the JMS specification's DeliveryMode constants
DELIVERY_MODE_NON_PERSISTENT = 1
DELIVERY_MODE_PERSISTENT = 2

# Receive timeouts - block indefinitely vs. return immediately if no message is ready
RECEIVE_TIMEOUT_INDEFINITE_WAIT = 0
RECEIVE_TIMEOUT_NO_WAIT = -1

# Defaults applied when callers do not provide their own values
DEFAULT_DELIVERY_MODE = DELIVERY_MODE_PERSISTENT
DEFAULT_TIME_TO_LIVE = 0
class BaseException(Exception):
    """ Base class for all JMS-related exceptions.

    NOTE(review): the name shadows the builtin BaseException within this
    module - kept as-is for backwards compatibility with existing imports.
    """
class NoMessageAvailableException(BaseException):
    """ Raised when the jms_template's call to receive returned no message
    in the expected wait interval, i.e. the receive timeout elapsed.
    """
class WebSphereMQException(BaseException):
    """ Class for exceptions related to WebSphereMQ only, carrying the
    MQ completion and reason codes alongside the message.
    """
    def __init__(self, message=None, completion_code=None, reason_code=None):
        super().__init__(message)

        # MQ-level details accompanying the error
        self.message = message
        self.completion_code = completion_code
        self.reason_code = reason_code
| 987
|
Python
|
.py
| 27
| 32.851852
| 77
| 0.729758
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,119
|
core.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/jms_wmq/jms/core.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
Copyright 2006-2008 SpringSource (http://springsource.com), All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# stdlib
import logging
import locale
from base64 import b64encode
from binascii import hexlify
from string import Template
from io import StringIO
# Python 2/3 compatibility
from zato.common.py23_.past.builtins import basestring, unicode
from zato.common.py23_ import pickle_dumps
# Zato
from zato.server.connection.jms_wmq.jms import DEFAULT_DELIVERY_MODE, BaseException
# ################################################################################################################################
logger_zato = logging.getLogger('zato')
# ################################################################################################################################
# These attributes have special meaning to connection factories.
reserved_attributes = {
    'text', 'jms_correlation_id', 'jms_delivery_mode', 'jms_destination', 'jms_expiration',
    'jms_message_id', 'jms_priority', 'jms_redelivered', 'jms_reply_to', 'jms_timestamp', 'max_chars_printed',
    'JMS_IBM_Report_Exception', 'JMS_IBM_Report_Expiration', 'JMS_IBM_Report_COA', 'JMS_IBM_Report_COD', 'JMS_IBM_Report_PAN',
    'JMS_IBM_Report_NAN', 'JMS_IBM_Report_Pass_Msg_ID', 'JMS_IBM_Report_Pass_Correl_ID', 'JMS_IBM_Report_Discard_Msg',
    'JMSXGroupID', 'JMSXGroupSeq', 'JMS_IBM_Feedback', 'JMS_IBM_Last_Msg_In_Group', 'JMSXUserID', 'JMS_IBM_PutTime',
    'JMS_IBM_PutDate', 'JMSXAppID'
}

# Magic methods are also forbidden - TextMessage.__str__ subtracts this set
# from dir(self) in order to find user-defined attributes only.
reserved_attributes.update(set(dir(object) + ['__weakref__', '__dict__', '__module__']))

# Template used by TextMessage.__str__ to render the standard JMS headers.
text_message_template = """
JMS message class: jms_text
jms_delivery_mode: $jms_delivery_mode
jms_expiration: $jms_expiration
jms_priority: $jms_priority
jms_message_id: $jms_message_id
jms_timestamp: $jms_timestamp
jms_correlation_id: $jms_correlation_id
jms_destination: $jms_destination
jms_reply_to: $jms_reply_to
jms_redelivered: $jms_redelivered
""".lstrip()
# ################################################################################################################################
class MessageConverter:
    """ Interface for converting between application objects and JMS messages -
    used by JMSTemplate's convert_and_send/receive_and_convert.
    """
    def to_message(self, object_to_be_converted_to_a_message):
        # Subclasses turn an application object into a message instance
        raise NotImplementedError('Should be implemented by subclasses')

    def from_message(self, message_to_be_converted_to_an_object):
        # Subclasses turn a message instance back into an application object
        raise NotImplementedError('Should be implemented by subclasses')
# ################################################################################################################################
class JMSTemplate:
    """ A convenience layer over a connection factory - resolves destinations,
    wraps plain strings in TextMessage instances and optionally converts
    application objects through a pluggable MessageConverter.
    """
    def __init__(self, factory=None, delivery_persistent=None, priority=None, time_to_live=None, message_converter=None,
        default_destination=None):

        self.factory = factory

        # QoS
        self.delivery_persistent = delivery_persistent
        self.priority = priority
        self.time_to_live = time_to_live

        self.message_converter = message_converter
        self.default_destination = default_destination

# ################################################################################################################################

    def _resolve_destination(self, destination):
        """ Returns the destination to use - the explicit one if given, otherwise
        the default one; raises BaseException if neither is available.
        (Previously this logic was duplicated verbatim in send and receive.)
        """
        if destination:
            return destination
        elif self.default_destination:
            return self.default_destination
        else:
            raise BaseException('No destination given and no default destination set')

# ################################################################################################################################

    def convert_and_send(self, object_, destination=None):
        """ Converts an application object to a message and sends it.
        """
        if not self.message_converter:
            raise BaseException("Couldn't send the message, no message converter set")
        self.send(self.message_converter.to_message(object_), destination)

# ################################################################################################################################

    def send(self, message, destination=None):
        """ Sends a message to the given (or default) destination - a plain
        string is wrapped in a TextMessage first.
        """
        if isinstance(message, basestring):
            message = TextMessage(message)

        dest = self._resolve_destination(destination)
        message.jms_destination = dest
        self.factory.send(message, dest)

# ################################################################################################################################

    def receive(self, destination=None, timeout=1000):
        """ Receives a message from the given (or default) destination.
        """
        return self.factory.receive(self._resolve_destination(destination), timeout)

# ################################################################################################################################

    def receive_and_convert(self, destination=None, timeout=1000):
        """ Receives a message and converts it back to an application object.
        """
        if not self.message_converter:
            raise BaseException("Couldn't receive a message, no message converter set")
        return self.message_converter.from_message(self.receive(destination, timeout))

# ################################################################################################################################

    def open_dynamic_queue(self):
        """ Delegates opening of a dynamic queue to the factory.
        """
        return self.factory.open_dynamic_queue()

# ################################################################################################################################

    def close_dynamic_queue(self, dynamic_queue_name):
        """ Delegates closing of a dynamic queue to the factory.
        """
        self.factory.close_dynamic_queue(dynamic_queue_name)
# ################################################################################################################################
class TextMessage:
    """ A JMS text message - the payload plus the standard JMS headers and
    WMQ-specific details, as produced and consumed by the IBM MQ layer.
    """
    def __init__(self, text=None, jms_correlation_id=None, jms_delivery_mode=None, jms_destination=None, jms_expiration=None,
        jms_message_id=None, jms_priority=None, jms_redelivered=None, jms_reply_to=None, jms_timestamp=None,
        max_chars_printed=100):

        # Message payload
        self.text = text

        # Standard JMS headers
        self.jms_correlation_id = jms_correlation_id
        self.jms_delivery_mode = jms_delivery_mode or DEFAULT_DELIVERY_MODE
        self.jms_destination = jms_destination
        self.jms_expiration = jms_expiration
        self.jms_message_id = jms_message_id
        self.jms_priority = jms_priority
        self.jms_redelivered = jms_redelivered
        self.jms_reply_to = jms_reply_to
        self.jms_timestamp = jms_timestamp

        # How much of the payload __str__ may include
        self.max_chars_printed = max_chars_printed

        # WMQ-specific details - initialised to None here; presumably assigned
        # by the receiving side after a message arrives (they are not
        # constructor parameters).
        self.put_date = None
        self.put_time = None
        self.mqmd = None

# ################################################################################################################################

    def _get_basic_data(self):
        # NOTE(review): hexlify requires bytes - this assumes jms_message_id
        # and jms_correlation_id are byte strings by the time this runs; a
        # None value would raise here. Confirm against the senders.
        return {
            'delivery_mode': self.jms_delivery_mode,
            'expiration':self.jms_expiration,
            'priority':self.jms_priority,
            'msg_id':hexlify(self.jms_message_id),
            'put_date':self.put_date,
            'put_time':self.put_time,
            'correlation_id':hexlify(self.jms_correlation_id),
            'destination':self.jms_destination,
            'reply_to':self.jms_reply_to,
            'redelivered':self.jms_redelivered,
            # The raw MQMD structure is pickled and base64-encoded so the
            # whole dict is JSON-friendly
            'mqmd': b64encode(pickle_dumps(self.mqmd)).decode('utf8')
        }

# ################################################################################################################################

    def to_dict(self, include_text=True):
        """ Returns the message as a dict, decoding any bytes values to str.
        The payload is included only when include_text is True.
        """
        data = self._get_basic_data()
        data['text'] = self.text if include_text else None

        for k, v in data.items():
            if isinstance(v, bytes):
                data[k] = v.decode('utf8')

        return data

# ################################################################################################################################

    def __str__(self):
        """ Renders the standard JMS headers, any user-set attributes and up to
        max_chars_printed characters of the payload.
        """
        basic_data = self._get_basic_data()
        buff = StringIO()
        buff.write(Template(text_message_template).safe_substitute(basic_data))

        # Any attribute not claimed by JMS/WMQ or Python internals is
        # considered user-defined and gets printed too
        user_attrs = set(dir(self)) - reserved_attributes
        user_attrs = list(user_attrs)
        user_attrs.sort()

        if user_attrs:
            for user_attr in user_attrs:
                user_attr_value = getattr(self, user_attr)
                if isinstance(user_attr_value, unicode):
                    user_attr_value = user_attr_value.encode('utf-8')
                buff.write(' %s:%s\n' % (user_attr, user_attr_value))

        if self.text is not None:
            # Truncate the payload to at most max_chars_printed characters.
            # NOTE(review): encoding a unicode payload yields bytes, which
            # StringIO.write rejects under Python 3 - presumably payloads
            # reaching this path are already str; confirm.
            text_to_show = self.text[:self.max_chars_printed]
            if isinstance(text_to_show, unicode):
                text_to_show = text_to_show.encode('utf-8')
            buff.write(text_to_show)

            if len(text_to_show) < len(self.text):
                # Tell the reader how much was left out, with a locale-aware
                # thousands separator (the final True enables grouping)
                omitted = locale.format_string('%d', (len(self.text) - len(text_to_show)), True)
                buff.write('\nAnother ')
                buff.write(omitted)
                buff.write(' character(s) omitted')
        else:
            buff.write('<None>')

        value = buff.getvalue()
        buff.close()

        return value
# ################################################################################################################################
| 9,902
|
Python
|
.py
| 182
| 47.016484
| 130
| 0.523775
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,120
|
connection.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/jms_wmq/jms/connection.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# pylint: disable=attribute-defined-outside-init
"""
Copyright 2006-2008 SpringSource (http://springsource.com), All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# stdlib
from io import BytesIO
from logging import DEBUG, getLogger
from struct import pack, unpack
from xml.sax.saxutils import escape
from time import time, mktime, strptime, altzone
from threading import RLock
from traceback import format_exc
import xml.etree.ElementTree as etree
# Python 2/3 compatibility
from zato.common.ext.future.utils import iteritems
from zato.common.py23_.past.builtins import basestring, long, unicode
# Zato
from zato.common.util.wmq import unhexlify_wmq_id
from zato.server.connection.jms_wmq.jms.core import reserved_attributes, TextMessage
from zato.server.connection.jms_wmq.jms import BaseException, WebSphereMQException, NoMessageAvailableException, \
DELIVERY_MODE_NON_PERSISTENT, DELIVERY_MODE_PERSISTENT
# ################################################################################################################################
logger = getLogger('zato_ibm_mq')
# ################################################################################################################################
# Some WMQ constants are not exposed by pymqi.

# MQRFH2 header version field
_WMQ_MQRFH_VERSION_2 = b'\x00\x00\x00\x02'

_WMQ_DEFAULT_ENCODING = 273
# Big-endian 4-byte wire form of the default encoding
_WMQ_DEFAULT_ENCODING_WIRE_FORMAT = pack('!l', _WMQ_DEFAULT_ENCODING)

# 1208 = UTF-8
_WMQ_DEFAULT_CCSID = 1208
_WMQ_DEFAULT_CCSID_WIRE_FORMAT = pack('!l', _WMQ_DEFAULT_CCSID)

# From cmqc.h
_WMQ_MQFMT_RF_HEADER_2 = b'MQHRF2 '

# MQRFH_NO_FLAGS_WIRE is in cmqc.h
_WMQ_MQRFH_NO_FLAGS_WIRE_FORMAT = b'\x00\x00\x00\x00'

# Java documentation says '214748364.7 seconds'.
_WMQ_MAX_EXPIRY_TIME = 214748364.7

_WMQ_ID_PREFIX = 'ID:'

# In current implementation, an mcd JMS folder is constant for every message
# sent, so let's build it here.
_mcd = etree.Element('mcd')
_msd = etree.Element('Msd')
_mcd.append(_msd)

# For now, it's always a TextMessage
_msd.text = 'jms_text'

_msgbody = etree.Element('msgbody')
_msgbody.set('xmlns:xsi', 'dummy') # We're using a dummy namespace
_msgbody.set('xsi:nil', 'true')
_mcd.append(_msgbody)

# Clean up namespace.
del(_msd, _msgbody)
# ################################################################################################################################
class WebSphereMQConnection:
def __init__(self, _ignored_logger, queue_manager=None, channel=None, host=None, port=None, username=None,
    password=None, cache_open_send_queues=True, cache_open_receive_queues=True, use_shared_connections=True,
    dynamic_queue_template='SYSTEM.DEFAULT.MODEL.QUEUE', ssl=False, ssl_cipher_spec=None, ssl_key_repository=None,
    needs_mcd=True, needs_jms=False, max_chars_printed=100):

    # TCP-level settings.
    # NOTE(review): .encode is called before the `or ''` fallback, so a None
    # queue_manager or channel would raise AttributeError here - presumably
    # callers always pass strings; confirm.
    self.queue_manager = queue_manager.encode('utf8') or ''
    self.channel = channel.encode('utf8')
    self.host = host
    self.port = port

    # Credentials
    self.username = str(username) if username is not None else username
    self.password = str(password) if password is not None else password

    self.use_shared_connections = use_shared_connections
    self.dynamic_queue_template = dynamic_queue_template

    # SSL support
    self.ssl = ssl
    self.ssl_cipher_spec = ssl_cipher_spec
    self.ssl_key_repository = ssl_key_repository

    # WMQ >= 7.0 must not use the mcd folder
    self.needs_mcd = needs_mcd

    # Whether we expect to both send and receive JMS messages or not
    self.needs_jms = needs_jms

    # Use by channels for this connection
    self.max_chars_printed = max_chars_printed

    # Imported here rather than at module level - presumably so that pymqi
    # is only required once a connection object is actually created
    from pymqi import CMQC
    import pymqi

    self.CMQC = CMQC
    self.mq = pymqi

    # Caches of already-open queues, keyed by queue name
    self._open_send_queues_cache = {}
    self._open_receive_queues_cache = {}
    self._open_dynamic_queues_cache = {}

    self.cache_open_send_queues = cache_open_send_queues
    self.cache_open_receive_queues = cache_open_receive_queues

    # Connection state flags
    self.is_connected = False
    self.is_reconnecting = False
    self._disconnecting = False

    self.lock = RLock()

    # Cache the debug flag so hot paths can check it cheaply
    self.has_debug = logger.isEnabledFor(DEBUG)
# ################################################################################################################################
def get_config(self):
    """ Returns a dict snapshot of this connection's configuration attributes.
    """
    config_attrs = (
        'queue_manager', 'channel', 'host', 'port', 'username', 'password',
        'cache_open_send_queues', 'cache_open_receive_queues', 'use_shared_connections',
        'dynamic_queue_template', 'ssl', 'ssl_cipher_spec', 'ssl_key_repository',
        'needs_mcd', 'needs_jms', 'max_chars_printed')
    return {name: getattr(self, name) for name in config_attrs}
# ################################################################################################################################
def close(self):
    """ Shuts the connection down - clears all queue caches and disconnects from the queue manager.
    Safe to call when not connected, in which case it is a no-op.
    """
    with self.lock:
        if self.is_connected:
            # Signal to in-flight send/receive calls that we are going away.
            self._disconnecting = True
            try:
                logger.info('Deleting queues from caches')
                self._open_send_queues_cache.clear()
                self._open_receive_queues_cache.clear()
                self._open_dynamic_queues_cache.clear()
                logger.info('Caches cleared')
            except Exception:
                # Best-effort - even logging the failure may itself fail, e.g. during interpreter shutdown.
                try:
                    logger.error('Could not clear caches, e:`%s`' % format_exc())
                except Exception:
                    pass
            try:
                logger.info('Disconnecting from queue manager `%s`' % self.queue_manager)
                self.mgr.disconnect()
                logger.info('Disconnected from queue manager `%s`' % self.queue_manager)
            except Exception:
                # Same best-effort approach as above.
                try:
                    msg = 'Could not disconnect from queue manager `%s`, e:`%s`'
                    logger.error(msg % (self.queue_manager, format_exc()))
                except Exception:
                    pass
            self.is_connected = False
        else:
            logger.debug('Not connected, skipping cleaning up the resources')
# ################################################################################################################################
def get_connection_info(self):
    """ Returns a human-readable description of this connection's endpoint details. """
    template = 'queue manager:`{}`, channel:`{}`, conn_name:`{}({})`'
    return template.format(self.queue_manager, self.channel, self.host, self.port)
# ################################################################################################################################
def ping(self):
    """ Pings the remote queue manager by ensuring the current connection exists,
    establishing it first if needed.
    """
    self.connect()
# ################################################################################################################################
def reconnect(self):
    """ Closes the current connection and opens a new one.

    Guarded by the connection lock and an `is_reconnecting` flag so that
    concurrent callers do not trigger overlapping reconnects - a second
    caller simply returns while a reconnect is already in progress.
    """
    with self.lock:
        if self.is_reconnecting:
            return
        self.is_reconnecting = True
        # The original code had `except Exception: raise` here, a no-op
        # re-raise - removed; the finally clause alone resets the flag.
        try:
            self.close()
            self.connect()
        finally:
            if self.is_reconnecting:
                self.is_reconnecting = False
# ################################################################################################################################
def connect(self):
    """ Establishes a client connection to the queue manager unless one already exists.
    Raises WebSphereMQException on MQ-level errors; raises BaseException if SSL is
    requested without both cipher spec and key repository configured.
    """
    with self.lock:
        # Already connected - nothing to do.
        if self.is_connected:
            return
        self._disconnecting = False
        conn_name = '%s(%s)' % (self.host, self.port)
        logger.info('Connecting to queue manager:`%s`, channel:`%s`' ', connection info:`%s`' % (
            self.queue_manager, self.channel, conn_name))
        self.mgr = self.mq.QueueManager(None)
        kwargs = {}
        # Build a client-connection channel definition (MQCD).
        cd = self.mq.cd()
        cd.ChannelName = self.channel
        cd.ConnectionName = conn_name.encode('utf8')
        cd.ChannelType = self.CMQC.MQCHT_CLNTCONN
        cd.TransportType = self.CMQC.MQXPT_TCP
        if self.ssl:
            # Both pieces of SSL configuration are required together.
            if not(self.ssl_cipher_spec and self.ssl_key_repository):
                msg = 'SSL support requires setting both ssl_cipher_spec and ssl_key_repository'
                logger.error(msg)
                raise BaseException(msg)
            kwargs['sco'] = self.mq.sco()
            kwargs['sco'].KeyRepository = self.ssl_key_repository
            cd.SSLCipherSpec = self.ssl_cipher_spec
        if self.use_shared_connections:
            connect_options = self.CMQC.MQCNO_HANDLE_SHARE_BLOCK
        else:
            connect_options = self.CMQC.MQCNO_HANDLE_SHARE_NONE
        try:
            self.mgr.connect_with_options(self.queue_manager, cd=cd, opts=connect_options, user=self.username,
                password=self.password, **kwargs)
        except self.mq.MQMIError as e:
            # Wrap the pymqi error in our own exception type for callers.
            exc = WebSphereMQException(e, e.comp, e.reason)
            raise exc
        else:
            self.is_connected = True
            logger.info('Successfully connected to queue manager:`%s`, channel:`%s`, connection info:`%s`' % (
                self.queue_manager, self.channel, conn_name))
# ################################################################################################################################
def _get_queue_from_cache(self, destination, cache, open_options=None):
    """ Returns a queue object for `destination` out of `cache`, opening it and
    adding it to the cache first if it is not there yet.
    """
    if not open_options:
        # Default to a queue usable for both input and output.
        open_options = self.CMQC.MQOO_INPUT_SHARED | self.CMQC.MQOO_OUTPUT
    with self.lock:
        # Will usually choose this path and find the queue here.
        if destination in cache:
            return cache[destination]
        else:
            if self.has_debug:
                logger.debug('Adding queue:`%s` to cache' % destination)
            cache[destination] = self.mq.Queue(self.mgr, destination, open_options)
            if self.has_debug:
                logger.debug('Queue `%s` added to cache' % destination)
                logger.debug('Cache contents `%s`' % cache)
            return cache[destination]
# ################################################################################################################################
def get_queue_for_sending(self, destination):
    """ Returns a queue opened for output; served from the send-queue cache
    when caching of send queues is enabled, otherwise opened anew.
    """
    open_options = self.CMQC.MQOO_OUTPUT
    if self.cache_open_send_queues:
        return self._get_queue_from_cache(destination, self._open_send_queues_cache, open_options)
    return self.mq.Queue(self.mgr, destination, open_options)
# ################################################################################################################################
def get_queue_for_receiving(self, destination, open_options=None):
    """ Returns a queue opened for input; served from the receive-queue cache
    when caching of receive queues is enabled, otherwise opened anew.
    """
    options = open_options or self.CMQC.MQOO_INPUT_SHARED
    if self.cache_open_receive_queues:
        return self._get_queue_from_cache(destination, self._open_receive_queues_cache, options)
    return self.mq.Queue(self.mgr, destination, options)
# ################################################################################################################################
def send(self, message, destination):
    """ Sends `message` to `destination`, optionally prefixing the payload with an
    MQRFH2 (JMS) header. After a successful put, MQMD fields that the queue manager
    filled in (message ID, put date/time, etc.) are copied back onto the message.
    Raises WebSphereMQException if the underlying MQPUT fails.
    """
    if self._disconnecting:
        # Fixed copy-paste: this log line previously said 'aborting receive'.
        logger.info('Connection factory disconnecting, aborting send')
        return
    else:
        if self.has_debug:
            logger.debug('send -> not disconnecting')
    # Lazily connect on first use.
    if not self.is_connected:
        if self.has_debug:
            logger.debug('send -> _is_connected1 %s' % self.is_connected)
        self.connect()
        if self.has_debug:
            logger.debug('send -> _is_connected2 %s' % self.is_connected)
    # Normalize the destination - strip any queue:// prefixes and re-encode as bytes.
    if not isinstance(destination, unicode):
        destination = destination.decode('utf8')
    destination = self._strip_prefixes_from_destination(destination)
    destination = destination.encode('utf8')
    # Will consist of an MQRFH2 header and the actual business payload.
    buff = BytesIO()
    # Build the message descriptor (MQMD)
    md = self._build_md(message)
    # Current time in milliseconds since epoch, as JMS expects it.
    now = long(time() * 1000)
    # Create MQRFH2 header, if requested to
    if self.needs_jms:
        mqrfh2jms = MQRFH2JMS(self.needs_mcd, self.has_debug).build_header(message, destination, self.CMQC, now)
        buff.write(mqrfh2jms)
    if message.text is not None:
        buff.write(message.text.encode('utf8') if isinstance(message.text, unicode) else message.text)
    body = buff.getvalue()
    buff.close()
    queue = self.get_queue_for_sending(destination)
    try:
        queue.put(body, md)
    except self.mq.MQMIError as e:
        logger.error('MQMIError in queue.put, comp:`%s`, reason:`%s`' % (e.comp, e.reason))
        exc = WebSphereMQException(e, e.comp, e.reason)
        raise exc
    if not self.cache_open_send_queues:
        queue.close()
    # Map the JMS headers overwritten by calling queue.put
    message.jms_message_id = md.MsgId
    message.jms_priority = md.Priority
    message.jms_correlation_id = md.CorrelId
    message.JMSXUserID = md.UserIdentifier
    message.JMSXAppID = md.PutApplName
    if md.PutDate and md.PutTime:
        message.jms_timestamp = self._get_jms_timestamp_from_md(md.PutDate.strip(), md.PutTime.strip())
        message.JMS_IBM_PutDate = md.PutDate.strip()
        message.JMS_IBM_PutTime = md.PutTime.strip()
    else:
        logger.warning('No md.PutDate and md.PutTime found, md:`%r`' % repr(md))
    # queue.put has succeeded, so overwrite expiration time as well
    if message.jms_expiration:
        message.jms_expiration += now
    if self.has_debug:
        logger.debug('Successfully sent a message `%s`, connection info `%s`' % (message, self.get_connection_info()))
        logger.debug('message:`%s`, body:`%r`, md:`%r`' % (message, body, repr(md)))
# ################################################################################################################################
def receive(self, destination, wait_interval, _connection_closing='zato.connection.closing'):
    """ Waits up to `wait_interval` milliseconds for a message from `destination`
    and returns it built into a TextMessage. Returns the `_connection_closing`
    sentinel if the factory is shutting down. Raises NoMessageAvailableException
    when the wait times out and WebSphereMQException on other MQ errors.
    """
    if self._disconnecting:
        logger.info('Connection factory disconnecting, aborting receive')
        return _connection_closing
    else:
        if self.has_debug:
            logger.debug('receive -> not disconnecting')
    # Lazily connect on first use.
    if not self.is_connected:
        if self.has_debug:
            logger.debug('receive -> _is_connected1 %s' % self.is_connected)
        self.connect()
        if self.has_debug:
            logger.debug('receive -> _is_connected2 %s' % self.is_connected)
    queue = self.get_queue_for_receiving(destination)
    try:
        # Default message descriptor ..
        md = self.mq.md()
        # .. and custom get message options
        gmo = self.mq.gmo()
        gmo.Options = self.CMQC.MQGMO_WAIT | self.CMQC.MQGMO_FAIL_IF_QUIESCING
        gmo.WaitInterval = wait_interval
        message = queue.get(None, md, gmo)
        return self._build_text_message(md, message)
    except self.mq.MQMIError as e:
        # A timed-out wait is an expected condition, mapped to its own exception type.
        if e.reason == self.CMQC.MQRC_NO_MSG_AVAILABLE:
            text = 'No message available for destination:`%s`, wait_interval:`%s` ms' % (destination, wait_interval)
            raise NoMessageAvailableException(text)
        else:
            logger.debug('Exception caught in get, comp:`%s`, reason:`%s`' % (e.comp, e.reason))
            exc = WebSphereMQException(e, e.comp, e.reason)
            raise exc
# ################################################################################################################################
def open_dynamic_queue(self):
    """ Creates a model-based dynamic queue, caches it under its generated
    name and returns that name. Returns None if the factory is disconnecting.
    """
    if self._disconnecting:
        logger.info('Connection factory disconnecting, aborting open_dynamic_queue')
        return
    else:
        logger.debug('open_dynamic_queue -> not disconnecting')
    # Lazily connect on first use.
    if not self.is_connected:
        if self.has_debug:
            logger.debug('open_dynamic_queue -> _is_connected1 %s' % self.is_connected)
        self.connect()
        if self.has_debug:
            logger.debug('open_dynamic_queue -> _is_connected2 %s' % self.is_connected)
    dynamic_queue = self.mq.Queue(self.mgr, self.dynamic_queue_template, self.CMQC.MQOO_INPUT_SHARED)
    # A bit hackish, but there's no other way to get its name.
    dynamic_queue_name = dynamic_queue._Queue__qDesc.ObjectName.strip()
    with self.lock:
        self._open_dynamic_queues_cache[dynamic_queue_name] = dynamic_queue
    logger.debug('Successfully created a dynamic queue, descriptor `%s`' % (dynamic_queue._Queue__qDesc))
    return dynamic_queue_name
# ################################################################################################################################
def close_dynamic_queue(self, dynamic_queue_name):
    """ Closes the dynamic queue of the given name and removes it from all
    queue caches. A name that is not in the cache is ignored instead of
    raising KeyError, mirroring how a not-connected factory treats all
    dynamic queues as already closed.
    """
    if self._disconnecting:
        logger.info('Connection factory disconnecting, aborting close_dynamic_queue')
        return
    else:
        if self.has_debug:
            logger.debug('close_dynamic_queue -> not disconnecting')
    if not self.is_connected:
        # If we're not connected then all dynamic queues had been already closed.
        if self.has_debug:
            logger.debug('close_dynamic_queue -> _is_connected1 %s' % self.is_connected)
        return
    else:
        if self.has_debug:
            logger.debug('close_dynamic_queue -> _is_connected2 %s' % self.is_connected)
    with self.lock:
        # Robustness fix: the original indexed the cache directly and raised
        # KeyError for unknown names; pop with a default handles that safely.
        dynamic_queue = self._open_dynamic_queues_cache.pop(dynamic_queue_name, None)
        if dynamic_queue is not None:
            dynamic_queue.close()
        self._open_send_queues_cache.pop(dynamic_queue_name, None)
        self._open_receive_queues_cache.pop(dynamic_queue_name, None)
        if self.has_debug:
            logger.debug('Successfully closed a dynamic queue `%s`' % dynamic_queue_name)
# ################################################################################################################################
def _get_jms_timestamp_from_md(self, put_date, put_time):
    """ Converts MQMD PutDate/PutTime (GMT, 'YYYYMMDD' plus 'HHMMSScc' where
    the last two digits are centiseconds) into a JMS timestamp - milliseconds
    since the epoch.
    """
    pattern = '%Y%m%d%H%M%S'
    # The trailing two digits of PutTime are centiseconds.
    centi = int(put_time[6:]) / 100.0
    put_date_time = put_date + put_time[:6]
    if not isinstance(put_date_time, unicode):
        put_date_time = put_date_time.decode('utf8')
    strp = strptime(put_date_time, pattern)
    mk = mktime(strp)
    # PutDate/PutTime are GMT - subtract altzone to undo mktime's local-time view,
    # then scale to milliseconds.
    return long((mk - altzone + centi) * 1000.0)
# ################################################################################################################################
def _build_text_message(self, md, message):
    """ Builds a TextMessage out of a raw MQ message and its MQMD descriptor,
    mapping MQRFH2 folders and MQMD fields onto JMS headers and JMS_IBM_* /
    JMSX* provider properties.
    """
    if self.has_debug:
        logger.debug('Building a text message:`%r`, md:`%r`' % (repr(message), repr(md)))
    # Non-JMS messages get a dummy parser that treats the whole body as the payload.
    class_ = MQRFH2JMS if self.needs_jms else DummyMQRFH2JMS
    mqrfh2 = class_(self.needs_mcd)
    mqrfh2.build_folders_and_payload_from_message(message)
    jms_folder = mqrfh2.folders.get('jms', None)
    usr_folder = mqrfh2.folders.get('usr', None)
    # Create a message instance ..
    text_message = TextMessage()
    text_message.mqmd = md
    # Application-defined properties from the 'usr' folder become plain attributes.
    if usr_folder:
        for attr_name, attr_value in usr_folder.items():
            setattr(text_message, attr_name, str(attr_value))
    # .. set its JMS properties ..
    if jms_folder:
        if jms_folder.find('Dst') is not None:
            text_message.jms_destination = jms_folder.find('Dst').text.strip()
        if jms_folder.find('Exp') is not None:
            text_message.jms_expiration = long(jms_folder.find('Exp').text)
        else:
            text_message.jms_expiration = 0 # Same as in Java
        if jms_folder.find('Cid') is not None:
            text_message.jms_correlation_id = jms_folder.find('Cid').text
        else:
            text_message.jms_correlation_id = getattr(md, 'CorrelId', None)
    # Map MQMD persistence onto the two JMS delivery modes.
    if md.Persistence == self.CMQC.MQPER_NOT_PERSISTENT:
        text_message.jms_delivery_mode = DELIVERY_MODE_NON_PERSISTENT
    elif md.Persistence in(self.CMQC.MQPER_PERSISTENT, self.CMQC.MQPER_PERSISTENCE_AS_Q_DEF):
        text_message.jms_delivery_mode = DELIVERY_MODE_PERSISTENT
    else:
        text = "Don't know how to handle md.Persistence mode:`%s`" % (md.Persistence)
        logger.error(text)
        exc = WebSphereMQException(text)
        raise exc
    # Reply-to will have at least a queue name
    md_reply_to_queue = md.ReplyToQ.strip()
    # Are replies to be sent anywhere?
    if md_reply_to_queue:
        # We will have a reply-to-qm potentially as well
        md_reply_to_qm = md.ReplyToQMgr.strip()
        # Convert everything to string
        if isinstance(md_reply_to_queue, bytes):
            md_reply_to_queue = md_reply_to_queue.decode('utf8')
        if isinstance(md_reply_to_qm, bytes):
            md_reply_to_qm = md_reply_to_qm.decode('utf8')
        if self.has_debug:
            logger.debug('Found md.ReplyToQ:`%r`' % md_reply_to_queue)
        text_message.jms_reply_to = 'queue://' + md_reply_to_qm + '/' + md_reply_to_queue
    text_message.jms_priority = md.Priority
    text_message.jms_message_id = md.MsgId
    text_message.put_date = md.PutDate.strip()
    text_message.put_time = md.PutTime.strip()
    text_message.jms_redelivered = bool(int(md.BackoutCount))
    # Provider-specific JMSX properties taken straight from the MQMD.
    text_message.JMSXUserID = md.UserIdentifier.strip()
    text_message.JMSXAppID = md.PutApplName.strip()
    text_message.JMSXDeliveryCount = md.BackoutCount
    text_message.JMSXGroupID = md.GroupId.strip()
    text_message.JMSXGroupSeq = md.MsgSeqNumber
    # Map MQMD report options onto individual JMS_IBM_Report_* headers.
    md_report_to_jms = {
        self.CMQC.MQRO_EXCEPTION: 'Exception',
        self.CMQC.MQRO_EXPIRATION: 'Expiration',
        self.CMQC.MQRO_COA: 'COA',
        self.CMQC.MQRO_COD: 'COD',
        self.CMQC.MQRO_PAN: 'PAN',
        self.CMQC.MQRO_NAN: 'NAN',
        self.CMQC.MQRO_PASS_MSG_ID: 'Pass_Msg_ID',
        self.CMQC.MQRO_PASS_CORREL_ID: 'Pass_Correl_ID',
        self.CMQC.MQRO_DISCARD_MSG: 'Discard_Msg',
    }
    for report_name, jms_header_name in iteritems(md_report_to_jms):
        # Bit set in md.Report -> header carries the bit value, otherwise None.
        report_value = md.Report & report_name
        if report_value:
            header_value = report_value
        else:
            header_value = None
        setattr(text_message, 'JMS_IBM_Report_' + jms_header_name, header_value)
    text_message.JMS_IBM_MsgType = md.MsgType
    text_message.JMS_IBM_Feedback = md.Feedback
    text_message.JMS_IBM_Format = md.Format.strip()
    text_message.JMS_IBM_PutApplType = md.PutApplType
    text_message.JMS_IBM_PutDate = md.PutDate.strip()
    text_message.JMS_IBM_PutTime = md.PutTime.strip()
    if md.MsgFlags & self.CMQC.MQMF_LAST_MSG_IN_GROUP:
        text_message.JMS_IBM_Last_Msg_In_Group = self.CMQC.MQMF_LAST_MSG_IN_GROUP
    else:
        text_message.JMS_IBM_Last_Msg_In_Group = None
    # .. and its payload too.
    if mqrfh2.payload:
        text_message.text = mqrfh2.payload.decode('utf-8', 'replace')
    return text_message
# ################################################################################################################################
def _strip_prefixes_from_destination(self, destination):
if destination.startswith('queue:///'):
return destination.replace('queue:///', '', 1)
elif destination.startswith('queue://'):
no_qm_dest = destination.replace('queue://', '', 1)
no_qm_dest = no_qm_dest.split('/')[1:]
return '/'.join(no_qm_dest)
else:
return destination
# ################################################################################################################################
def _build_md(self, message):
    """ Builds an MQMD message descriptor out of the message's JMS headers
    and IBM-specific JMS_IBM_* / JMSX* properties.
    """
    md = self.mq.md()
    if self.needs_jms:
        # The payload will be preceded by an MQRFH2 header.
        md.Format = _WMQ_MQFMT_RF_HEADER_2
    md.CodedCharSetId = _WMQ_DEFAULT_CCSID
    md.Encoding = _WMQ_DEFAULT_ENCODING
    # Map JMS headers to MQMD
    if message.jms_message_id:
        md.MsgId = message.jms_message_id
    if message.jms_correlation_id:
        if message.jms_correlation_id.startswith(_WMQ_ID_PREFIX.encode('utf-8')):
            md.CorrelId = unhexlify_wmq_id(message.jms_correlation_id)
        else:
            # Correlation IDs are fixed-width 24-byte MQ fields.
            md.CorrelId = message.jms_correlation_id.ljust(24)[:24]
    if message.jms_delivery_mode:
        if message.jms_delivery_mode == DELIVERY_MODE_NON_PERSISTENT:
            persistence = self.CMQC.MQPER_NOT_PERSISTENT
        elif message.jms_delivery_mode == DELIVERY_MODE_PERSISTENT:
            persistence = self.CMQC.MQPER_PERSISTENT
        else:
            pattern = 'jms_delivery_mode should be DELIVERY_MODE_NON_PERSISTENT or DELIVERY_MODE_PERSISTENT, not `%r`'
            info = pattern % message.jms_delivery_mode
            logger.error(info)
            exc = BaseException(info)
            raise exc
        md.Persistence = persistence
    if message.jms_priority:
        md.Priority = message.jms_priority
    if message.jms_reply_to:
        md.ReplyToQ = message.jms_reply_to
        if self.has_debug:
            logger.debug(('Set jms_reply_to. md.ReplyToQ:`%r`,' ' message.jms_reply_to:`%r`' % (
                md.ReplyToQ, message.jms_reply_to)))
    # jms_expiration is in milliseconds, md.Expiry is in centiseconds.
    if message.jms_expiration:
        message.jms_expiration = int(message.jms_expiration)
        if message.jms_expiration / 1000 > _WMQ_MAX_EXPIRY_TIME:
            md.Expiry = self.CMQC.MQEI_UNLIMITED
        else:
            md.Expiry = int(message.jms_expiration / 10)
    # IBM MQ provider-specific JMS headers
    jmsxgroupseq = getattr(message, 'JMSXGroupSeq', None)
    if jmsxgroupseq is not None:
        md.MsgSeqNumber = jmsxgroupseq
        md.MsgFlags |= self.CMQC.MQMF_MSG_IN_GROUP
    jmsxgroupid = getattr(message, 'JMSXGroupID', None)
    if jmsxgroupid is not None:
        if jmsxgroupid.startswith(_WMQ_ID_PREFIX):
            md.GroupId = unhexlify_wmq_id(jmsxgroupid)
        else:
            md.GroupId = jmsxgroupid.ljust(24)[:24]
        md.MsgFlags |= self.CMQC.MQMF_MSG_IN_GROUP
    # Fold each JMS_IBM_Report_* header into the MQMD Report bitmask.
    report_names = 'Exception', 'Expiration', 'COA', 'COD', 'PAN', 'NAN', 'Pass_Msg_ID', 'Pass_Correl_ID', 'Discard_Msg'
    for report_name in report_names:
        report = getattr(message, 'JMS_IBM_Report_' + report_name, None)
        if report is not None:
            md.Report |= report
    # Doesn't make much sense to map feedback options as we're stuffed into
    # request messages (MQMT_REQUEST) not report messages (MQMT_REPORT)
    # but different types of messages are still possible to implement in
    # the future so let's leave it.
    jms_ibm_feedback = getattr(message, 'JMS_IBM_Feedback', None)
    if jms_ibm_feedback is not None:
        md.Feedback = jms_ibm_feedback
    jms_ibm_last_msg_in_group = getattr(message, 'JMS_IBM_Last_Msg_In_Group', None)
    if jms_ibm_last_msg_in_group is not None:
        md.MsgFlags |= self.CMQC.MQMF_LAST_MSG_IN_GROUP
    return md
# ################################################################################################################################
class DummyMQRFH2JMS:
    """ A stand-in for MQRFH2JMS used when messages read off queues are not
    actually JMS - exposes the same attributes but performs no header parsing.
    """
    def __init__(self, *ignored_args, **ignored_kwargs):
        self.folders = dict.fromkeys(('jms', 'mcd', 'usr'))
        self.payload = None

    def build_folders_and_payload_from_message(self, payload):
        # There are no JMS headers to parse - the whole message is the payload.
        self.payload = payload
# ################################################################################################################################
class MQRFH2JMS:
    """ A class for representing a subset of MQRFH2, suitable for passing IBM MQ JMS headers around.
    """
    # Layout of the fixed part of an MQRFH2 header:
    # 4 bytes - MQRFH_STRUC_ID
    # 4 bytes - _WMQ_MQRFH_VERSION_2
    # 4 bytes - the whole MQRFH2 header length
    # 4 bytes - Encoding
    # 4 bytes - CodedCharacterSetId
    # 8 bytes - MQFMT_STRING
    # 4 bytes - MQRFH_NO_FLAGS
    # 4 bytes - NameValueCCSID
    FIXED_PART_LENGTH = 36

    # MQRFH2 folder length must be a multiple of 4.
    FOLDER_LENGTH_MULTIPLE = 4

    # Size of a folder header is always 4 bytes.
    FOLDER_SIZE_HEADER_LENGTH = 4

    def __init__(self, needs_mcd=True, has_debug=False):
        # Whether to add the mcd folder. Needs to be False for everything to work properly with WMQ >= 7.0
        self.needs_mcd = needs_mcd
        self.has_debug = has_debug
        # Parsed XML folders keyed by folder name ('jms', 'usr' and optionally 'mcd')
        self.folders = {}
        # The business payload following the MQRFH2 header
        self.payload = None

    def _pad_folder(self, folder):
        """ Pads the folder to a multiple of 4, as required by IBM MQ.
        """
        folder_len = len(folder)
        if folder_len % MQRFH2JMS.FOLDER_LENGTH_MULTIPLE == 0:
            return folder
        else:
            padding = MQRFH2JMS.FOLDER_LENGTH_MULTIPLE - folder_len % MQRFH2JMS.FOLDER_LENGTH_MULTIPLE
            return folder.ljust(folder_len + padding)

    def build_folders_and_payload_from_message(self, message):
        """ Splits a raw wire-format message into its MQRFH2 folders and the
        business payload that follows them.
        """
        # Bytes 8-12 of the fixed part carry the total MQRFH2 length, big-endian.
        total_mqrfh2_length = unpack('!l', message[8:12])[0]
        mqrfh2 = message[MQRFH2JMS.FIXED_PART_LENGTH:total_mqrfh2_length]
        self.payload = message[MQRFH2JMS.FIXED_PART_LENGTH + len(mqrfh2):]
        if self.has_debug:
            logger.debug('message:`%r`' % message)
            logger.debug('mqrfh2:`%r`' % mqrfh2)
            logger.debug('self.payload:`%r`' % self.payload)
        # Walk the variable part folder by folder; each folder is preceded
        # by a 4-byte big-endian length.
        left = mqrfh2
        while left:
            current_folder_length = unpack('!l', left[:4])[0]
            raw_folder = left[MQRFH2JMS.FOLDER_SIZE_HEADER_LENGTH:MQRFH2JMS.FOLDER_SIZE_HEADER_LENGTH + current_folder_length]
            if self.has_debug:
                logger.debug('raw_folder:`%r`' % raw_folder)
            self.build_folder(raw_folder)
            left = left[MQRFH2JMS.FOLDER_SIZE_HEADER_LENGTH + current_folder_length:]

    def build_folder(self, raw_folder):
        """ Parses one raw XML folder and stores it under its root tag name
        if the root is one of the recognized folder names.
        """
        # Java JMS sends folders with unbound prefixes, i.e. <msgbody xsi:nil='true'></msgbody>
        # which is in no way a valid XML so we have to insert the prefix ourselves
        # in order to avoid parser bailing out with an ExpatError. I can't think
        # of any other way to work around it if we'd like to treat folders as
        # XML(-like) structures.
        if 'xsi:nil="true"' in raw_folder and not 'xmlns' in raw_folder:
            if self.has_debug:
                logger.debug('Binding xsi:nil to a dummy namespace:`%s`' % raw_folder)
            raw_folder = raw_folder.replace('xsi:nil="true"', 'xmlns:xsi="dummy" xsi:nil="true"')
            if self.has_debug:
                logger.debug('raw_folder after binding:`%s`' % raw_folder)
        folder = etree.fromstring(raw_folder)
        root_name = folder.tag
        root_names = ['jms', 'usr']
        if self.needs_mcd:
            root_names.append('mcd')
        if root_name in root_names:
            self.folders[root_name] = folder
        else:
            logger.warning('Ignoring unrecognized JMS folder `%s`=`%s`' % (root_name, raw_folder))

    def build_header(self, message, queue_name, CMQC, now):
        """ Serializes the mcd/jms/usr folders into a complete wire-format
        MQRFH2 header and returns it as bytes.
        """
        if self.needs_mcd:
            self.folders['mcd'] = _mcd
            mcd = self._pad_folder(etree.tostring(self.folders['mcd']))
            mcd_len = len(mcd)
        else:
            mcd_len = 0
        self.add_jms(message, queue_name, now)
        self.add_usr(message)
        jms = self._pad_folder(etree.tostring(self.folders['jms']))
        if 'usr' in self.folders:
            usr = self._pad_folder(etree.tostring(self.folders['usr']))
            usr_len = len(usr)
        else:
            usr_len = 0
        jms_len = len(jms)
        total_header_length = 0
        total_header_length += MQRFH2JMS.FIXED_PART_LENGTH
        # Each folder has a 4-byte header describing its length,
        # hence the 'len(self.folders) * 4' below.
        variable_part_length = len(self.folders) * 4 + mcd_len + jms_len + usr_len
        total_header_length += variable_part_length
        buff = BytesIO()
        # Fixed part of the header, in wire order.
        buff.write(CMQC.MQRFH_STRUC_ID)
        buff.write(_WMQ_MQRFH_VERSION_2)
        buff.write(pack('!l', total_header_length))
        buff.write(_WMQ_DEFAULT_ENCODING_WIRE_FORMAT)
        buff.write(_WMQ_DEFAULT_CCSID_WIRE_FORMAT)
        buff.write(CMQC.MQFMT_STRING)
        buff.write(_WMQ_MQRFH_NO_FLAGS_WIRE_FORMAT)
        buff.write(_WMQ_DEFAULT_CCSID_WIRE_FORMAT)
        # Variable part - each folder preceded by its 4-byte length.
        if self.needs_mcd:
            buff.write(pack('!l', mcd_len))
            buff.write(mcd)
        buff.write(pack('!l', jms_len))
        buff.write(jms)
        if 'usr' in self.folders:
            buff.write(pack('!l', usr_len))
            buff.write(usr)
        value = buff.getvalue()
        buff.close()
        return value

    def add_jms(self, message, queue_name, now):
        """ Builds the 'jms' folder out of the message's JMS headers. """
        jms = etree.Element('jms')
        dst = etree.Element('Dst')
        tms = etree.Element('Tms')
        dlv = etree.Element('Dlv')
        jms.append(dst)
        jms.append(tms)
        jms.append(dlv)
        tms.text = unicode(now)
        dst.text = u'queue:///' + queue_name
        dlv.text = unicode(message.jms_delivery_mode)
        if message.jms_expiration:
            exp = etree.Element('Exp')
            # Expiration on the wire is absolute - current time plus the relative TTL.
            exp.text = unicode(now + message.jms_expiration)
            if self.has_debug:
                logger.debug('jms.Exp:`%r`' % exp.text)
            jms.append(exp)
        if message.jms_priority:
            pri = etree.Element('Pri')
            pri.text = unicode(message.jms_priority)
            if self.has_debug:
                logger.debug('jms.Pri:`%r`' % pri.text)
            jms.append(pri)
        if message.jms_correlation_id:
            cid = etree.Element('Cid')
            cid.text = unicode(message.jms_correlation_id)
            if self.has_debug:
                logger.debug('jms.Cid:`%r`' % cid.text)
            jms.append(cid)
        self.folders['jms'] = jms

    def add_usr(self, message):
        """ Builds the 'usr' folder out of the message's application-defined
        attributes, if there are any.
        """
        user_attrs = set(dir(message)) - reserved_attributes
        if self.has_debug:
            logger.debug('user_attrs:`%s`' % user_attrs)
        if user_attrs:
            usr = etree.Element('usr')
            for user_attr in user_attrs:
                user_attr_value = getattr(message, user_attr)
                # Some values are integers, e.g. delivery_mode
                if isinstance(user_attr_value, basestring):
                    user_attr_value = escape(user_attr_value)
                # Create a JMS attribute and set its value.
                user_attr = etree.Element(unicode(user_attr))
                user_attr.text = unicode(user_attr_value)
                usr.append(user_attr)
            self.folders['usr'] = usr
# ################################################################################################################################
| 37,786
|
Python
|
.py
| 735
| 40.668027
| 130
| 0.550492
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,121
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/server/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,122
|
config.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/server/rpc/config.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from contextlib import closing
from logging import getLogger
# Zato
from zato.common.ext.dataclasses import dataclass
from zato.common.odb.query import server_by_name, server_list
from zato.common.typing_ import cast_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.odb.api import SessionWrapper
from zato.common.odb.model import SecurityBase as SecurityBaseModel, Server as ServerModel
from zato.common.typing_ import callable_, intnone, list_, strnone
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
SecurityBaseModel = SecurityBaseModel
SessionWrapper = SessionWrapper
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class CredentialsConfig:
    """ Names of the internal security definition and API user employed
    for server-to-server invocations.
    """
    sec_def_name = 'zato.internal.invoke'
    api_user = 'zato.internal.invoke.user'
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class RPCServerInvocationCtx:
    """ Connection details needed to invoke one server over RPC. """
    cluster_name: 'strnone' = None
    server_name: 'strnone' = None
    address: 'strnone' = None
    port: 'intnone' = None
    username: 'strnone' = None
    password: 'strnone' = None
    # Whether the server should be pinged before the invocation proper
    needs_ping: 'bool' = True
    # Whether to use TLS when connecting
    crypto_use_tls: 'bool' = False
@dataclass(init=False)
class InvocationCredentials:
    """ Username/password pair used to authenticate server-to-server API calls. """
    username: 'strnone' = None
    password: 'strnone' = None
# ################################################################################################################################
# ################################################################################################################################
class ConfigSource:
    """ A base class for returning server configuration.
    """
    def __init__(self, cluster_name:'str', server_name:'str', decrypt_func:'callable_') -> 'None':
        # Name of the cluster and server this source runs on
        self.current_cluster_name = cluster_name
        self.current_server_name = server_name
        # Callable used to decrypt secrets read from the configuration store
        self.decrypt_func = decrypt_func

    def get_server_ctx(self, parallel_server, cluster_name:'str', server_name:'str') -> 'RPCServerInvocationCtx':
        """ Returns invocation details of one named server in the cluster. """
        raise NotImplementedError('Should be overridden by subclasses')

    def get_server_ctx_list(self, cluster_name:'str') -> 'list_[RPCServerInvocationCtx]':
        """ Returns invocation details of all servers in the cluster. """
        raise NotImplementedError('Should be overridden by subclasses')

    def get_invocation_credentials(self, cluster_name:'str') -> 'InvocationCredentials':
        """ Returns the API credentials used for server-to-server calls. """
        raise NotImplementedError('Should be overridden by subclasses')
# ################################################################################################################################
# ################################################################################################################################
class ODBConfigSource(ConfigSource):
    """ Returns server configuration based on information in the cluster's ODB.
    """
    def __init__(self, odb:'SessionWrapper', cluster_name:'str', server_name:'str', decrypt_func:'callable_') -> 'None':
        super().__init__(cluster_name, server_name, decrypt_func)
        # ODB session wrapper used for all database access
        self.odb = odb

    def get_invocation_credentials(self, _unused_session, cluster_name:'str'):
        """ Looks up the internal invoke security definition in the cluster and
        returns its credentials with the password decrypted. Raises ValueError
        if the definition does not exist.
        """
        # NOTE(review): this signature differs from the base class's
        # get_invocation_credentials(cluster_name) - callers must pass a session
        # argument here even though it is unused; confirm this is intentional.
        for sec_item in self.odb.get_basic_auth_list(None, cluster_name):
            sec_item = cast_('SecurityBaseModel', sec_item)
            if sec_item.name == CredentialsConfig.sec_def_name:
                out = InvocationCredentials()
                out.username = sec_item.username
                # Passwords are stored encrypted - decrypt before use.
                out.password = self.decrypt_func(sec_item.password)
                return out
        else:
            # The for-else runs when no matching definition was found.
            raise ValueError('No such security definition `{}` in cluster `{}`'.format(
                CredentialsConfig.sec_def_name, cluster_name))

    def build_server_ctx(self, server_model:'ServerModel', credentials:'InvocationCredentials') -> 'RPCServerInvocationCtx':
        """ Combines a server's ODB row with API credentials into an RPCServerInvocationCtx. """
        out = RPCServerInvocationCtx()
        out.cluster_name = server_model.cluster_name
        out.server_name = server_model.name
        out.address = server_model.preferred_address
        out.crypto_use_tls = server_model.crypto_use_tls
        out.port = server_model.bind_port
        out.username = credentials.username
        out.password = credentials.password
        return out

    def get_server_ctx(
        self,
        _ignored_parallel_server, # type: ParallelServer
        cluster_name, # type: str
        server_name # type: str
    ) -> 'RPCServerInvocationCtx':
        """ Returns a specific server defined in ODB.
        """
        with closing(self.odb.session()) as session:
            result = server_by_name(session, None, cluster_name, server_name)
            # No such server or cluster at all
            if not result:
                msg = 'No such server or cluster {}@{}'.format(server_name, cluster_name)
                logger.warning(msg)
                raise ValueError(msg)
            # Multiple matches - naturally, should not happen
            elif len(result) > 1:
                msg = 'Unexpected output for {}@{} len:`{}`, result:`{}`'.format(
                    server_name, cluster_name, len(result), '\n' + '\n'.join(str(elem) for elem in result))
                logger.warning(msg)
                raise ValueError(msg)
            else:
                server_model = result[0] # type: ServerModel
                credentials = self.get_invocation_credentials(session, cluster_name)
                # NOTE(review): the cast target below names SecurityBaseModel although the
                # object is an InvocationCredentials instance; cast_ appears to have no
                # runtime effect, but the annotation looks wrong - confirm and correct.
                credentials = cast_('SecurityBaseModel', credentials)
                return self.build_server_ctx(server_model, credentials)

    def get_server_ctx_list(self, cluster_name:'str') -> 'list_[RPCServerInvocationCtx]':
        """ Returns invocation contexts for all servers in the cluster. """
        # Response to return
        out = []
        with closing(self.odb.session()) as session:
            # First, get API credentials that will be the same for all servers ..
            credentials = self.get_invocation_credentials(session, cluster_name)
            # .. now, get servers from the database ..
            result = server_list(session, None, cluster_name)
            result = result[0]
            result = result.result
            # .. combine the two ..
            for item in result:
                server_ctx = self.build_server_ctx(item, credentials)
                out.append(server_ctx)
            # .. and return everything to our caller.
            return out
# ################################################################################################################################
# ################################################################################################################################
| 8,021
|
Python
|
.py
| 137
| 51.437956
| 130
| 0.466369
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,123
|
api.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/server/rpc/api.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Zato
from zato.common.ext.dataclasses import dataclass
from zato.common.typing_ import cast_, list_field
from zato.server.connection.server.rpc.invoker import LocalServerInvoker, RemoteServerInvoker
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, anylist, generator_, stranydict
from zato.server.base.parallel import ParallelServer
from zato.server.connection.server.rpc.config import ConfigSource, RPCServerInvocationCtx
from zato.server.connection.server.rpc.invoker import PerPIDResponse, ServerInvoker
ConfigSource = ConfigSource
ParallelServer = ParallelServer
PerPIDResponse = PerPIDResponse
RPCServerInvocationCtx = RPCServerInvocationCtx
# ################################################################################################################################
# ################################################################################################################################
@dataclass
class InvokeAllResult:
    """ Aggregated result of invoking a service across all servers and all of their PIDs.
    """

    # By default, we assume that the invocation succeeded,
    # unless it is overwritten by one of the per-PID responses
    is_ok: 'bool' = True

    # This is a list of responses from each PID of each server
    data: 'anylist' = list_field()
# ################################################################################################################################
# ################################################################################################################################
class ConfigCtx:
    """ A config-like class that knows how to return details needed to invoke local or remote servers.
    """
    def __init__(self,
        config_source, # type: ConfigSource
        parallel_server, # type: ParallelServer
        local_server_invoker_class = LocalServerInvoker, # type: type[LocalServerInvoker]
        remote_server_invoker_class = RemoteServerInvoker # type: type[RemoteServerInvoker]
    ):
        self.config_source = config_source
        self.parallel_server = parallel_server
        self.local_server_invoker_class = local_server_invoker_class
        self.remote_server_invoker_class = remote_server_invoker_class

    def get_remote_server_invoker(self, server_name:'str') -> 'RemoteServerInvoker':
        """ Builds a remote invoker for one, named, server of the current cluster.
        """
        current_cluster = self.config_source.current_cluster_name
        server_ctx = self.config_source.get_server_ctx(self.parallel_server, current_cluster, server_name)
        return self.remote_server_invoker_class(server_ctx)

    def get_remote_server_invoker_list(self) -> 'generator_[ServerInvoker, None, None]':
        """ Yields an invoker per server in the current cluster - a local one
        for the very server we are running on, remote ones for all the others.
        """
        for server_ctx in self.config_source.get_server_ctx_list(self.config_source.current_cluster_name):

            # Our own server is invoked directly, in-process ..
            if server_ctx.server_name == self.config_source.current_server_name:
                yield self.local_server_invoker_class(
                    self.parallel_server,
                    cast_('str', server_ctx.cluster_name),
                    cast_('str', server_ctx.server_name))

            # .. whereas any other server requires a network round-trip.
            else:
                yield self.remote_server_invoker_class(server_ctx)
# ################################################################################################################################
# ################################################################################################################################
class ServerRPC:
    """ A facade through which Zato servers can be invoked.
    """
    def __init__(self, config_ctx:'ConfigCtx') -> 'None':
        self.config_ctx = config_ctx
        self.current_cluster_name = self.config_ctx.config_source.current_cluster_name
        self._invokers = {} # type: stranydict
        self.logger = getLogger('zato')

# ################################################################################################################################

    def _get_invoker_by_server_name(self, server_name:'str') -> 'ServerInvoker':
        """ Builds a new invoker - a local one if the name points to this very server,
        a remote one in any other case.
        """
        if server_name == self.config_ctx.parallel_server.name:
            return self.config_ctx.local_server_invoker_class(
                self.config_ctx.parallel_server,
                self.config_ctx.parallel_server.cluster_name,
                self.config_ctx.parallel_server.name,
            )
        else:
            return self.config_ctx.get_remote_server_invoker(server_name)

# ################################################################################################################################

    def get_invoker_by_server_name(self, server_name:'str') -> 'ServerInvoker':
        """ Returns a cached invoker for the given server name, building and caching one first if needed.
        """
        if server_name not in self._invokers:
            self._invokers[server_name] = self._get_invoker_by_server_name(server_name)

        return self._invokers[server_name]

# ################################################################################################################################

    def populate_invokers(self) -> 'None':
        """ Refreshes the invoker cache with one entry per server currently defined in the cluster.
        """
        for invoker in self.config_ctx.get_remote_server_invoker_list():
            self._invokers[invoker.server_name] = invoker

# ################################################################################################################################

    def invoke_all(
        self,
        service, # type: str
        request = None, # type: any_
        *args, # type: any_
        **kwargs # type: any_
    ) -> 'InvokeAllResult':
        """ Invokes a service on every PID of every server in the cluster,
        returning an InvokeAllResult whose .data holds one entry per PID.
        """
        # First, make sure that we are aware of all the servers currently available
        self.populate_invokers()

        # Response to produce
        out = InvokeAllResult()

        # Each invoker returns a list of sub-responses, one per PID of that server ..
        for invoker in self._invokers.values():
            invoker = cast_('ServerInvoker', invoker)
            per_pid_responses = invoker.invoke_all_pids(service, request, *args, **kwargs)
            out.data.extend(per_pid_responses)

        # .. now we can return the combined result.
        return out
# ################################################################################################################################
# ################################################################################################################################
| 6,839
|
Python
|
.py
| 115
| 52.217391
| 130
| 0.490955
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,124
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/server/rpc/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,125
|
invoker.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/server/rpc/invoker.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Requests
from requests import get as requests_get
# Zato
from zato.client import AnyServiceInvoker
from zato.common.ext.dataclasses import dataclass
from zato.common.typing_ import any_, cast_, dict_field
# ################################################################################################################################
# ################################################################################################################################
if 0:
from requests import Response
from typing import Callable
from zato.client import ServiceInvokeResponse
from zato.common.typing_ import anydict, anylist, callable_, intnone, stranydict, strordictnone
from zato.server.base.parallel import ParallelServer
from zato.server.connection.server.rpc.config import RPCServerInvocationCtx
Callable = Callable
ParallelServer = ParallelServer
RPCServerInvocationCtx = RPCServerInvocationCtx
Response = Response
ServiceInvokeResponse = ServiceInvokeResponse
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class ServerInvocationResult:
    """ The overall outcome of invoking one server, possibly covering multiple PIDs.
    """

    # Whether the invocation as a whole succeeded
    is_ok: 'bool' = False

    # Whether any payload was returned at all
    has_data: 'bool' = False

    # Responses returned by the server (presumably keyed by PID - confirm against callers)
    data: 'anydict' = dict_field()

    # Error details, if any
    error_info: 'any_' = ''
@dataclass
class PerPIDResponse:
    """ A response from one specific PID of a server.
    """

    # Whether this particular PID's invocation succeeded
    is_ok: 'bool' = False

    # The PID this response came from
    pid: 'int' = 0

    # The actual payload returned by this PID
    pid_data: 'strordictnone' = dict_field()

    # Error details, if any
    error_info: 'any_' = ''
# ################################################################################################################################
# ################################################################################################################################
class ServerInvoker:
    """ A base class for local and remote server invocations.
    """
    def __init__(self, parallel_server:'ParallelServer', cluster_name:'str', server_name:'str') -> 'None':

        # Used by local invocations only - gives access
        # to self.parallel_server.invoke/.invoke_async/.invoke_all_pids.
        self.parallel_server = parallel_server

        self.cluster_name = cluster_name
        self.server_name = server_name

    def invoke(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Synchronous invocation - to be implemented by subclasses.
        """
        raise NotImplementedError(self.__class__)

    def invoke_async(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Asynchronous invocation - to be implemented by subclasses.
        """
        raise NotImplementedError(self.__class__)

    def invoke_all_pids(self, *args:'any_', **kwargs:'any_') -> 'anylist':
        """ All-PIDs invocation - to be implemented by subclasses.
        """
        raise NotImplementedError(self.__class__)
# ################################################################################################################################
# ################################################################################################################################
class LocalServerInvoker(ServerInvoker):
    """ Invokes services directly on the current server, without any network-based RPC.
    """
    def invoke(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Synchronous, in-process invocation on this very server.
        """
        return self.parallel_server.invoke(*args, **kwargs)

# ################################################################################################################################

    def invoke_async(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Asynchronous, in-process invocation on this very server.
        """
        return self.parallel_server.invoke_async(*args, **kwargs)

# ################################################################################################################################

    def invoke_all_pids(self, *args:'any_', **kwargs:'any_') -> 'anylist':
        """ In-process invocation across all the PIDs of this very server.
        """
        return self.parallel_server.invoke_all_pids(*args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
class RemoteServerInvoker(ServerInvoker):
    """ Invokes services on a remote server using RPC.
    """
    # The internal endpoint all remote invocations are sent to
    url_path = '/zato/internal/invoke'

    def __init__(self, ctx:'RPCServerInvocationCtx') -> 'None':

        # There is no local ParallelServer in a remote invoker - only the context object is used.
        super().__init__(
            cast_('ParallelServer', None),
            cast_('str', ctx.cluster_name),
            cast_('str', ctx.server_name),
        )
        self.invocation_ctx = ctx

        # We need to cover both HTTP and HTTPS connections to other servers
        protocol = 'https' if self.invocation_ctx.crypto_use_tls else 'http'

        # These two are used to ping each server right before an actual request is sent - with a short timeout,
        # this lets us quickly discover whether the server is up and running.
        self.ping_address = '{}://{}:{}/zato/ping'.format(protocol, self.invocation_ctx.address, self.invocation_ctx.port)
        self.ping_timeout = 1

        # Build the full address to the remote server
        self.address = '{}://{}:{}'.format(protocol, self.invocation_ctx.address, self.invocation_ctx.port)

        # Credentials to connect to the remote server with
        credentials = (self.invocation_ctx.username, self.invocation_ctx.password)

        # Now, we can build a client to the remote server
        self.invoker = AnyServiceInvoker(self.address, self.url_path, credentials)

# ################################################################################################################################

    def ping(self, ping_timeout:'intnone'=None) -> 'None':
        """ Sends a GET request to the remote server's ping endpoint - raises if it cannot be reached.
        """
        ping_timeout = ping_timeout or self.ping_timeout
        _ = requests_get(self.ping_address, timeout=ping_timeout)

# ################################################################################################################################

    def close(self) -> 'None':
        """ Closes the underlying HTTP session to the remote server.
        """
        self.invoker.session.close()

# ################################################################################################################################

    def _invoke(
        self,
        invoke_func, # type: callable_
        service:'str', # type: str
        request:'any_' = None, # type: any_
        *args:'any_', # type: any_
        **kwargs:'any_' # type: any_
    ) -> 'stranydict | anylist | str | None':
        """ Common path for all remote invocations - optionally pings the server first,
        then calls invoke_func and unwraps the response's .data attribute.
        Returns None if the remote address is not known.
        """
        if not self.invocation_ctx.address:
            logger.info('RPC address not found for %s:%s -> `%r` (%s)',
                self.invocation_ctx.cluster_name,
                self.invocation_ctx.server_name,
                self.address,
                service)
            return

        # Optionally, ping the remote server to quickly find out if it is still available ..
        if self.invocation_ctx.needs_ping:
            ping_timeout = kwargs.get('ping_timeout') or self.ping_timeout
            _ = requests_get(self.ping_address, timeout=ping_timeout)

        # .. actually invoke the server now ..
        response = invoke_func(service, request, *args, **kwargs) # type: ServiceInvokeResponse

        # .. returning only the business payload to the caller.
        response = response.data

        return response

# ################################################################################################################################

    def invoke(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Synchronous invocation on the remote server.
        """
        return self._invoke(self.invoker.invoke, *args, **kwargs)

# ################################################################################################################################

    def invoke_async(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Asynchronous invocation on the remote server.
        """
        return self._invoke(self.invoker.invoke_async, *args, **kwargs)

# ################################################################################################################################

    def invoke_all_pids(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ Invokes a service on all the PIDs of the remote server.
        The top-level response element is skipped by default.
        """
        kwargs['all_pids'] = True
        skip_response_elem = kwargs.pop('skip_response_elem', True)
        return self._invoke(self.invoker.invoke, skip_response_elem=skip_response_elem, *args, **kwargs)
# ################################################################################################################################
# ################################################################################################################################
| 8,897
|
Python
|
.py
| 146
| 54.712329
| 130
| 0.43954
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,126
|
es.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/search/es.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# elasticsearch
from elasticsearch.client import Elasticsearch
# Zato
from zato.server.store import BaseAPI, BaseStore
class ElasticSearchAPI(BaseAPI):
    """ API to obtain ElasticSearch connections through.
    """
    # No additional behaviour - everything is inherited from BaseAPI.
class ElasticSearchConnStore(BaseStore):
    """ Stores connections to ElasticSearch.
    """
    def create_impl(self, config, config_no_sensitive):
        """ Builds a new Elasticsearch client out of a connection definition.
        """
        # config.hosts is a multi-line string with one host per line; send_get_body_as
        # controls how request bodies are transmitted (assumed to match the installed
        # elasticsearch-py version's parameter - confirm).
        return Elasticsearch(config.hosts.splitlines(), timeout=float(config.timeout), send_get_body_as=config.body_as)
| 729
|
Python
|
.py
| 18
| 37.555556
| 119
| 0.759943
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,127
|
solr.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/search/solr.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# pysolr
try:
from pysolr import Solr
except ImportError:
pass
# Zato
from zato.common.util.api import ping_solr
from zato.server.connection.queue import Wrapper
from zato.server.store import BaseAPI, BaseStore
logger = getLogger(__name__)
class SolrWrapper(Wrapper):
    """ A queue-backed wrapper around pysolr.Solr clients for a single connection definition.
    """
    def __init__(self, config):
        # The queue-based Wrapper expects an auth_url attribute - for Solr it is simply the address.
        config.auth_url = config.address
        super(SolrWrapper, self).__init__(config, 'Solr')

    def add_client(self):

        # Make sure everything is OK - presumably raises if Solr cannot be reached (confirm in ping_solr)
        ping_solr(self.config)

        # Create a client now
        self.client.put_client(Solr(self.config.address, timeout=self.config.timeout))
class SolrAPI(BaseAPI):
    """ API to obtain Solr connections through.
    """
    # Note: the docstring previously said "ElasticSearch" - a copy/paste slip from es.py.
class SolrConnStore(BaseStore):
    """ Stores connections to Solr.
    """
    def create_impl(self, _, config_no_sensitive):
        """ Builds a queue-backed Solr wrapper and starts filling its connection queue.
        """
        w = SolrWrapper(config_no_sensitive)
        w.build_queue()
        return w
| 1,216
|
Python
|
.py
| 37
| 28.594595
| 86
| 0.709262
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,128
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/search/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
class SearchAPI:
def __init__(self, es, solr):
self.es = es
self.solr = solr
| 336
|
Python
|
.py
| 10
| 30.2
| 82
| 0.677019
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,129
|
sftp.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/file_client/sftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from functools import wraps
from logging import getLogger
# Zato
from zato.server.connection.file_client.base import BaseFileClient, PathAccessException
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.server.connection.sftp import SFTPIPCFacade, SFTPInfo
SFTPIPCFacade = SFTPIPCFacade
SFTPInfo = SFTPInfo
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
def ensure_path_exists(func):
    """ Decorator for SFTPFileClient methods - raises PathAccessException
    unless the remote path given as the first positional argument exists.
    """
    @wraps(func)
    def inner(self, path, *args, **kwargs):
        # type: (SFTPFileClient, str)
        if self.conn.exists(path):
            return func(self, path, *args, **kwargs)
        raise PathAccessException('Path `{}` could not be accessed'.format(path))
    return inner
# ################################################################################################################################
class SFTPFileClient(BaseFileClient):
    """ Implements remote file operations over SFTP, delegating each call to an SFTPIPCFacade connection.
    """
    def __init__(self, conn, config):
        # type: (SFTPIPCFacade, dict) -> None
        super().__init__(config)
        self.conn = conn

# ################################################################################################################################

    def create_directory(self, path):
        # type: (str) -> None
        self.conn.create_directory(path)

# ################################################################################################################################

    @ensure_path_exists
    def delete_directory(self, path):
        # type: (str) -> None
        self.conn.delete_directory(path)

# ################################################################################################################################

    @ensure_path_exists
    def get(self, path):
        # type: (str) -> str
        return self.conn.read(path)

# ################################################################################################################################

    @ensure_path_exists
    def store(self, path, data):
        # type: (str, object) -> None
        # NOTE(review): the decorator requires `path` to already exist remotely,
        # which means store() cannot create new files - confirm this is intended.
        self.conn.write(data, path)

# ################################################################################################################################

    @ensure_path_exists
    def delete_file(self, path):
        # type: (str) -> None
        self.conn.delete_file(path)

# ################################################################################################################################

    @ensure_path_exists
    def list(self, path):
        """ Returns a list of directories and files for input path.
        """
        # type: (str) -> dict
        result = self.conn.list(path)

        file_list = []
        directory_list = []

        out = {
            'file_list': file_list,
            'directory_list': directory_list
        }

        # Split the raw listing into files and directories
        for item in result: # type: SFTPInfo
            if item.is_file:
                file_list.append(item.to_dict(False))
            else:
                directory_list.append(item.to_dict(False))

        # Sort both lists by name, in place
        self.sort_result(out)

        return out

# ################################################################################################################################

    def path_exists(self, path):
        # type: (str) -> bool
        return self.conn.exists(path)

# ################################################################################################################################

    def ping(self):
        # Delegates the liveness check to the underlying connection
        return self.conn.ping()

# ################################################################################################################################

    def close(self):
        # Not applicable
        pass
# ################################################################################################################################
# ################################################################################################################################
| 4,715
|
Python
|
.py
| 90
| 46.322222
| 130
| 0.30377
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,130
|
api.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/file_client/api.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from .ftp import FTPFileClient
# For pyflakes
FTPFileClient = FTPFileClient
| 233
|
Python
|
.py
| 9
| 24.444444
| 64
| 0.745455
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,131
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/file_client/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 148
|
Python
|
.py
| 5
| 28.2
| 64
| 0.687943
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,132
|
ftp.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/file_client/ftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Zato
from .base import BaseFileClient
# ################################################################################################################################
# ################################################################################################################################
if 0:
# stdlib
from typing import BinaryIO
# pyfilesystem
from fs.ftpfs import FTPFS
from fs.info import Info
BinaryIO = BinaryIO
FTPFS = FTPFS
Info = Info
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# The 'touch' command cannot be executed for files longer than that many bytes
touch_max_size = 200_000
# ################################################################################################################################
# ################################################################################################################################
class FTPFileClient(BaseFileClient):
    """ Implements remote file operations over FTP, using a pyfilesystem FTPFS object.
    """
    def __init__(self, conn, config):
        # type: (FTPFS, dict) -> None
        super().__init__(config)
        self.conn = conn

# ################################################################################################################################

    def create_directory(self, path):
        # type: (str) -> None
        self.conn.makedir(path)

# ################################################################################################################################

    def delete_directory(self, path):
        # type: (str) -> None
        # Removes the directory along with all of its contents
        self.conn.removetree(path)

# ################################################################################################################################

    def get(self, path):
        # type: (str) -> bytes
        return self.conn.readbytes(path)

# ################################################################################################################################

    def get_as_file_object(self, path, file_object):
        # type: (str, BinaryIO) -> None
        self.conn.download(path, file_object)

# ################################################################################################################################

    def move_file(self, path_from, path_to):
        # type: (str, str) -> None
        self.conn.move(path_from, path_to)

# ################################################################################################################################

    def store(self, path, data):
        # type: (str, object) -> None
        # Text input is first encoded using the connection's configured encoding
        if not isinstance(data, bytes):
            data = data.encode(self.config['encoding'])

        self.conn.writebytes(path, data)

# ################################################################################################################################

    def delete_file(self, path):
        # type: (str) -> None
        self.conn.remove(path)

# ################################################################################################################################

    def list(self, path):
        """ Returns a list of directories and files for input path.
        """
        # type: (str) -> dict

        # Response to produce
        out = {
            'directory_list': [],
            'file_list': [],
        }

        # Call the remote resource
        result = self.conn.scandir(path)

        # Process all items produced
        for item in result: # type: Info
            elem = {
                'name': item.name,
                'size': item.size,
                'is_dir': item.is_dir,
                'last_modified': item.modified,
                'has_touch': item.size < touch_max_size
            }
            if item.is_dir:
                out['directory_list'].append(elem)
            else:
                out['file_list'].append(elem)

        # Sort all items by name
        self.sort_result(out)

        # Return the response to our caller
        return out

# ################################################################################################################################

    def touch(self, path):
        """ Touches a remote file by overwriting its contents with itself.
        """
        with self.conn._lock:

            # Get the current size ..
            size = self.conn.getsize(path)

            # .. make sure the file is not too big ..
            if size > touch_max_size:
                raise ValueError('File `{}` is too big for the touch command; size={}; max={}'.format(
                    path, size, touch_max_size))

            # .. read all data in ..
            # (Fixed: this used to reference a bare `conn` name that only exists when the module
            # is run as a script, which made touch raise NameError in library use.)
            with self.conn.openbin(path) as f:
                data = f.read(size)

            # .. and write it under the same path, effectively merely changing its modification time.
            with self.conn.openbin(path, 'w') as f:
                f.write(data)

# ################################################################################################################################

    def path_exists(self, path):
        # type: (str) -> bool
        return self.conn.exists(path)

# ################################################################################################################################

    def ping(self):
        # The root directory always exists, so checking it doubles as a connectivity test
        return self.path_exists('.')

# ################################################################################################################################

    def close(self):
        self.conn.close()
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':

    # pyfilesystem
    from fs.ftpfs import FTPFS

    # Connection details of a local, development-only FTP server
    host = 'localhost'
    user = 'abc'
    password = 'def'
    port = 11021

    conn = FTPFS(host, user, password, port=port)

    config = {
        'encoding': 'utf8'
    }

    client = FTPFileClient(conn, config)

    # client.create_directory('aaa2')
    # client.delete_directory('aaa2')

    path = '/aaa2/abc.txt2'

    client.store(path, 'zzzz')
    client.touch(path)

    result = client.list('/aaa2')

    for item in result['file_list']: # type: dict
        print('ITEM', item)

    # Fixed: FTPFileClient has no .download method - .get is the read-a-file API.
    data = client.get(path)
    print('DATA', data)

    client.delete_file(path)
# ################################################################################################################################
# ################################################################################################################################
| 7,258
|
Python
|
.py
| 145
| 42.834483
| 130
| 0.320562
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,133
|
base.py
|
zatosource_zato/code/zato-server/src/zato/server/connection/file_client/base.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from operator import itemgetter
# ################################################################################################################################
# ################################################################################################################################
class PathAccessException(Exception):
    """ Raised when a given remote path can not be accessed by a file client,
    e.g. when ensure_path_exists finds that the path does not exist remotely.
    """
# ################################################################################################################################
# ################################################################################################################################
class BaseFileClient:
    """ A common base class for protocol-specific remote file clients.
    Subclasses are expected to override each operation method below.
    """
    def __init__(self, config):
        # type: (dict) -> None
        self.config = config

    def sort_result(self, result):
        """ Sorts, in place, both listings of a result dict by each entry's name.
        """
        # type: (dict) -> None
        by_name = itemgetter('name')
        result['directory_list'].sort(key=by_name)
        result['file_list'].sort(key=by_name)

    def create_directory(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def delete_directory(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def get(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def get_as_file_object(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def store(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def move_file(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def delete_file(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def list(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def touch(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def path_exists(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def ping(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')

    def close(self, *args, **kwargs):
        raise NotImplementedError('Must be implemented by subclasses')
# ################################################################################################################################
# ################################################################################################################################
| 2,774
|
Python
|
.py
| 48
| 52.145833
| 130
| 0.492786
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,134
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Zato
from zato.common.api import GENERIC
from zato.common.odb.model import GenericConn as ModelGenericConn
from zato.common.util.api import get_sa_model_columns
# ################################################################################################################################
# All the columns declared on the GenericConn SQLAlchemy model
columns_gen_conn = set(get_sa_model_columns(ModelGenericConn))

# Business-level attribute names: the same as the SQL columns except that the
# generic opaque-data column is replaced with a plain 'opaque' attribute.
attrs_gen_conn = set(columns_gen_conn)
attrs_gen_conn.remove(GENERIC.ATTR_NAME)
attrs_gen_conn.add('opaque')
attrs_gen_conn = tuple(attrs_gen_conn)
# ################################################################################################################################
| 795
|
Python
|
.py
| 16
| 48.25
| 130
| 0.515544
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,135
|
connection.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/connection.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# Bunch
from bunch import bunchify
# Zato
from zato.common.api import GENERIC
from zato.common.json_internal import dumps, loads
from zato.server.generic import attrs_gen_conn
# ################################################################################################################################
class GenericConnection:
    """ An individual business-level (not SQL one) representation of a generic connection.
    """
    __slots__ = attrs_gen_conn

    def __init__(self):

        # Core identity
        self.id = None
        self.name = ''
        self.type_ = ''
        self.is_active = False
        self.is_internal = False

        # Network and runtime details
        self.cache_expiry = None
        self.address = None
        self.port = None
        self.timeout = None
        self.data_format = None

        # Anything that does not map to a dedicated attribute goes here
        self.opaque = {}

        # Direction flags
        self.is_channel = False
        self.is_outconn = False

        self.version = None
        self.extra = None
        self.pool_size = None

        # Credentials
        self.username = None
        self.username_type = None
        self.secret = ''
        self.secret_type = None
        self.sec_use_rbac = False

        # Related objects
        self.conn_def_id = None
        self.cache_id = None
        self.cluster_id = None

# ################################################################################################################################

    @staticmethod
    def from_dict(data, skip=None):
        """ Builds a GenericConnection out of a plain dict - keys without
        a matching slot attribute are stored in .opaque instead.
        """
        conn = GenericConnection()
        skip = skip or []
        for key in sorted(data):
            if key in skip:
                continue
            value = data[key]
            try:
                setattr(conn, key, value)
            except AttributeError:
                # Not a declared attribute - keep it in the opaque bag
                conn.opaque[key] = value
        return conn

# ################################################################################################################################

    def to_dict(self, needs_bunch=False):
        """ Serializes the connection to a dict, flattening opaque attributes into it. """
        out = {}
        for name in self.__slots__:
            if name == 'opaque':
                continue
            value = getattr(self, name)
            if isinstance(value, bytes):
                value = value.decode('utf8')
            out[name] = value
        out.update(self.opaque)
        return bunchify(out) if needs_bunch else out

# ################################################################################################################################

    @staticmethod
    def from_model(data):
        """ Builds a GenericConnection out of an SQL model (or Bunch) object. """
        instance = GenericConnection()

        opaque_value = getattr(data, GENERIC.ATTR_NAME, None)
        if opaque_value:
            instance.opaque.update(loads(opaque_value))

        for name in instance.__slots__:
            if name == 'opaque':
                continue
            setattr(instance, name, getattr(data, name, '<no-value-given-{}>'.format(name)))

        return instance

    from_bunch = from_model

# ################################################################################################################################

    def to_sql_dict(self, needs_bunch=False, skip=None):
        """ Serializes the connection to a dict matching SQL columns,
        i.e. opaque attributes are dumped to the generic opaque column.
        """
        out = {}
        skip = skip or []
        for name in self.__slots__:
            if name in skip:
                continue
            if name == 'opaque':
                out[GENERIC.ATTR_NAME] = dumps(self.opaque)
            else:
                out[name] = getattr(self, name)
        return bunchify(out) if needs_bunch else out
# ################################################################################################################################
| 3,653
|
Python
|
.py
| 91
| 31.43956
| 130
| 0.443879
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,136
|
channel_sftp.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/impl/channel_sftp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
import os
# Bunch
from bunch import bunchify
# sh
import sh
# Zato
from zato.common.model import SFTPChannel as SFTPChannelModel
# ################################################################################################################################
log_format = '%(asctime)s - %(levelname)s - %(process)d:%(threadName)s - %(name)s:%(lineno)d - %(message)s'
logging.basicConfig(level=logging.DEBUG, format=log_format)
logger = logging.getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class SFTPServer:
    """ Wraps an external SFTP server binary (e.g. Dropbear), building its command line
    out of an SFTPChannel model and running it in the foreground.
    """
    def __init__(self, logger, model):
        self.logger = logger # type: logging.Logger
        self.model = model   # type: SFTPChannelModel
        self.command = self.get_command()

# ################################################################################################################################

    def get_command(self):
        """ Returns a reusable sh.Command object that will start an SFTP server when invoked.
        """
        # A list of arguments that will be added to the base command
        args = []

        # Disable local port forwarding
        args.append('-j')

        # Disable remote port forwarding
        args.append('-k')

        # Disable password logins for root
        args.append('-g')

        # Disable root logins
        args.append('-w')

        # Log to stdout
        args.append('-E')

        # Do not fork into background
        args.append('-F')

        # Idle timeout
        args.append('-I {}'.format(self.model.idle_timeout))

        # Keep-alive timeout
        args.append('-K {}'.format(self.model.keep_alive_timeout))

        # Bind address
        args.append('-p')
        args.append(self.model.address)

        # Host key to use
        args.append('-r')
        args.append(self.model.host_key)

        # Base command to build additional arguments into
        command = getattr(sh, self.model.sftp_command)

        # Bug fix: the original version called command(*args) here, which started the server
        # already in __init__ and returned None implicitly, so serve_forever crashed trying
        # to call None. Baking instead returns a reusable command with the arguments bound.
        return command.bake(*args)

# ################################################################################################################################

    def serve_forever(self):
        """ Runs the SFTP server in the foreground until it exits. """
        self.logger.info('Starting SFTP server `%s`', self.command)
        self.command()
# ################################################################################################################################
# ################################################################################################################################
class SFTPChannel:
    """ Represents a particular SFTP channel along with its configuration, users and connected clients.
    """
    def __init__(self, config):
        # type: (dict) -> None
        self.config = config
        self.logger = logging.getLogger()
        self.model = self._get_model_from_config(self.config)
        # NOTE(review): this passes the module-level `logger`, not self.logger - confirm intended
        self.server = SFTPServer(logger, self.model)

# ################################################################################################################################

    def start(self):
        # Blocks until the underlying server process exits
        self.server.serve_forever()

# ################################################################################################################################

    def _get_model_from_config(self, config):
        # type: (dict) -> SFTPChannelModel

        # For dotted-attribute access
        config = bunchify(config)

        # Use expected data types in configuration
        config.idle_timeout = int(config.idle_timeout)
        config.keep_alive_timeout = int(config.keep_alive_timeout)

        # Resolve home directories
        config.host_key = os.path.expanduser(config.host_key)

        # Return a Python-level configuration object
        return SFTPChannelModel.from_dict(config)
# ################################################################################################################################
# ################################################################################################################################
def main():
    """ Runs a sample SFTP channel with a hard-coded configuration (for manual testing). """
    channel_config = bunchify({
        'id': 1,
        'name': 'My FTP channel',
        'address': '0.0.0.0:33022',
        'service_name': 'helpers.raw-request-logger',
        'topic_name': None,
        'idle_timeout': '300',
        'keep_alive_timeout': '20',
        'sftp_command': 'dropbear',
        'host_key': '~/tmp/mykey.txt',
    })
    SFTPChannel(channel_config).start()
# ################################################################################################################################
# Run the sample channel when executed directly as a script
if __name__ == '__main__':
    _ = main()
# ################################################################################################################################
| 5,119
|
Python
|
.py
| 108
| 41
| 130
| 0.412641
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,137
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/impl/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
| 154
|
Python
|
.py
| 5
| 29.4
| 64
| 0.687075
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,138
|
def_kafka.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/def_kafka.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# PyKafka
try:
from pykafka import KafkaClient, SslConfig # type: ignore
except ImportError:
has_kafka = False
else:
has_kafka = True
# Zato
from zato.common.typing_ import cast_
from zato.server.connection.wrapper import Wrapper
# ################################################################################################################################
# ################################################################################################################################
if 0:
from pykafka.broker import Broker # type: ignore
Broker = Broker
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class DefKafkaWrapper(Wrapper):
    """ Wraps a Kafka connection client.
    """
    _impl: 'KafkaClient'  # The underlying pykafka client, built lazily in _init_client
    wrapper_type = 'Kafka definition'

# ################################################################################################################################

    def _init_client(self) -> 'None':
        """ Builds the underlying KafkaClient from self.config and pings it;
        a no-op if the client is already connected.
        """
        with self.update_lock:

            if self.is_connected:
                return

            # TLS is optional
            if self.config.is_tls_enabled:
                tls_config = SslConfig(**{
                    'certfile': self.config.tls_cert_file,
                    'keyfile': self.config.tls_private_key_file,
                    'password': self.config.tls_pem_passphrase,
                    'cafile': self.config.tls_ca_certs_file,
                })
            else:
                tls_config = None

            # Our server list needs to be reformatted in accordance with what KafkaClient expects
            # and it may be turned into a Kafka or ZooKeeper server list.
            server_list = self.config.server_list.splitlines()
            server_list = ','.join(server_list)

            # The same server list serves as either direct broker addresses
            # or ZooKeeper addresses, depending on configuration.
            if self.config.should_use_zookeeper:
                hosts = None
                zookeeper_hosts = server_list
            else:
                hosts = server_list
                zookeeper_hosts = None

            # Timeouts are configured in seconds but pykafka expects milliseconds
            client_config = {
                'hosts': hosts,
                'zookeeper_hosts': zookeeper_hosts,
                'socket_timeout_ms': self.config.socket_timeout * 1000,
                'offsets_channel_socket_timeout_ms': self.config.offset_timeout * 1000,
                'use_greenlets': True,
                'exclude_internal_topics': self.config.should_exclude_internal_topics,
                'source_address': self.config.source_address or '',
                'ssl_config': tls_config,
                'broker_version': self.config.broker_version,
            }

            # Create the actual connection object
            self._impl = KafkaClient(**client_config)

            # Confirm the connection was established
            self.ping()

            # We can assume we are connected now
            self.is_connected = True

# ################################################################################################################################

    def get(self) -> 'KafkaClient':
        """ Returns the underlying pykafka client. """
        return self._impl

# ################################################################################################################################

    def _init_impl(self) -> 'None':
        """ Entry point called by the base Wrapper - builds the client,
        logging and re-raising on failure. Does nothing if pykafka is not installed.
        """
        if not has_kafka:
            return

        try:
            self._init_client()
        except Exception:
            logger.warning('Could not build `%s` client to `%s`; e:`%s`', self.wrapper_type, self.config.name, format_exc())
            raise

# ################################################################################################################################

    def _delete(self) -> 'None':
        """ Disconnects from each broker individually, logging (not raising) per-broker failures. """
        for elem in self._impl.brokers.values():
            elem = cast_('Broker', elem)
            try:
                elem._connection.disconnect()
            except Exception:
                logger.warning('Could not disconnect `%s` from `%r`, e:`%s`', elem, self.config, format_exc())

# ################################################################################################################################

    def _ping(self) -> 'None':
        # Fetching API versions requires a round-trip to the cluster,
        # which is what makes it work as a connectivity check.
        self._impl.cluster.fetch_api_versions()
# ################################################################################################################################
# ################################################################################################################################
| 5,131
|
Python
|
.py
| 100
| 42.03
| 130
| 0.383877
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,139
|
channel_file_transfer.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/channel_file_transfer.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Zato
from zato.server.connection.wrapper import Wrapper
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class ChannelFileTransferWrapper(Wrapper):
    """ Represents a file transfer channel.
    """
    needs_self_client = False
    wrapper_type = 'File transfer channel'
    build_if_not_active = True

    def __init__(self, *args, **kwargs):
        super(ChannelFileTransferWrapper, self).__init__(*args, **kwargs)
        # There is no underlying connection object for this wrapper
        self._impl = None

# ################################################################################################################################

    def _init_impl(self):
        """ Creates the channel's file observer and starts it. """
        with self.update_lock:

            transfer_api = self.server.worker_store.file_transfer_api

            # Create a new observer and start it immediately afterwards
            transfer_api.create(self.config)
            transfer_api.start_observer(self.config.name)

            # We can assume we are done building the channel now
            self.is_connected = True

# ################################################################################################################################

    def delete(self):
        """ This is overridden from Wrapper.delete because we do not have self._impl.
        """
        self.server.worker_store.file_transfer_api.delete(self.config)

# ################################################################################################################################

    def _ping(self):
        # There is nothing to ping for a file transfer channel
        pass
# ################################################################################################################################
# ################################################################################################################################
| 2,391
|
Python
|
.py
| 42
| 51.52381
| 130
| 0.371134
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,140
|
cloud_confluence.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/cloud_confluence.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.typing_ import cast_
from zato.server.connection.confluence_ import ConfluenceClient
from zato.server.connection.queue import Wrapper
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import stranydict
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class _ConfluenceClient(ConfluenceClient):
    """ A ConfluenceClient subclass configured from a Zato connection definition dict. """
    def __init__(self, config:'stranydict') -> 'None':
        super().__init__(
            zato_address = config['address'],
            zato_api_version = config['api_version'],
            zato_username = config['username'],
            zato_token = config['secret'],
            zato_is_cloud = config['is_cloud'],
        )

    def ping(self):
        # A plain request doubles as a connectivity check
        return self.request()
# ################################################################################################################################
# ################################################################################################################################
class CloudConfluenceWrapper(Wrapper):
    """ Wraps a queue of connections to Confluence.
    """
    def __init__(self, config:'stranydict', server) -> 'None':
        # The queue wrapper pings auth_url, which for Confluence is simply its address
        config['auth_url'] = config['address']
        super(CloudConfluenceWrapper, self).__init__(config, 'Confluence', server)

# ################################################################################################################################

    def add_client(self):
        """ Builds a new Confluence client and adds it to the queue, logging any errors. """
        try:
            self.client.put_client(_ConfluenceClient(self.config))
        except Exception:
            logger.warning('Caught an exception while adding a Confluence client (%s); e:`%s`',
                self.config['name'], format_exc())

# ################################################################################################################################

    def ping(self):
        """ Pings Confluence with a client borrowed from the queue. """
        with self.client() as client:
            cast_('_ConfluenceClient', client).ping()

# ################################################################################################################################

    def delete(self, ignored_reason=None):
        # Clients are managed and cleaned up by the queue itself
        pass
# ################################################################################################################################
# ################################################################################################################################
| 3,402
|
Python
|
.py
| 58
| 53.086207
| 130
| 0.31628
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,141
|
channel_hl7_mllp.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/channel_hl7_mllp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Bunch
from bunch import bunchify
# Zato
from zato.common.api import GENERIC
from zato.common.util.api import spawn_greenlet
from zato.hl7.mllp.server import HL7MLLPServer
from zato.server.connection.wrapper import Wrapper
# ###############################################################################################################################
# ###############################################################################################################################
logger = getLogger(__name__)
# ###############################################################################################################################
# ###############################################################################################################################
# Audit-log container type shared by all HL7 MLLP channels
_audit_log_type = GENERIC.CONNECTION.TYPE.CHANNEL_HL7_MLLP
# ################################################################################################################################
# ################################################################################################################################
class ChannelHL7MLLPWrapper(Wrapper):
    """ Represents an HL7 MLLP channel.
    """
    needs_self_client = False
    wrapper_type = 'HL7 MLLP channel'
    build_if_not_active = True

    def __init__(self, *args, **kwargs):
        super(ChannelHL7MLLPWrapper, self).__init__(*args, **kwargs)
        self._impl = None # type: HL7MLLPServer

# ################################################################################################################################

    def _init_impl(self):
        """ Builds the HL7 MLLP server from self.config, starts it in a greenlet
        and sets up its audit log.
        """
        # Zato
        from zato.common.util.api import hex_sequence_to_bytes

        with self.update_lock:

            # Unwrap the expected bytes sequences
            config = bunchify({
                'id': self.config.id,
                'name': self.config.name,
                'address': self.config.address,
                'service_name': self.config.service,
                'max_msg_size': self.config.max_msg_size,
                'read_buffer_size': self.config.read_buffer_size,

                # Convert to seconds from milliseconds
                # NOTE(review): the comment above says ms -> s but the code divides by 100.0;
                # a true ms -> s conversion would be / 1000.0 - confirm which unit
                # recv_timeout is actually configured in.
                'recv_timeout': self.config.recv_timeout / 100.0,

                'logging_level': self.config.logging_level,
                'should_log_messages': self.config.should_log_messages,
                'start_seq': hex_sequence_to_bytes(self.config.start_seq),
                'end_seq': hex_sequence_to_bytes(self.config.end_seq),
                'is_audit_log_sent_active': self.config.get('is_audit_log_sent_active'),
                'is_audit_log_received_active': self.config.get('is_audit_log_received_active'),
            })

            # Create a server ..
            self._impl = HL7MLLPServer(config, self.server.invoke, self.server.audit_log)

            # .. start the server in a new greenlet, waiting a moment to confirm that it runs ..
            spawn_greenlet(self._impl.start)

            # .. and set up audit log.
            self.server.set_up_object_audit_log_by_config(_audit_log_type, self.config.id, self.config, False)

            # We can assume we are done building the channel now
            self.is_connected = True

# ################################################################################################################################

    def _delete(self):
        # Only relevant if the server was actually built
        if self._impl:

            # Clear the audit log ..
            self.server.audit_log.delete_container(_audit_log_type, self.config.id)

            # .. and stop the connection.
            self._impl.stop()

# ################################################################################################################################

    def _ping(self):
        # There is nothing to ping for an MLLP channel
        pass
# ################################################################################################################################
# ################################################################################################################################
| 4,308
|
Python
|
.py
| 74
| 50.027027
| 130
| 0.415436
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,142
|
cloud_microsoft_365.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/cloud_microsoft_365.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.typing_ import cast_
from zato.server.connection.cloud.microsoft_365 import Microsoft365Client
from zato.server.connection.queue import Wrapper
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_
from O365 import Account as Office365Account
Office365Account = Office365Account
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class CloudMicrosoft365Wrapper(Wrapper):
    """ Wraps a queue of connections to Microsoft 365.
    """
    def __init__(self, config:'any_', server:'any_') -> 'None':
        # The queue wrapper pings auth_url, which for Microsoft 365 is simply its address
        config['auth_url'] = config['address']
        super(CloudMicrosoft365Wrapper, self).__init__(config, 'Microsoft 365', server)

# ################################################################################################################################

    def add_client(self):
        """ Builds a new Microsoft 365 client and adds it to the queue, logging any errors. """
        try:
            self.client.put_client(Microsoft365Client(self.config))
        except Exception:
            logger.warning('Caught an exception while adding a Microsoft 365 client (%s); e:`%s`',
                self.config['name'], format_exc())

# ################################################################################################################################

    def ping(self):
        """ Pings Microsoft 365 with a client borrowed from the queue. """
        with self.client() as client:
            cast_('Microsoft365Client', client).ping()
# ################################################################################################################################
# ################################################################################################################################
| 2,638
|
Python
|
.py
| 44
| 55.227273
| 130
| 0.337471
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,143
|
outconn_hl7_mllp.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn_hl7_mllp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.hl7.mllp.client import HL7MLLPClient
from zato.server.connection.queue import Wrapper
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class _HL7MLLPConnection:
    """ A thin facade over an HL7MLLPClient built from a connection config dict. """
    def __init__(self, config):
        self.impl = HL7MLLPClient(config)

    def invoke(self, data):
        # type: (str) -> str
        """ Sends data to the remote MLLP server and returns its response. """
        response = self.impl.send(data)
        return response
# ################################################################################################################################
# ################################################################################################################################
class OutconnHL7MLLPWrapper(Wrapper):
    """ Wraps a queue of connections to HL7 MLLP servers.
    """
    def __init__(self, config, server):
        # The queue wrapper pings auth_url, which for MLLP is simply the TCP address
        config.auth_url = config.address
        super(OutconnHL7MLLPWrapper, self).__init__(config, 'HL7 MLLP', server)

    def add_client(self):
        """ Builds a new HL7 MLLP connection and adds it to the queue, logging any errors. """
        try:
            self.client.put_client(_HL7MLLPConnection(self.config))
        except Exception:
            logger.warning('Caught an exception while adding an HL7 MLLP client (%s); e:`%s`',
                self.config.name, format_exc())

    def delete(self, ignored_reason=None):
        # Clients are managed and cleaned up by the queue itself
        pass
# ################################################################################################################################
# ################################################################################################################################
| 2,233
|
Python
|
.py
| 41
| 49.585366
| 130
| 0.349104
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,144
|
cloud_jira.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/cloud_jira.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.typing_ import cast_
from zato.server.connection.jira_ import JiraClient
from zato.server.connection.queue import Wrapper
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from requests import Response
from zato.common.typing_ import any_, stranydict, strnone
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class _JiraClient(JiraClient):

    def __init__(self, config:'stranydict') -> 'None':
        """ Builds the underlying Jira client out of a Zato connection definition dict.
        """
        # Map our configuration keys to the client's constructor parameters.
        client_kwargs = {
            'zato_api_version': config['api_version'],
            'zato_address':     config['address'],
            'zato_username':    config['username'],
            'zato_token':       config['secret'],
            'zato_is_cloud':    config['is_cloud'],
        }
        super().__init__(**client_kwargs)

    def ping(self):
        """ Confirms connectivity by issuing a request to the remote Jira server.
        """
        out:'Response' = self.request()
        return out
# ################################################################################################################################
# ################################################################################################################################
class CloudJiraWrapper(Wrapper):
    """ Wraps a queue of connections to Jira.
    """
    def __init__(self, config:'Bunch', server:'ParallelServer') -> 'None':
        # Jira connections authenticate against the same address they communicate with.
        config['auth_url'] = config['address']
        super(CloudJiraWrapper, self).__init__(config, 'Jira', server)

# ################################################################################################################################

    def add_client(self):
        # Build a new client and enqueue it - failures are logged rather than propagated
        # so that one bad connection does not break the whole queue.
        try:
            _ = self.client.put_client(_JiraClient(self.config))
        except Exception:
            logger.warning('Caught an exception while adding a Jira client (%s); e:`%s`',
                self.config['name'], format_exc())

# ################################################################################################################################

    def ping(self):
        # Borrow a client from the queue and use it to ping the remote end.
        with self.client() as client:
            client = cast_('_JiraClient', client)
            _:'any_' = client.ping()

# ################################################################################################################################

    def delete(self, ignored_reason:'strnone'=None):
        # Required by the API but there is nothing to clean up explicitly.
        pass
# ################################################################################################################################
# ################################################################################################################################
| 3,546
|
Python
|
.py
| 62
| 51.596774
| 130
| 0.326307
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,145
|
outconn_mongodb.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn_mongodb.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from uuid import uuid4
# Bunch
from bunch import bunchify
# PyMongo
from pymongo import MongoClient
# Zato
from zato.server.connection.wrapper import Wrapper
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class OutconnMongoDBWrapper(Wrapper):
    """ Wraps a MongoDB connection client.
    """
    wrapper_type = 'MongoDB connection'

    def __init__(self, *args, **kwargs):
        super(OutconnMongoDBWrapper, self).__init__(*args, **kwargs)

        # The underlying PyMongo client; created lazily in _init_impl.
        self._impl = None # type: MongoClient

# ################################################################################################################################

    def _init_impl(self):
        """ Builds the underlying MongoClient out of self.config and confirms connectivity with a ping.
        """
        with self.update_lock:

            # Coerce write_to_replica to an int, as required by the 'w' option below.
            # bool is excluded explicitly because it is a subclass of int.
            write_to_replica = self.config.write_to_replica
            if not isinstance(write_to_replica, int) or isinstance(write_to_replica, bool):
                try:
                    write_to_replica = int(write_to_replica)
                except(ValueError, TypeError):
                    write_to_replica = 0

            # Configuration of the underlying client
            client_config = bunchify({
                'host': self.config.server_list.splitlines(),
                'tz_aware': self.config.is_tz_aware,
                'connect': True,
                'maxPoolSize': self.config.pool_size_max,
                'minPoolSize': 0,

                # Timeouts are configured in seconds but PyMongo expects milliseconds.
                'maxIdleTimeMS': self.config.max_idle_time * 1000,
                'socketTimeoutMS': self.config.socket_timeout * 1000,
                'connectTimeoutMS': self.config.connect_timeout * 1000,
                'serverSelectionTimeoutMS': self.config.server_select_timeout * 1000,
                'waitQueueTimeoutMS': self.config.wait_queue_timeout * 1000,
                'heartbeatFrequencyMS': self.config.hb_frequency * 1000,

                'appname': self.config.app_name,
                'retryWrites': self.config.should_retry_write,
                'zlibCompressionLevel': self.config.zlib_level,
                'w': write_to_replica,
                'wTimeoutMS': self.config.write_timeout,
                'journal': self.config.is_write_journal_enabled,
                'fsync': self.config.is_write_fsync_enabled,
                'replicaSet': self.config.replica_set or None,
                'readPreference': self.config.read_pref_type,
                'readPreferenceTags': self.config.read_pref_tag_list or '',
                'maxStalenessSeconds': self.config.read_pref_max_stale,
                'username': self.config.username,

                # If no secret is configured, generate a random placeholder because
                # the underlying library still requires some password to be given.
                'password': self.config.secret or self.config.get('password') or '{}.{}'.format(self.__class__.__name__, uuid4().hex),
                'authSource': self.config.auth_source,
                'authMechanism': self.config.auth_mechanism,
            })

            # The password is stored encrypted and needs to be decrypted before use.
            client_config.password = self.server.decrypt(client_config.password) # type: ignore

            # Optional elements of the configuration below ..
            if self.config.document_class:
                client_config.document_class = self.config.document_class

            if self.config.compressor_list:
                client_config.compressors = self.config.compressor_list

            # .. TLS settings apply as a group, only if TLS is enabled at all.
            if self.config.is_tls_enabled:
                client_config.ssl = self.config.is_tls_enabled
                client_config.ssl_certfile = self.config.tls_cert_file
                client_config.ssl_keyfile = self.config.tls_private_key_file
                client_config.ssl_pem_passphrase = self.config.tls_pem_passphrase
                client_config.ssl_cert_reqs = self.config.tls_validate
                client_config.ssl_ca_certs = self.config.tls_ca_certs_file
                client_config.ssl_crlfile = self.config.tls_crl_file
                client_config.ssl_match_hostname = self.config.is_tls_match_hostname_enabled

            # Create the actual connection object
            self._impl = MongoClient(**client_config)

            # Confirm the connection was established
            self.ping()

            # We can assume we are connected now
            self.is_connected = True

# ################################################################################################################################

    def _delete(self):
        # Close the client, releasing all of its pooled connections.
        self._impl.close()

# ################################################################################################################################

    def _ping(self):
        # 'ismaster' is a cheap command that any server member responds to.
        self._impl.admin.command('ismaster')
# ################################################################################################################################
# ################################################################################################################################
| 5,323
|
Python
|
.py
| 92
| 46.804348
| 134
| 0.492311
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,146
|
cloud_salesforce.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/cloud_salesforce.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.typing_ import cast_
from zato.server.connection.salesforce import SalesforceClient
from zato.server.connection.queue import Wrapper
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import stranydict
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class _SalesforceClient:

    def __init__(self, config:'stranydict') -> 'None':

        # Build the actual connection object out of its configuration dict
        self.impl = SalesforceClient.from_config(config)

        # Expose the underlying client's methods directly on this wrapper
        for method_name in ('get', 'post', 'ping'):
            setattr(self, method_name, getattr(self.impl, method_name))
# ################################################################################################################################
# ################################################################################################################################
class CloudSalesforceWrapper(Wrapper):
    """ Wraps a queue of connections to Salesforce.
    """
    def __init__(self, config:'stranydict', server) -> 'None':
        # Salesforce authenticates against the same address it serves requests from.
        config['auth_url'] = config['address']
        super(CloudSalesforceWrapper, self).__init__(config, 'Salesforce', server)

# ################################################################################################################################

    def add_client(self):
        # Build a new client and enqueue it, logging errors instead of letting them propagate.
        try:
            self.client.put_client(_SalesforceClient(self.config))
        except Exception:
            logger.warning('Caught an exception while adding a Salesforce client (%s); e:`%s`',
                self.config['name'], format_exc())

# ################################################################################################################################

    def ping(self):
        # Take a client off the queue and use it to ping the remote end.
        with self.client() as client:
            client = cast_('_SalesforceClient', client)
            client.ping()
# ################################################################################################################################
# ################################################################################################################################
| 3,124
|
Python
|
.py
| 52
| 55
| 130
| 0.327982
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,147
|
cloud_dropbox.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/cloud_dropbox.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Dropbox
from dropbox import create_session, Dropbox as DropboxClient
# Zato
from zato.common.util.api import parse_extra_into_dict
from zato.common.util.eval_ import as_list
from zato.server.connection.wrapper import Wrapper
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class CloudDropbox(Wrapper):
    """ Wraps a Dropbox connection client.
    """
    wrapper_type = 'Dropbox connection'

    # The base Wrapper requires this attribute to carry an OAuth 2 access token.
    required_secret_attr = 'secret'
    required_secret_label = 'an OAuth 2 access token'

    def __init__(self, *args, **kwargs):
        super(CloudDropbox, self).__init__(*args, **kwargs)

        # The underlying Dropbox SDK client; created lazily in _init_impl.
        self._impl = None # type: DropboxClient

# ################################################################################################################################

    def _init_impl(self):
        """ Builds the underlying Dropbox client out of self.config and confirms connectivity.
        """
        with self.update_lock:

            # Create a pool of at most that many connections
            session = create_session(50)

            # OAuth scopes are configured as a comma-separated string; turn them into a list.
            scope = as_list(self.config.default_scope, ',')

            config = {
                'session': session,
                'user_agent': self.config.user_agent,

                # The access token is stored encrypted and needs to be decrypted before use.
                'oauth2_access_token': self.server.decrypt(self.config.secret),
                'oauth2_access_token_expiration': int(self.config.oauth2_access_token_expiration or 0),
                'scope': scope,
                'max_retries_on_error': int(self.config.max_retries_on_error or 0),
                'max_retries_on_rate_limit': int(self.config.max_retries_on_rate_limit or 0),
                'timeout': int(self.config.timeout),

                # Extra HTTP headers are configured as key=value lines.
                'headers': parse_extra_into_dict(self.config.http_headers),
            }

            # Create the actual connection object
            self._impl = DropboxClient(**config)

            # Confirm the connection was established
            self.ping()

            # We can assume we are connected now
            self.is_connected = True

# ################################################################################################################################

    def _delete(self):
        # Close the client only if it was ever created.
        if self._impl:
            self._impl.close()

# ################################################################################################################################

    def _ping(self):
        # Looking up the current user is a cheap way to confirm both connectivity and credentials.
        self._impl.check_user()
# ################################################################################################################################
# ################################################################################################################################
| 3,218
|
Python
|
.py
| 59
| 46.949153
| 130
| 0.422463
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,148
|
outconn_hl7_fhir.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn_hl7_fhir.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from base64 import b64encode
from logging import getLogger
from traceback import format_exc
# FHIR-py
from fhirpy import SyncFHIRClient
# Zato
from zato.common.api import HL7
from zato.server.connection.queue import Wrapper
from zato.common.typing_ import cast_
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import stranydict, strnone
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# Identifiers of the FHIR authentication schemes supported below
_basic_auth = HL7.Const.FHIR_Auth_Type.Basic_Auth.id
_oauth = HL7.Const.FHIR_Auth_Type.OAuth.id
# ################################################################################################################################
# ################################################################################################################################
class _HL7FHIRConnection(SyncFHIRClient):
    """ A FHIR connection that adds Zato-specific authentication (Basic Auth or OAuth)
    on top of the underlying SyncFHIRClient.
    """

    # The full Zato connection definition this client was built from
    zato_config: 'stranydict'

    def __init__(self, config:'stranydict') -> 'None':

        self.zato_config = config
        self.zato_security_id = self.zato_config.get('security_id') or 0
        self.zato_auth_type = self.zato_config.get('auth_type')

        # This can be built in advance in case we are using Basic Auth
        if self.zato_auth_type == _basic_auth:
            self.zato_basic_auth_header = self.zato_get_basic_auth_header()
        else:
            self.zato_basic_auth_header = None

        address = self.zato_config['address']
        super().__init__(address)

# ################################################################################################################################

    def _build_request_headers(self):
        """ Overrides the parent-class hook to inject an Authorization header
        matching this connection's authentication type.
        """

        # This is constant
        headers = {
            'Accept': 'application/json'
        }

        # This is inherited from the parent class
        if self.extra_headers is not None:
            headers = {**headers, **self.extra_headers}

        # This is already available ..
        if self.zato_auth_type == _basic_auth:
            auth_header = self.zato_basic_auth_header

        # .. while this needs to be dynamically created ..
        elif self.zato_auth_type == _oauth:
            auth_header = self.zato_get_oauth_header()

        else:
            auth_header = None

        # .. now, it can be assigned ..
        if auth_header:
            headers['Authorization'] = auth_header

        # .. and the whole set of headers can be returned.
        return headers

# ################################################################################################################################

    def zato_get_basic_auth_header(self) -> 'str':
        """ Returns a ready-to-use HTTP Basic Auth header value built
        from the configured username and secret.
        """
        username = self.zato_config['username']
        password = self.zato_config['secret']

        auth_header = f'{username}:{password}'
        auth_header = auth_header.encode('ascii')
        auth_header = b64encode(auth_header)
        auth_header = auth_header.decode('ascii')
        auth_header = f'Basic {auth_header}'

        return auth_header

# ################################################################################################################################

    def zato_get_oauth_header(self) -> 'strnone':
        """ Looks up an OAuth header for this connection's security definition;
        may return None if no token is available.
        """
        server = self.zato_config['server'] # type: ParallelServer
        auth_header = server.oauth_store.get_auth_header(self.zato_security_id)
        return auth_header

# ################################################################################################################################

    def zato_ping(self):
        # Reading the server's CapabilityStatement is a cheap, read-only way to confirm connectivity.
        self.execute(path='/CapabilityStatement', method='get')
# ################################################################################################################################
# ################################################################################################################################
class OutconnHL7FHIRWrapper(Wrapper):
    """ Wraps a queue of connections to HL7 FHIR servers.
    """
    def __init__(self, config, server):
        # The FHIR address doubles as the authentication URL and each connection
        # needs access to the server object, e.g. to look up OAuth tokens.
        config.auth_url = config.address
        config.server = server
        super(OutconnHL7FHIRWrapper, self).__init__(config, 'HL7 FHIR', server)

# ################################################################################################################################

    def add_client(self):
        # Build a new connection and enqueue it, logging errors rather than raising them.
        try:
            self.client.put_client(_HL7FHIRConnection(self.config))
        except Exception:
            logger.warning('Caught an exception while adding an HL7 FHIR client (%s); e:`%s`',
                self.config['name'], format_exc())

# ################################################################################################################################

    def ping(self):
        # Use a pooled connection to confirm that the remote end responds.
        with self.client() as client:
            client = cast_('_HL7FHIRConnection', client)
            client.zato_ping()
# ################################################################################################################################
# ################################################################################################################################
| 5,993
|
Python
|
.py
| 106
| 50
| 130
| 0.387064
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,149
|
outconn_im_telegram.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn_im_telegram.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from http.client import OK
from logging import getLogger
from traceback import format_exc
# Bunch
from bunch import bunchify
# Requests
import requests
# Zato
from zato.common.json_internal import loads
from zato.common.util.http_ import get_proxy_config
from zato.server.connection.wrapper import Wrapper
# ################################################################################################################################
if 0:
from requests import Response
Response = Response
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class TelegramClient:
    """ A thin client for the Telegram Bot API, issuing requests over a shared HTTP session.
    """
    def __init__(self, address, token, connect_timeout, invoke_timeout, proxies):
        # type: (str, str, int, int, dict)

        # Embed the bot token directly in the API address template.
        self.address = address.replace('{token}', token)
        self.token = token
        self.connect_timeout = connect_timeout
        self.invoke_timeout = invoke_timeout

        # One session reuses HTTP connections across invocations.
        self.session = requests.Session()
        self.session.proxies = proxies

# ################################################################################################################################

    def _invoke(self, method, data=None, *args, **kwargs):
        """ Issues a POST request to the given Telegram API method. Returns a Bunch
        built from the JSON response or, with to_bunch=False, the raw Response object.
        Raises Exception if the remote end replies with anything other than HTTP 200.
        """
        # Pop our own flag so it is not forwarded to requests.Session.post,
        # which would reject it as an unexpected keyword argument.
        to_bunch = kwargs.pop('to_bunch', True)

        result = self.session.post(self.address.format(method=method), data=data, *args, **kwargs) # type: Response

        if not result.status_code == OK:
            raise Exception(result.text)

        if to_bunch:
            out = loads(result.text)
            return bunchify(out)
        else:
            return result

# ################################################################################################################################

    def invoke(self, *args, **kwargs):
        """ Invokes the Telegram API, logging and re-raising any exception caught.
        """
        try:
            return self._invoke(*args, **kwargs)
        except Exception:
            logger.warning('Could not invoke Telegram API, e:`%s`', format_exc())
            raise

# ################################################################################################################################

    def ping(self):
        # 'getMe' is a no-op Bot API method that merely verifies the token.
        return self.invoke('getMe')
# ################################################################################################################################
# ################################################################################################################################
class OutconnIMTelegramWrapper(Wrapper):
    """ Wraps a Telegram connection client.
    """
    wrapper_type = 'Telegram connection'

    def __init__(self, *args, **kwargs):
        super(OutconnIMTelegramWrapper, self).__init__(*args, **kwargs)

        # The underlying client, built lazily in _init_impl.
        self._impl = None # type: TelegramClient

# ################################################################################################################################

    def _init_impl(self):
        with self.update_lock:

            # Translate our configuration into the client's constructor arguments
            # and create the actual connection object ..
            self._impl = TelegramClient(
                address         = self.config.address,
                connect_timeout = self.config.connect_timeout,
                invoke_timeout  = self.config.invoke_timeout,
                token           = self.config.secret or '<default-empty-telegram-token>',
                proxies         = get_proxy_config(self.config),
            )

            # .. confirm the connection was established ..
            self.ping()

            # .. and note that we are connected now.
            self.is_connected = True

# ################################################################################################################################

    def _delete(self):
        # Close the underlying HTTP session.
        self._impl.session.close()

# ################################################################################################################################

    def _ping(self):
        # Delegate to the client's own ping ('getMe').
        return self._impl.ping()
# ################################################################################################################################
# ################################################################################################################################
| 4,787
|
Python
|
.py
| 90
| 46.355556
| 130
| 0.390249
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,150
|
outconn_im_slack.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn_im_slack.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Slack
from slackclient import SlackClient
# Zato
from zato.common.util.http_ import get_proxy_config
from zato.server.connection.wrapper import Wrapper
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class OutconnIMSlackWrapper(Wrapper):
    """ Wraps a Slack connection client.
    """
    wrapper_type = 'Slack connection'

    def __init__(self, *args, **kwargs):
        super(OutconnIMSlackWrapper, self).__init__(*args, **kwargs)

        # The underlying client, built lazily in _init_impl.
        self._impl = None # type: SlackClient

# ################################################################################################################################

    def _init_impl(self):
        with self.update_lock:

            # Build the client out of its token and optional HTTP proxy settings ..
            self._impl = SlackClient(
                token   = self.config.secret,
                proxies = get_proxy_config(self.config),
            )

            # .. confirm the connection was established ..
            self.ping()

            # .. and note that we are connected now.
            self.is_connected = True

# ################################################################################################################################

    def _delete(self):
        # Close the underlying WebSocket if one was ever opened.
        websocket = self._impl.server.websocket
        if websocket:
            websocket.close()

# ################################################################################################################################

    def _ping(self):
        # 'api.test' reports back whether the call succeeded; treat anything else as an error.
        out = self._impl.api_call('api.test')
        if not out['ok']:
            raise Exception(out['error'])
# ################################################################################################################################
# ################################################################################################################################
| 2,579
|
Python
|
.py
| 49
| 46.265306
| 130
| 0.374551
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,151
|
outconn_ldap.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn_ldap.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2022, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import ssl
from codecs import encode
from logging import getLogger
from traceback import format_exc
from uuid import uuid4
# ldap3
from ldap3 import Connection as LDAPConnection, Server, ServerPool, SYNC, Tls
# Zato
from zato.common.util.api import spawn_greenlet
from zato.server.connection.queue import Wrapper
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.typing_ import any_, stranydict, strdictnone
from zato.server.base.parallel import ParallelServer
Bunch = Bunch
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class ConnectionWrapper:
    """ A context manager that obtains an LDAP connection on entry and unbinds it on exit.
    """
    conn: 'LDAPConnection'

    def __init__(self, client:'LDAPClient') -> 'None':
        self.client = client

    def __enter__(self):
        try:
            self.conn = self.client.connect()
        except Exception:
            # Log which server pool could not be reached before re-raising.
            logger.warning(
                'Could not obtain a connection to `%s` (%s)',
                self.client.config.server_list, self.client.config.name
            )
            raise
        return self.conn

    def __exit__(self, exc_type, exc_value, exc_tb): # type: ignore
        # Release the underlying connection back to the server.
        if self.conn:
            self.conn.unbind()
# ################################################################################################################################
# ################################################################################################################################
class LDAPClient:
    """ A client through which outgoing LDAP messages can be sent.
    """
    def __init__(self, config:'Bunch') -> 'None':
        self.config = config

        # By default, we are not connected anywhere
        self.is_connected = False

        # Initialize in a separate greenlet so as not to block the main one
        # if the remote server is slow to respond.
        _ = spawn_greenlet(self._init, timeout=2)

# ################################################################################################################################

    def _init(self):
        # Try to ping the remote end
        self.ping()

        # If we are here it means that ping succeeded so we can assume the connection's configuration is good
        self.is_connected = True

        # Assign the connection object to our configuration so that upper layers can use it
        self.config['conn'] = self

# ################################################################################################################################

    def get_conn_config(self) -> 'stranydict':
        """ Translates self.config into a dict of keyword arguments for ldap3's Connection.
        """

        # All servers in our pool, even if there is only one
        servers = []

        # TLS is optional
        if self.config.is_tls_enabled:

            # Both values are resolved by name to constants from the ssl module.
            tls_validate = getattr(ssl, self.config.tls_validate)
            tls_version = getattr(ssl, 'PROTOCOL_{}'.format(self.config.tls_version))

            tls_config = {
                'local_private_key_file': self.config.tls_private_key_file or None,
                'local_certificate_file': self.config.tls_cert_file or None,
                'validate': tls_validate or None,
                'version': tls_version,
                'ca_certs_file': self.config.tls_ca_certs_file or None,
                'ciphers': self.config.tls_ciphers,
            }
            tls = Tls(**tls_config)
        else:
            tls = None

        for server_info in self.config.server_list: # type: str

            # Configuration for each server
            server_config = {
                'host': server_info,
                'use_ssl': self.config.is_tls_enabled,
                'get_info': self.config.get_info,
                'connect_timeout': self.config.connect_timeout,
                'mode': self.config.ip_mode,
                'tls': tls
            } # type: stranydict

            # Create a server object and append it to the list given to the pool later on
            servers.append(Server(**server_config))

        # Configuration for the server pool
        pool_config = {
            'servers': servers,
            'pool_strategy': self.config.pool_ha_strategy,
            'active': self.config.pool_max_cycles,
            'exhaust': self.config.pool_exhaust_timeout
        }

        # Create our server pool
        pool = ServerPool(**pool_config)

        # If secret is None, meaning that we do not have it at all yet,
        # e.g. because we have just created this connection, we still need to
        # provide some secret as it is required by the underlying library.
        if self.config.secret is None:
            secret = 'zato.auto.ldap.{}'.format(uuid4().hex)
        else:
            secret = self.config.secret

        # Connection configuration
        conn_config = {
            'server': pool,
            'user': self.config.username,
            'password': secret,
            'auto_bind': self.config.auto_bind,
            'auto_range': self.config.use_auto_range,
            'client_strategy': SYNC,
            'check_names': self.config.should_check_names,
            'collect_usage': self.config.is_stats_enabled,
            'read_only': self.config.is_read_only,
            'pool_name': self.config.pool_name or encode(self.config.name),
            'pool_size': 1,
            'pool_lifetime': self.config.pool_lifetime,
            'return_empty_attributes': self.config.should_return_empty_attrs,
            'pool_keepalive': self.config.pool_keep_alive,
            'raise_exceptions': True,
        }

        # SASL is optional and both of its settings go together.
        if self.config.sasl_mechanism:
            conn_config['sasl_mechanism'] = self.config.sasl_mechanism
            conn_config['sasl_credentials'] = self.config.sasl_credentials

        return conn_config

# ################################################################################################################################

    def connect(self, conn_config:'strdictnone'=None) -> 'LDAPConnection':
        """ Creates a new LDAP connection, binding it if this definition is active.
        """

        # Obtain connection configuration ..
        conn_config = conn_config or self.get_conn_config()

        # .. create the connection object
        conn = LDAPConnection(**conn_config)

        # .. bind only if we are to be active.
        if self.config.is_active:
            conn.bind()

        # Finally, return the connection object
        return conn

# ################################################################################################################################

    def zato_delete_impl(self):
        pass # Not implemented by LDAP connections

# ################################################################################################################################

    def get(self):
        # Returns a context manager yielding a connection that is unbound on exit.
        return ConnectionWrapper(self)

# ################################################################################################################################

    def check_credentials(self, user_data:'any_', secret:'str', raise_on_error:'bool'=True):
        """ Tries to connect with the given credentials; returns True/False
        or re-raises the underlying exception, depending on raise_on_error.
        """
        # Build a new connection definition dictionary with input credentials ..
        conn_config = self.get_conn_config()
        conn_config['user'] = user_data
        conn_config['password'] = secret

        # .. and try to connect to the remote end.
        conn = None
        try:
            conn = self.connect(conn_config)
            conn.abandon(0)
        except Exception:
            if raise_on_error:
                raise
            else:
                return False
        else:
            return True
        finally:
            # Always release the connection, no matter the outcome.
            if conn:
                conn.unbind()

# ################################################################################################################################

    def delete(self):
        # Need for API completeness but it does not do anything
        pass

# ################################################################################################################################

    def ping(self):
        logger.info('Pinging LDAP `%s`', self.config.server_list)
        with self.get() as conn:
            # abandon(0) is a harmless request that still exercises a network round-trip.
            conn.abandon(0)
# ################################################################################################################################
# ################################################################################################################################
class OutconnLDAPWrapper(Wrapper):
    """ Maintains a queue of outgoing connections to LDAP servers.
    """
    def __init__(self, config:'Bunch', server:'ParallelServer') -> 'None':
        # Let the configuration point back to us so clients can reach the wrapper
        config.parent = self
        config.auth_url = config.server_list
        super().__init__(config, 'outgoing LDAP', server)

# ################################################################################################################################

    def ping(self):
        """ Pings LDAP through a connection borrowed from the queue.
        """
        with self.client() as conn:
            conn.ping() # type: ignore

# ################################################################################################################################

    def add_client(self):
        """ Builds a new LDAP client and adds it to the queue, logging any build-time errors.
        """
        try:
            client = LDAPClient(self.config)
        except Exception:
            logger.warning('LDAP client could not be built `%s`', format_exc())
        else:
            _ = self.client.put_client(client)
# ################################################################################################################################
# ################################################################################################################################
| 10,369
|
Python
|
.py
| 205
| 41.731707
| 130
| 0.432815
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,152
|
client_zato.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn/wsx/client_zato.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from threading import current_thread
from traceback import format_exc
# gevent
from gevent import sleep
# Zato
from zato.common.wsx_client import Client as ZatoWSXClientImpl, Config as _ZatoWSXConfigImpl
from zato.common.util.api import new_cid
from zato.server.generic.api.outconn.wsx.common import _BaseWSXClient
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, callable_, strdict, strlist
from zato.server.generic.api.outconn.wsx.base import OutconnWSXWrapper
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class _ZatoWSXClientImpl(ZatoWSXClientImpl):
    """ A thin subclass of the base WSX client that additionally invokes
    an outconn-level callback once a connection has been established.
    """
    def __init__(self, _outcon_wsx_on_connect_cb:'callable_', *args:'any_', **kwargs:'any_') -> 'None':
        self._outcon_wsx_on_connect_cb = _outcon_wsx_on_connect_cb
        super().__init__(*args, **kwargs)

    def on_connected(self) -> 'None':
        # Let the base class do its part first, then notify the outgoing connection wrapper
        super().on_connected()
        self._outcon_wsx_on_connect_cb()
# ################################################################################################################################
# ################################################################################################################################
class ZatoWSXClient(_BaseWSXClient):
    """ A client through which Zato services can be invoked over outgoing WebSocket connections.
    """
    def __init__(
        self,
        server: 'ParallelServer',
        config:'strdict',
        on_connected_cb:'callable_',
        on_message_cb:'callable_',
        on_close_cb:'callable_',
    ) -> 'None':

        # Call our base class first
        super(ZatoWSXClient, self).__init__(
            server,
            config,
            on_connected_cb,
            on_message_cb,
            on_close_cb,
        )

        # Assign for later use
        self.server = server

        # Initialize the underlying client's configuration
        self._zato_client_config = _ZatoWSXConfigImpl()

        # A human-readable client name combining the connection's ID, the current thread and the connection's name
        self._zato_client_config.client_name = 'WSX outconn - {}:{} - {}'.format(
            self.config['id'],
            current_thread().name,
            self.config['name']
        )

        # Callbacks through which the underlying client consults or notifies our parent wrapper
        self._zato_client_config.check_is_active_func = self.check_is_active
        self._zato_client_config.on_outconn_stopped_running_func = self.on_outconn_stopped_running
        self._zato_client_config.on_outconn_connected_func = self.on_outconn_connected

        self._zato_client_config.client_id = 'wsx.out.{}'.format(new_cid(8))
        self._zato_client_config.address = self.config['address']
        self._zato_client_config.on_request_callback = self.on_message_cb
        self._zato_client_config.on_closed_callback = self.on_close_cb
        self._zato_client_config.max_connect_attempts = self.config.get('max_connect_attempts', 1234567890)

        # Credentials are optional
        if self.config.get('username'):
            self._zato_client_config.username = self.config['username']
            self._zato_client_config.secret = self.config['secret']

        self._zato_client = _ZatoWSXClientImpl(self.opened, self.server, self._zato_client_config)

        # Sending a message is the same as invoking the remote end
        self.invoke = self._zato_client.invoke
        self.send = self.invoke

# ################################################################################################################################

    def init(self) -> 'None':
        # Not needed - everything is initialized in __init__ and run_forever.
        pass

# ################################################################################################################################

    def connect(self) -> 'None':
        # Not needed but added for API completeness.
        # The reason it is not needed is that self._zato_client's run_forever will connect itself.
        pass

# ################################################################################################################################

    def delete(self, reason:'str'='') -> 'None':
        """ Deletes the connection - for this client, the same as closing it.
        """
        self.close()

# ################################################################################################################################

    def close(self, reason:'str'='') -> 'None':
        """ Stops the underlying client, passing the close reason along.
        """
        self._zato_client.stop(reason)

# ################################################################################################################################

    def should_keep_running(self):
        """ Returns True as long as the underlying client has not been told to stop.
        """
        return self._zato_client.keep_running

# ################################################################################################################################

    def check_is_connected(self):
        """ Returns True if a TCP-level connection to the remote end currently exists.
        """
        return self._zato_client.is_connected

# ################################################################################################################################

    def check_is_active(self):
        """ Consults the parent wrapper to find out if this connection definition is active.
        """
        parent:'OutconnWSXWrapper' = self.config['parent']
        is_active = parent.check_is_active()
        return is_active

# ################################################################################################################################

    def on_outconn_stopped_running(self):
        """ Notifies the parent wrapper that the underlying client stopped running.
        """
        parent:'OutconnWSXWrapper' = self.config['parent']
        parent.on_outconn_stopped_running()

# ################################################################################################################################

    def on_outconn_connected(self):
        """ Notifies the parent wrapper that the underlying client has connected.
        """
        parent:'OutconnWSXWrapper' = self.config['parent']
        parent.on_outconn_connected()

# ################################################################################################################################

    def get_subscription_list(self) -> 'strlist':
        """ Returns the list of pub/sub topics to subscribe to - a static list from configuration,
        optionally extended dynamically by a user-defined service.
        """
        # This is an initial, static list of topics to subscribe to ..
        subscription_list = (self.config['subscription_list'] or '').splitlines()

        # .. while the rest can be dynamically populated by services.
        on_subscribe_service_name = self.config.get('on_subscribe_service_name')

        if on_subscribe_service_name:
            topic_list = self.config['parent'].on_subscribe_cb(on_subscribe_service_name)
            if topic_list:
                _ = subscription_list.extend(topic_list)

        return subscription_list

# ################################################################################################################################

    def subscribe_to_topics(self) -> 'None':
        """ Subscribes this connection to each of its configured topics, logging per-topic failures.
        """
        subscription_list = self.get_subscription_list()

        if subscription_list:
            logger.info('Subscribing WSX outconn `%s` to `%s`', self.config['name'], subscription_list)

            for topic_name in subscription_list:
                try:
                    self._zato_client.subscribe(topic_name)
                except Exception:
                    logger.warning('Could not subscribe WSX outconn to `%s`, e:`%s`', self.config['name'], format_exc())

# ################################################################################################################################

    def run_forever(self) -> 'None':
        """ Starts the underlying client, waits until it is authenticated and then subscribes to topics.
        """
        try:

            # This will establish an outgoing connection to the remote WSX server.
            # However, this will be still a connection on the level of TCP / WSX,
            # which means that we still need to wait before we can invoke
            # the server with our list of subscriptions below.
            self._zato_client.run()

            # Wait until the client is fully ready
            while not self._zato_client.is_authenticated:

                # Sleep for a moment ..
                sleep(0.1)

                # .. and do not loop anymore if we are not to keep running.
                if not self.should_keep_running():
                    return

            # If we are here, it means that we are both connected and authenticated,
            # so we know that we can try to subscribe to pub/sub topics
            # and we will not be rejected based on the fact that we are not logged in.
            self.subscribe_to_topics()

        except Exception:
            # Note that this uses .warning, not the deprecated .warn, for consistency with the rest of the module
            logger.warning('Exception in run_forever -> %s', format_exc())
# ################################################################################################################################
# ################################################################################################################################
| 9,307
|
Python
|
.py
| 159
| 50.54717
| 130
| 0.437892
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,153
|
base.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn/wsx/base.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2024, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from json import loads
from logging import getLogger
from traceback import format_exc
# gevent
from gevent import sleep as _gevent_sleep
# Zato
from zato.common.api import DATA_FORMAT, GENERIC as COMMON_GENERIC, WEB_SOCKET, ZATO_NONE
from zato.common.typing_ import cast_
from zato.common.util.config import resolve_name
from zato.server.connection.queue import Wrapper
from zato.server.generic.api.outconn.wsx.client_generic import _NonZatoWSXClient
from zato.server.generic.api.outconn.wsx.client_zato import ZatoWSXClient
from zato.server.generic.api.outconn.wsx.common import OnClosed, OnConnected, OnMessageReceived
# ################################################################################################################################
# ################################################################################################################################
if 0:
from bunch import Bunch
from zato.common.typing_ import any_, callable_, strdict, strlist, strnone
from zato.common.wsx_client import MessageFromServer
from zato.server.base.parallel import ParallelServer
Bunch = Bunch
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
_json = DATA_FORMAT.JSON
msg_closing_superfluous = 'Closing superfluous connection (Zato queue)'
# ################################################################################################################################
# ################################################################################################################################
class WSXClient:
    """ A client through which outgoing WebSocket messages can be sent.
    """
    is_zato: 'bool'
    impl: 'ZatoWSXClient | _NonZatoWSXClient'
    send: 'callable_'
    invoke: 'callable_'
    address_masked:'str'

    def __init__(self, server:'ParallelServer', config:'strdict') -> 'None':
        self.server = server
        self.config = config
        self.config['name'] = resolve_name(self.config['name'])
        self.is_zato = self.config['is_zato']
        self.impl = cast_('any_', None)
        self.address_masked = self.config['address_masked']

    def _init(self) -> 'None':
        """ Builds the underlying implementation object, connects it and blocks in its run_forever loop.
        """
        # Decide which implementation class to use ..
        if self.is_zato:
            _impl_class = ZatoWSXClient
        else:
            _impl_class = _NonZatoWSXClient

        # .. this will create an instance ..
        self.impl = _impl_class(
            self.server,
            self.config,
            self.on_connected_cb,
            self.on_message_cb,
            self.on_close_cb
        )

        # .. this will initialize it ..
        _ = self.impl.init()

        # .. so now, we can make use of what was possibly initialized in .init above ..
        self.send = self.impl.send
        self.invoke = self.send

        # .. additional features of the Zato client ..
        if _impl_class is ZatoWSXClient:
            self.invoke_service = self.impl._zato_client.invoke_service # type: ignore

        # .. now, the client can connect ..
        _ = self.impl.connect()

        # .. and run forever.
        _ = self.impl.run_forever()

# ################################################################################################################################

    def init(self) -> 'None':
        """ Keeps trying to establish the underlying connection until it succeeds
        or until this client should no longer attempt to do so.
        """
        # Local variables
        config_id = self.config['id']
        is_zato = self.config['is_zato']

        # Keep trying until our underlying client is connected ..
        while not self.is_impl_connected():

            # .. stop if the client should not try again, e.g. it has been already deleted ..
            if self.impl and (not self.impl.should_keep_running()):

                # .. log what we are about to do ..
                msg = f'Returning from WSXClient.init -> {self.address_masked} -> '
                msg += f'self.impl of `{hex(id(self))}` should not keep running'
                logger.info(msg)

                # .. do return to our caller.
                return

            # .. also, delete the connection and stop if we are no longer ..
            # .. in the server-wide list of connection pools that should exist ..
            if not self.server.wsx_connection_pool_wrapper.has_item(is_zato=is_zato, config_id=config_id, item=self):

                # .. log what we are about to do ..
                # .. (note the f-prefix below - previously it was missing and the braces were logged verbatim) ..
                msg = f'Returning from WSXClient.init -> `{self.address_masked}` -> '
                msg += f'pool `{hex(id(self))}` already deleted'
                logger.info(msg)

                # .. delete and close the underlying client ..
                self.delete()

                # .. do return to our caller.
                return

            # .. if we are here, it means that we keep trying ..
            else:

                # .. do try to connect ..
                self._init()

                # .. sleep for a while after the attempt.
                _gevent_sleep(1)

    def on_connected_cb(self, conn:'OutconnWSXWrapper') -> 'None':
        """ Dispatches the on-connected event to our parent wrapper.
        """
        self.config['parent'].on_connected_cb(conn)

# ################################################################################################################################

    def on_message_cb(self, msg:'MessageFromServer') -> 'any_':
        """ Dispatches an incoming message to our parent wrapper, returning its response.
        """
        return self.config['parent'].on_message_cb(msg)

# ################################################################################################################################

    def on_close_cb(self, code:'int', reason:'strnone'=None) -> 'None':
        """ Dispatches the on-closed event to our parent wrapper.
        """
        self.config['parent'].on_close_cb(code, reason)

# ################################################################################################################################

    def delete(self, reason:'str'='') -> 'None':
        """ Deletes and closes the underlying implementation object, if any.
        """
        if self.impl:

            # In the Zato client, the .delete method calls its own .close,
            # so we do not need to call it. But in the non-Zato client,
            # .delete and .close are distinct and both need to be called.
            if isinstance(self.impl, _NonZatoWSXClient):
                self.impl.delete()

            # This is common to both implementations.
            self.impl.close(reason=reason) # type: ignore

# ################################################################################################################################

    def is_impl_connected(self) -> 'bool':
        """ Returns True if the underlying implementation exists and reports being connected.
        """
        return self.impl and self.impl.check_is_connected()

# ################################################################################################################################

    def get_name(self) -> 'str':
        """ Returns a display name combining the connection's name, type and this object's ID.
        """
        return f'{self.config["name"]} - {self.config["type_"]} - {hex(id(self))}'
# ################################################################################################################################
# ################################################################################################################################
class OutconnWSXWrapper(Wrapper):
    """ Wraps a queue of connections to WebSockets.
    """
    has_delete_reasons = True
    supports_reconnections = True

    on_connect_service_name:'str' = ''
    on_message_service_name:'str' = ''
    on_close_service_name:'str' = ''
    on_subscribe_service_name:'str' = ''

    is_on_connect_service_wsx_adapter:'bool' = False
    is_on_message_service_wsx_adapter:'bool' = False
    is_on_close_service_wsx_adapter:'bool' = False
    is_on_subscribe_service_wsx_adapter:'bool' = False

    def __init__(self, config:'strdict', server:'ParallelServer') -> 'None':

        # .. these used to be optional which is why we need ..
        # .. to ensure that we have this information here ..
        if not config.get('ping_interval'):
            config['ping_interval'] = WEB_SOCKET.DEFAULT.PING_INTERVAL

        if not config.get('pings_missed_threshold'):
            config['pings_missed_threshold'] = WEB_SOCKET.DEFAULT.PINGS_MISSED_THRESHOLD_OUTGOING

        if not config.get('socket_read_timeout'):
            config['socket_read_timeout'] = WEB_SOCKET.DEFAULT.Socket_Read_Timeout

        # .. note that it is the same value as with the read timeout ..
        # .. because the underlying TCP sockets may be shared by multiple threads ..
        if not config.get('socket_write_timeout'):
            config['socket_write_timeout'] = config['socket_read_timeout']

        config['parent'] = self
        self._has_json = config.get('data_format') == _json

        self._resolve_config_ids(config, server)
        super(OutconnWSXWrapper, self).__init__(cast_('Bunch', config), COMMON_GENERIC.ConnName.OutconnWSX, server)

# ################################################################################################################################

    def check_is_active(self) -> 'bool':
        """ Asks the server if this outgoing connection is currently active.
        """
        is_active = self.server.is_active_outconn_wsx(self.config['id'])
        return is_active

# ################################################################################################################################

    def on_outconn_stopped_running(self) -> 'None':
        """ Notifies the server that this outgoing connection stopped running.
        """
        self.server.on_wsx_outconn_stopped_running(self.config['id'])

# ################################################################################################################################

    def on_outconn_connected(self) -> 'None':
        """ Notifies the server that this outgoing connection has connected.
        """
        self.server.on_wsx_outconn_connected(self.config['id'])

# ################################################################################################################################

    def _resolve_config_ids(self, config:'strdict', server:'ParallelServer') -> 'None':
        """ Resolves service IDs from configuration into service names and stores them,
        along with flags indicating whether each one is a WSX adapter service.
        Also resolves an optional security definition into a username/secret pair.
        """
        on_connect_service_id   = config.get('on_connect_service_id',   0) # type: int
        on_message_service_id   = config.get('on_message_service_id',   0) # type: int
        on_close_service_id     = config.get('on_close_service_id',     0) # type: int
        on_subscribe_service_id = config.get('on_subscribe_service_id', 0) # type: int

        on_connect_service_name   = config.get('on_connect_service_name',   '') # type: str
        on_message_service_name   = config.get('on_message_service_name',   '') # type: str
        on_close_service_name     = config.get('on_close_service_name',     '') # type: str
        on_subscribe_service_name = config.get('on_subscribe_service_name', '') # type: str

        #
        # Connect service
        #
        if not on_connect_service_name:
            if on_connect_service_id:
                on_connect_service_name = server.api_service_store_get_service_name_by_id(on_connect_service_id)

        if on_connect_service_name:
            self.on_connect_service_name = on_connect_service_name
            self.is_on_connect_service_wsx_adapter = server.is_service_wsx_adapter(self.on_connect_service_name)
            config['on_connect_service_name'] = self.on_connect_service_name

        #
        # On message service
        #
        if not on_message_service_name:
            if on_message_service_id:
                on_message_service_name = server.api_service_store_get_service_name_by_id(on_message_service_id)

        if on_message_service_name:
            self.on_message_service_name = on_message_service_name
            self.is_on_message_service_wsx_adapter = server.is_service_wsx_adapter(self.on_message_service_name)
            config['on_message_service_name'] = self.on_message_service_name

        #
        # OnClosed service
        #
        if not on_close_service_name:
            if on_close_service_id:
                on_close_service_name = server.api_service_store_get_service_name_by_id(on_close_service_id)

        if on_close_service_name:
            self.on_close_service_name = on_close_service_name
            self.is_on_close_service_wsx_adapter = server.is_service_wsx_adapter(self.on_close_service_name)
            config['on_close_service_name'] = self.on_close_service_name

        #
        # Subscribe service
        #
        if not on_subscribe_service_name:
            if on_subscribe_service_id:
                on_subscribe_service_name = server.api_service_store_get_service_name_by_id(on_subscribe_service_id)

        if on_subscribe_service_name:
            self.on_subscribe_service_name = on_subscribe_service_name
            self.is_on_subscribe_service_wsx_adapter = server.is_service_wsx_adapter(self.on_subscribe_service_name)
            config['on_subscribe_service_name'] = self.on_subscribe_service_name

        if config.get('security_def'):
            if config['security_def'] != ZATO_NONE:
                _ignored_sec_type, sec_def_id = config['security_def'].split('/')
                sec_def_id = int(sec_def_id)
                sec_def_config = server.api_worker_store_basic_auth_get_by_id(sec_def_id)

                if sec_def_config:
                    config['username'] = sec_def_config['username']
                    config['secret'] = sec_def_config['password']

# ################################################################################################################################

    def on_subscribe_cb(self, service_name:'str') -> 'strlist':
        """ Invokes a user-defined service to obtain a dynamic list of topics to subscribe to.
        """
        # Our response to produce
        out:'strlist' = []

        # Invoke the service that will produce a list of topics to subscribe to
        response = self.server.invoke(service_name)

        # If there was any response, make sure our caller receives it
        if response:
            out.extend(response)

        # Finally, return the result to the caller
        return out

# ################################################################################################################################

    def on_connected_cb(self, conn:'OutconnWSXWrapper') -> 'None':
        """ Invokes the configured CONNECT service, if any, logging invocation errors.
        """
        if self.on_connect_service_name:
            try:
                ctx = OnConnected(self.config, conn)
                if self.is_on_connect_service_wsx_adapter:
                    self.server.invoke_wsx_adapter(self.on_connect_service_name, ctx)
                else:
                    self.server.invoke(self.on_connect_service_name, ctx)
            except Exception:
                logger.warning('Could not invoke CONNECT service `%s`, e:`%s`', self.on_connect_service_name, format_exc())

# ################################################################################################################################

    def on_message_cb(self, msg:'bytes | MessageFromServer') -> 'None':
        """ Invokes the configured MESSAGE service, if any, optionally decoding the input as JSON first.
        """
        if self.on_message_service_name:
            try:
                # Decode bytes into a dict first if JSON is the configured data format
                if self._has_json and isinstance(msg, bytes):
                    msg = msg.decode('utf8') # type: ignore
                    msg = loads(msg)         # type: ignore

                ctx = OnMessageReceived(cast_('strdict | MessageFromServer', msg), self.config, self)
                if self.is_on_message_service_wsx_adapter:
                    response = self.server.invoke_wsx_adapter(self.on_message_service_name, ctx)
                    return response
                else:
                    response = self.server.invoke(self.on_message_service_name, ctx)
                    return response
            except Exception:
                logger.warning('Could not invoke MESSAGE service `%s`, e:`%s`', self.on_message_service_name, format_exc())

# ################################################################################################################################

    def _should_handle_close_cb(self, _ignored_code:'int', reason:'strnone') -> 'bool':
        """ Returns True only for close events that we did not generate ourselves.
        """
        if reason not in (ZATO_NONE, msg_closing_superfluous):
            if not self.delete_requested:
                return True

        # Return False by default
        return False

# ################################################################################################################################

    def on_close_cb(self, code:'int', reason:'strnone'=None) -> 'None':
        """ Handles a close event - invokes the configured CLOSE service and, if configured,
        schedules a reconnection attempt, unless the event was generated by us.
        """
        # We need to special-case the situation when it is us who deleted the outgoing connection.
        # Note the idiomatic "not in" below, previously spelled "not reason in".
        reason_is_not_delete = reason not in {COMMON_GENERIC.DeleteReasonBytes, COMMON_GENERIC.InitialReason}

        # Ignore events we generated ourselves, e.g. when someone edits a connection in web-admin
        # this will result in deleting and rerecreating a connection which implicitly calls this callback.
        if self._should_handle_close_cb(code, reason):

            # If reason is something else than our deleting the connection, we can log this message
            # to indicate that it must have been the remote server that did it.
            if reason_is_not_delete:
                logger.info('Remote server closed connection to WebSocket `%s`, c:`%s`, r:`%s`',
                    self.config['name'], code, reason)

            if self.on_close_service_name:
                try:
                    ctx = OnClosed(code, reason, self.config, self)
                    if self.is_on_close_service_wsx_adapter:
                        self.server.invoke_wsx_adapter(self.on_close_service_name, ctx)
                    else:
                        self.server.invoke(self.on_close_service_name, ctx)
                except Exception:
                    logger.warning('Could not invoke CLOSE service `%s`, e:`%s`', self.on_close_service_name, format_exc())

            has_auto_reconnect = self.config.get('has_auto_reconnect', True)

            if has_auto_reconnect:
                try:
                    # Reconnect only if it was not us who deleted the connection ..
                    if reason_is_not_delete:

                        # .. log what we are about to do ..
                        logger.info('WebSocket `%s` will reconnect to `%s` (hac:%d)',
                            self.config['name'], self.config['address'], has_auto_reconnect)

                        # .. and do reconnect now.
                        self.server.api_worker_store_reconnect_generic(self.config['id'])

                except Exception:
                    logger.warning('Could not reconnect WebSocket `%s` to `%s`, e:`%s`',
                        self.config['name'], self.config['address'], format_exc())

        else:
            # Do not handle it but log information so as not to overlook the event
            logger.info('WSX `%s` (%s) ignoring close event code:`%s` reason:`%s`',
                self.config['name'], self.config['address'], code, reason)

# ################################################################################################################################

    def send(self, data:'any_') -> 'None':
        """ Sends data to the remote WebSocket, waiting first if the connection queue is still being built.
        """
        # If we are being invoked while the queue is still building, we need to wait until it becomes available ..
        while self.client.is_building_conn_queue:
            _gevent_sleep(1)

        # .. now, we can invoke the remote web socket.
        with self.client() as client:
            client.send(data) # type: ignore

    invoke = send

# ################################################################################################################################

    def add_client(self) -> 'None':
        """ Builds a new WSX client, registers it in the server-wide pool wrapper
        and attempts to initialize it, cleaning up if the attempt fails.
        """
        # Local variables
        config_id = self.config['id']
        is_zato = self.config['is_zato']

        # Obtain a lock whose type will differ depending on whether it is a connection to Zato or not ..
        _lock = self.server.wsx_connection_pool_wrapper.get_update_lock(is_zato=is_zato)

        # .. do make use of the lock ..
        with _lock(config_id):

            try:
                # First, make sure there are no previous connection pools for this ID ..
                self.server.wsx_connection_pool_wrapper.delete_all(config_id=config_id, is_zato=is_zato)

                # .. now, initialize the client ..
                conn = WSXClient(self.server, self.config)

                # .. append it for potential later use ..
                self.conn_in_progress_list.append(conn)

                # .. add it to the wrapper for potential later use ..
                self.server.wsx_connection_pool_wrapper.add_item(config_id=config_id, is_zato=is_zato, item=conn)

                # .. try to initialize the connection ..
                conn.init()

                # .. if we are not connected at this point, we need to delete all the reference to the pool ..
                if not conn.is_impl_connected():
                    self.delete()
                    self.client.decr_in_progress_count()
                    return

            except Exception:
                logger.warning('WSX client `%s` could not be built `%s`', self.config['name'], format_exc())
            else:
                try:
                    if not self.client.put_client(conn):
                        self.delete_queue_connections(msg_closing_superfluous)
                except Exception:
                    logger.warning('WSX error `%s`', format_exc())
                finally:
                    self.client.decr_in_progress_count()
# ################################################################################################################################
# ################################################################################################################################
| 21,866
|
Python
|
.py
| 362
| 49.558011
| 130
| 0.498362
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,154
|
client_generic.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn/wsx/client_generic.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
from traceback import format_exc
# gevent
from gevent import sleep
# Zato
from zato.common.api import ZATO_NONE
from zato.common.typing_ import cast_
from zato.common.util.api import spawn_greenlet
from zato.common.util.config import replace_query_string_items
from zato.server.generic.api.outconn.wsx.common import _BaseWSXClient
# Zato - Ext - ws4py
from zato.server.ext.ws4py.client.threadedclient import WebSocketClient
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, callable_, stranydict
from zato.server.base.parallel import ParallelServer
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class _NonZatoWSXClientImpl(WebSocketClient, _BaseWSXClient):
    """ The low-level client used to connect to non-Zato WebSocket servers,
    combining the threaded ws4py client with the common WSX callback base class.
    """
    def __init__(
        self,
        server:'ParallelServer',
        config:'stranydict',
        on_connected_cb:'callable_',
        on_message_cb:'callable_',
        on_close_cb:'callable_'
    ) -> 'None':
        # Initialize the callback base class first - it stores the config and callbacks
        # that the WebSocketClient part may rely on later on.
        _BaseWSXClient.__init__(self, server, config, on_connected_cb, on_message_cb, on_close_cb)
        # Now, initialize the actual WebSocket machinery with parameters taken from our configuration.
        WebSocketClient.__init__(
            self,
            server=server,
            url=config['address'],
            heartbeat_freq=config['ping_interval'],
            socket_read_timeout=config['socket_read_timeout'],
            socket_write_timeout=config['socket_write_timeout'],
        )
# ################################################################################################################################
    def close(self, code:'int'=1000, reason:'str'=ZATO_NONE) -> 'None':
        """ Closes the connection, defaulting the reason to ZATO_NONE so that our own
        close events can be told apart from ones initiated by the remote end.
        """
        # It is needed to set this custom reason code because when it is us who closes the connection the 'closed' event
        # (i.e. on_close_cb) gets invoked and we need to know not to reconnect automatically in such a case.
        super(_NonZatoWSXClientImpl, self).close(code, reason)
# ################################################################################################################################
# ################################################################################################################################
class _NonZatoWSXClient:
    """ A lifecycle facade around _NonZatoWSXClientImpl - starts the actual client
    in a background greenlet, tracks connection attempts and handshake status,
    and exposes close/delete/check_is_connected to the owning wrapper.
    """
    send:'callable_' # type: ignore
    invoke:'callable_'
    log_address:'str'

    _non_zato_client:'_NonZatoWSXClientImpl'

    def __init__(
        self,
        server: 'ParallelServer',
        config:'stranydict',
        on_connected_cb:'callable_',
        on_message_cb:'callable_',
        on_close_cb:'callable_',
        *args:'any_',
        **kwargs:'any_'
    ) -> 'None':

        self.config = config
        self.on_connected_cb = on_connected_cb
        self.on_message_cb = on_message_cb
        self.on_close_cb = on_close_cb
        self.init_args = args
        self.init_kwargs = kwargs
        self.server = server

        self.keep_running = True
        self.connection_attempts_so_far = 0

        # This is different than that the underlying implementation's .is_connected flag
        # because this here indicates that we completed a handshake and, for instance,
        # the remote end has not returned any 40x response, whereas .is_connected
        # only indicates if a TCP-level connection exists.
        self.has_established_connection = False

        # This will be overwritten in self._init in a new thread
        # but we need it set to None so that self.init can check
        # if the client object has been already created.
        self._non_zato_client = cast_('_NonZatoWSXClientImpl', None)

# ################################################################################################################################

    def _init(self) -> 'any_':
        """ Builds the actual client object and maps its methods to our own.
        Runs in a separate greenlet spawned by self.init.
        """
        # This is the actual client, starting in a new thread ..
        self._non_zato_client = _NonZatoWSXClientImpl(
            self.server,
            self.config,
            self.on_connected_cb,
            self.on_message_cb,
            self.on_close_cb,
            *self.init_args,
            **self.init_kwargs,
        )

        # .. build it here as we may want to update it dynamically ..
        self.address_masked = replace_query_string_items(self.server, self.config['address'])

        # .. map implementation methods to our own.
        self.invoke = self._non_zato_client.send
        self.send = self.invoke # type: ignore

# ################################################################################################################################

    def init(self) -> 'any_':

        # This will start a WSX connection in a new thread ..
        _ = spawn_greenlet(self._init)

        # .. which is why we wait here until the object has been created.
        while not self._non_zato_client:
            sleep(0.1)

# ################################################################################################################################

    def send(self, *args:'any_', **kwargs:'any_') -> 'any_':
        """ This method is going to be overwritten in self._init but we need it here because our caller expects it sooner.
        """
        raise NotImplementedError()

# ################################################################################################################################

    def connect(self) -> 'any_':
        """ Attempts to connect to the remote end, incrementing the attempt counter.
        On success, marks the handshake as completed.
        """
        if not self.should_keep_running():
            return

        self.connection_attempts_so_far += 1

        try:
            self._non_zato_client.connect(close_on_handshake_error=False)
        except Exception:
            # Fix: logger.warn is a deprecated alias - use logger.warning.
            logger.warning('WSX could not connect to `%s` -> id:%s -> `%s (#%s)',
                self.address_masked,
                hex(id(self._non_zato_client)),
                format_exc(),
                self.connection_attempts_so_far,
            )
        else:
            self.has_established_connection = True

# ################################################################################################################################

    def close(self, reason:'str') -> 'any_':
        # The underlying client may not exist yet if _init has not completed.
        if self._non_zato_client:
            self._non_zato_client.close(reason=reason)

# ################################################################################################################################

    def delete(self) -> 'None':
        # Tells any connect loops to stop trying.
        self.keep_running = False

# ################################################################################################################################

    def should_keep_running(self) -> 'bool':
        return self.keep_running

# ################################################################################################################################

    def check_is_connected(self) -> 'bool':
        """ Returns True only if there is a live TCP connection and the WebSocket
        handshake completed successfully.
        """
        if self._non_zato_client:
            is_connected = not self._non_zato_client.terminated
            return is_connected and self.has_established_connection
        else:
            return False

# ################################################################################################################################

    def run_forever(self) -> 'any_':
        # Added for API completeness
        pass
# ################################################################################################################################
# ################################################################################################################################
# For Flake8 - _NonZatoWSXClient
_ = _NonZatoWSXClient
# ################################################################################################################################
# ################################################################################################################################
| 8,539
|
Python
|
.py
| 158
| 46.759494
| 130
| 0.415226
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,155
|
common.py
|
zatosource_zato/code/zato-server/src/zato/server/generic/api/outconn/wsx/common.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2023, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from logging import getLogger
# Zato
from zato.common.api import WEB_SOCKET
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.typing_ import any_, callable_, strdict, strnone
from zato.common.wsx_client import MessageFromServer
from zato.server.base.parallel import ParallelServer
from zato.server.generic.api.outconn.wsx.base import OutconnWSXWrapper
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class WSXCtx:
    """ Details of a message received from a WebSocket outgoing connection.
    """
    # Overridden in subclasses with one of WEB_SOCKET.OUT_MSG_TYPE values
    type = None

    # Implemented by subclasses - invokes a service in reaction to this event
    invoke_service:'callable_'

    def __init__(self, config:'strdict', conn:'OutconnWSXWrapper') -> 'None':
        self.config = config # Configuration of the outgoing connection
        self.conn = conn     # The wrapper object the event arrived through
# ################################################################################################################################
# ################################################################################################################################
class Connected(WSXCtx):
    """ Signals that an outgoing WebSocket connection has been established.
    """
    type = WEB_SOCKET.OUT_MSG_TYPE.CONNECT

    def invoke_service(self, server:'ParallelServer', service_name:'str') -> 'None':

        # Create a fresh service instance and let it react to the new connection
        instance = server.service_store.new_instance_by_name(service_name)[0]
        instance.on_connected(self) # type: ignore

# Alias kept for backward compatibility
OnConnected = Connected
# ################################################################################################################################
# ################################################################################################################################
class OnMessage(WSXCtx):
    """ Carries a message received from the remote end of an outgoing WebSocket connection.
    """
    type = WEB_SOCKET.OUT_MSG_TYPE.MESSAGE

    def __init__(self, data:'strdict | MessageFromServer', *args:'any_', **kwargs:'any_') -> 'None':
        self.data = data
        super().__init__(*args, **kwargs)

    def invoke_service(self, server:'ParallelServer', service_name:'str') -> 'None':

        # Create a fresh service instance and hand the message over to it
        instance = server.service_store.new_instance_by_name(service_name)[0]
        instance.on_message_received(self) # type: ignore

# Alias kept for backward compatibility
OnMessageReceived = OnMessage
# ################################################################################################################################
# ################################################################################################################################
class Close(WSXCtx):
    """ Signals that an outgoing WebSocket connection has been closed,
    carrying the close code and an optional reason.
    """
    type = WEB_SOCKET.OUT_MSG_TYPE.CLOSE

    def __init__(self, code:'int', reason:'strnone'=None, *args:'any_', **kwargs:'any_') -> 'None':
        self.code = code
        self.reason = reason
        super().__init__(*args, **kwargs)

    def invoke_service(self, server:'ParallelServer', service_name:'str') -> 'None':

        # Create a fresh service instance and let it react to the connection's closure
        instance = server.service_store.new_instance_by_name(service_name)[0]
        instance.on_closed(self) # type: ignore

# Alias kept for backward compatibility
OnClosed = Close
# ################################################################################################################################
# ################################################################################################################################
class _BaseWSXClient:
    """ Common callback wiring shared by concrete WebSocket client implementations.
    Stores the connection's configuration together with the callbacks fired on
    open, message and close events, and adapts the transport's event methods
    (opened/received_message/closed) to those callbacks.
    """
    def __init__(
        self,
        server: 'ParallelServer',
        config:'strdict',
        on_connected_cb:'callable_',
        on_message_cb:'callable_',
        on_close_cb:'callable_',
    ) -> 'None':
        # Note that the server object itself is not stored here - only the
        # connection's config and callbacks are.
        self.config = config
        self.on_connected_cb = on_connected_cb
        self.on_message_cb = on_message_cb
        self.on_close_cb = on_close_cb

# ################################################################################################################################

    def opened(self) -> 'None':
        # The connection is open - hand ourselves over to the callback.
        self.on_connected_cb(self)

# ################################################################################################################################

    def received_message(self, msg:'MessageFromServer') -> 'None':
        # Pass on only the business payload, not the whole message object.
        self.on_message_cb(msg.data)

# ################################################################################################################################

    def closed(self, code:'int', reason:'strnone'=None) -> 'None':
        self.on_close_cb(code, reason)

# ################################################################################################################################
# ################################################################################################################################

# For flake8
_BaseWSXClient = _BaseWSXClient # type: ignore
# ################################################################################################################################
# ################################################################################################################################
| 5,634
|
Python
|
.py
| 90
| 57.877778
| 130
| 0.337148
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,156
|
outbox.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/outbox.py
|
# This module is a fork of Outbox from https://github.com/nhoad/outbox/
"""
Copyright (c) 2012-2014, Nathan Hoad
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""
File: outbox.py
Author: Nathan Hoad
Description: Simple wrapper around smtplib for sending an email.
"""
# flake8: noqa
import smtplib
import socket, sys
from email.header import Header
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from zato.common.py23_.past.builtins import basestring
PY2 = sys.version_info[0] == 2
if PY2:
string_type = basestring
iteritems = lambda d: d.iteritems()
else:
string_type = str
iteritems = lambda d: d.items()
# ################################################################################################################################
# ################################################################################################################################
class Email:
    def __init__(self, recipients, subject, body=None, html_body=None, charset='utf8', fields=None, rfc2231=True):
        """
        Object representation of an email. Contains a recipient, subject,
        conditionally a body or HTML body.

        Arguments:
            recipients - list of strings of the email addresses of the
                         recipients. May also be a string containing a single
                         email address.
            subject - Subject of the email.
            body - Plain-text body.
            html_body - Rich-text body.
            charset - charset to use for encoding the `body` and `html_body`
                      attributes.
            fields - any additional headers you want to add to the email message.
            rfc2231 - whether attachment file names may use RFC 2231 parameter
                      encoding (passed through to add_attachment).
        """
        # Raises TypeError early if recipients is not iterable at all
        iter(recipients)

        if isinstance(recipients, string_type):
            recipients = [recipients]

        if not recipients:
            raise ValueError("At least one recipient must be specified!")

        for r in recipients:
            if not isinstance(r, string_type):
                raise TypeError("Recipient not a string: %s" % r)

        if body is None and html_body is None:
            raise ValueError("No body set")

        self.recipients = recipients
        self.subject = subject
        self.body = body
        self.html_body = html_body
        self.charset = charset
        self.fields = fields or {}
        self.rfc2231 = rfc2231

    def as_mime(self, attachments=()):
        """ Build and return the MIME message for this email.

        With both a plain and an HTML body the two are wrapped in
        multipart/alternative; if attachments are present everything is wrapped
        in multipart/mixed; with a single body and no attachments the body part
        is returned directly.
        """
        bodies = []
        if self.body:
            bodies.append(MIMEText(self.body, 'plain', self.charset))
        if self.html_body:
            bodies.append(MIMEText(self.html_body, 'html', self.charset))

        with_alternative = len(bodies) == 2
        if with_alternative or attachments:
            if with_alternative:
                # txt holds both body variants; the mixed wrapper is added
                # only when attachments require it.
                txt = MIMEMultipart('alternative')
                if attachments:
                    msg = MIMEMultipart('mixed')
                    msg.attach(txt)
                else:
                    msg = txt
            else:
                # Single body plus attachments - one mixed container for both
                msg = txt = MIMEMultipart('mixed')

            for body in bodies:
                txt.attach(body)
        else:
            # Exactly one body, no attachments - no multipart wrapper needed
            msg = bodies[0]

        msg['To'] = ', '.join(self.recipients)
        msg['Date'] = formatdate(localtime=True)
        msg['Subject'] = self.subject

        # Any extra user-provided headers
        for key, value in iteritems(self.fields):
            msg[key] = value

        for f in attachments:
            if not isinstance(f, Attachment):
                raise TypeError("attachment must be of type Attachment")
            add_attachment(msg, f, self.rfc2231)

        return msg
# ################################################################################################################################
# ################################################################################################################################
class Attachment:
    """ A named email attachment. The file-like object is read eagerly at
    construction time so its contents can be reused across messages; text
    data is normalized to bytes.
    """
    def __init__(self, name, fileobj):
        self.name = name
        data = fileobj.read()
        # Text-mode file objects return str - normalize to bytes.
        self.raw = data if isinstance(data, bytes) else data.encode()

    def read(self):
        return self.raw
# ################################################################################################################################
# ################################################################################################################################
class Outbox:
    """ Thin wrapper around the SMTP and SMTP_SSL classes from the smtplib module.
    Can be used as a context manager, in which case a connection is held for the
    duration of the `with` block; otherwise each send() connects and disconnects.
    """
    def __init__(self, username, password, server, port, mode='TLS', debug=False, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        if mode not in ('SSL', 'TLS', None):
            raise ValueError("Mode must be one of TLS, SSL, or None")

        # Make sure the server name is a str object, not bytes
        server = server.decode('utf8') if isinstance(server, bytes) else server

        self.username = username
        self.password = password
        self.connection_details = (server, port, mode, debug, timeout)
        self._conn = None

    def __enter__(self):
        self.connect()
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.disconnect()

    def _login(self):
        """ Login to the SMTP server specified at instantiation. Returns an authenticated SMTP instance.
        """
        server, port, mode, debug, timeout = self.connection_details

        # SSL wraps the whole connection; TLS is negotiated after connecting
        smtp_class = smtplib.SMTP_SSL if mode == 'SSL' else smtplib.SMTP

        smtp = smtp_class(server, port, timeout=timeout)
        smtp.set_debuglevel(debug)

        if mode == 'TLS':
            smtp.starttls()

        self.authenticate(smtp)
        return smtp

    def connect(self):
        self._conn = self._login()

    def authenticate(self, smtp):
        """ Perform login with the given smtplib.SMTP instance.
        """
        smtp.login(self.username, self.password)

    def disconnect(self):
        self._conn.quit()

    def send(self, email, attachments=(), from_=None):
        """ Send an email. Connect/Disconnect if not already connected.

        Arguments:
            email: Email instance to send.
            attachments: iterable containing Attachment instances
            from_: optional envelope sender; defaults to the username.
        """
        msg = email.as_mime(attachments)

        if 'From' not in msg:
            msg['From'] = self.sender_address()

        cc = msg.get('CC', [])
        bcc = msg.get('BCC', [])

        # Header values are comma-separated strings - split them into lists
        if isinstance(cc, basestring):
            cc = [elem.strip() for elem in cc.split(',')]
        if isinstance(bcc, basestring):
            bcc = [elem.strip() for elem in bcc.split(',')]

        # Fix: copy the list instead of extending email.recipients in place -
        # otherwise CC/BCC addresses accumulate on the Email object across
        # repeated send() calls.
        recipients = list(email.recipients)
        recipients.extend(cc)
        recipients.extend(bcc)

        if self._conn:
            self._conn.sendmail(from_ or self.username, recipients,
                    msg.as_string())
        else:
            with self:
                self._conn.sendmail(from_ or self.username, recipients,
                        msg.as_string())

    def sender_address(self):
        """ Return the sender address.

        The default implementation is to use the username that is used for
        signing in.

        If you want pretty names, e.g. <Captain Awesome> foo@example.com,
        override this method to do what you want.
        """
        return self.username
# ################################################################################################################################
# ################################################################################################################################
class AnonymousOutbox(Outbox):
    """ Outbox subclass suitable for SMTP servers that do not (or will not) perform authentication.
    """
    def __init__(self, *args, **kwargs):
        # Username and password are irrelevant here - pass empty ones to the base class
        super(AnonymousOutbox, self).__init__('', '', *args, **kwargs)

    def authenticate(self, smtp):
        """Perform no authentication as the server does not require it."""
        pass
def add_attachment(message, attachment, rfc2231=True):
    """ Attach an attachment to a message as a side effect.

    Arguments:
        message: MIMEMultipart instance.
        attachment: Attachment instance.
        rfc2231: whether the file name may be emitted as an RFC 2231 parameter;
                 if False, it is MIME-encoded with email.header.Header instead.
    """
    part = MIMEBase('application', 'octet-stream')
    part.set_payload(attachment.read())
    part.set_charset('utf-8')

    if rfc2231:
        filename = attachment.name
    else:
        filename = Header(attachment.name).encode()

    part.add_header('Content-Disposition', 'attachment',
                    filename=filename)
    message.attach(part)
# ################################################################################################################################
# ################################################################################################################################
| 10,091
|
Python
|
.py
| 207
| 40.062802
| 755
| 0.554039
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,157
|
pidfile.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/pidfile.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import errno
import os
import tempfile
class Pidfile(object):
    """\
    Manage a PID file. If a specific name is provided
    it and '"%s.oldpid" % name' will be used. Otherwise
    we create a temp file using os.mkstemp.
    """

    def __init__(self, fname):
        self.fname = fname # Target path of the pidfile; may be empty until create()
        self.pid = None    # PID last written by create()

    def create(self, pid):
        # Refuse to overwrite the pidfile of another live process
        oldpid = self.validate()
        if oldpid:
            if oldpid == os.getpid():
                return
            msg = "Already running on PID %s (or pid file '%s' is stale)"
            raise RuntimeError(msg % (oldpid, self.fname))
        self.pid = pid

        # Write pidfile
        fdir = os.path.dirname(self.fname)
        if fdir and not os.path.isdir(fdir):
            raise RuntimeError("%s doesn't exist. Can't create pidfile." % fdir)
        fd, fname = tempfile.mkstemp(dir=fdir)
        os.write(fd, ("%s\n" % self.pid).encode('utf-8'))
        if self.fname:
            # Write-then-rename so the pidfile appears atomically with full contents
            os.rename(fname, self.fname)
        else:
            self.fname = fname
        os.close(fd)

        # set permissions to -rw-r--r--
        os.chmod(self.fname, 420)

    def rename(self, path):
        # Move the pidfile by removing the old one and re-creating it at *path*
        self.unlink()
        self.fname = path
        self.create(self.pid)

    def unlink(self):
        """ delete pidfile"""
        try:
            with open(self.fname, "r") as f:
                pid1 = int(f.read() or 0)

            # Only remove the file if it still contains our own PID
            if pid1 == self.pid:
                os.unlink(self.fname)
        except: # noqa: E722 - best-effort cleanup, any failure is deliberately ignored
            pass

    def validate(self):
        """ Validate pidfile and make it stale if needed"""
        if not self.fname:
            return
        try:
            with open(self.fname, "r") as f:
                try:
                    wpid = int(f.read())
                except ValueError:
                    # Unparseable contents - treat the file as stale
                    return

                try:
                    # Signal 0 performs existence/permission checks only
                    os.kill(wpid, 0)
                    return wpid
                except OSError as e:
                    if e.args[0] == errno.EPERM:
                        # The process exists but belongs to another user - still running
                        return wpid
                    if e.args[0] == errno.ESRCH:
                        # No such process - the pidfile is stale
                        return
                    raise
        except IOError as e:
            if e.args[0] == errno.ENOENT:
                # No pidfile at all
                return
            raise
| 3,609
|
Python
|
.py
| 98
| 28.346939
| 80
| 0.608883
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,158
|
_compat.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/_compat.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import sys
from zato.server.ext.zunicorn import six
PY26 = (sys.version_info[:2] == (2, 6))
PY33 = (sys.version_info >= (3, 3))
# For pyflakes
from zato.common.py23_.past.builtins import execfile, unicode
def _check_if_pyc(fname):
    """Return True if the extension is .pyc, False if .py
    and None if otherwise"""
    # NOTE(review): despite the docstring, this returns a tuple of
    # (module type constant, file object, full path) from imp.find_module;
    # also note the 'imp' module was removed in Python 3.12.
    from imp import find_module
    from os.path import realpath, dirname, basename, splitext

    # Normalize the file-path for the find_module()
    filepath = realpath(fname)
    dirpath = dirname(filepath)
    module_name = splitext(basename(filepath))[0]

    # Validate and fetch
    try:
        fileobj, fullpath, (_, _, pytype) = find_module(module_name, [dirpath])
    except ImportError:
        raise IOError("Cannot find config file. "
                      "Path maybe incorrect! : {0}".format(filepath))
    return pytype, fileobj, fullpath
def _get_codeobj(pyfile):
    """ Returns the code object, given a python file """
    from imp import PY_COMPILED, PY_SOURCE

    result, fileobj, fullpath = _check_if_pyc(pyfile)

    # WARNING:
    # fp.read() can blowup if the module is extremely large file.
    # Lookout for overflow errors.
    try:
        data = fileobj.read()
    finally:
        fileobj.close()

    # This is a .pyc file. Treat accordingly.
    if result is PY_COMPILED:
        # .pyc format is as follows:
        # 0 - 4 bytes: Magic number, which changes with each create of .pyc file.
        #              First 2 bytes change with each marshal of .pyc file. Last 2 bytes is "\r\n".
        # 4 - 8 bytes: Datetime value, when the .py was last changed.
        # 8 - EOF: Marshalled code object data.
        # So to get code object, just read the 8th byte onwards till EOF, and
        # UN-marshal it.
        import marshal
        code_obj = marshal.loads(data[8:])

    elif result is PY_SOURCE:
        # This is a .py file.
        code_obj = compile(data, fullpath, 'exec')

    else:
        # Unsupported extension
        raise Exception("Input file is unknown format: {0}".format(fullpath))

    # Return code object
    return code_obj
# Python 2/3 compatibility shims - each branch defines the same three names:
# execfile_, bytes_to_str and unquote_to_wsgi_str.
if six.PY3:
    def execfile_(fname, *args):
        # Support executing both source files and pre-compiled .pyc files
        if fname.endswith(".pyc"):
            code = _get_codeobj(fname)
        else:
            code = compile(open(fname, 'rb').read(), fname, 'exec')
        return six.exec_(code, *args)

    def bytes_to_str(b):
        if isinstance(b, six.text_type):
            return b
        return str(b, 'latin1')

    import urllib.parse

    def unquote_to_wsgi_str(string):
        return _unquote_to_bytes(string).decode('utf-8')

    _unquote_to_bytes = urllib.parse.unquote_to_bytes

else:
    def execfile_(fname, *args):
        """ Overriding PY2 execfile() implementation to support .pyc files """
        if fname.endswith(".pyc"):
            return six.exec_(_get_codeobj(fname), *args)
        return execfile(fname, *args)

    def bytes_to_str(s):
        # NOTE(review): unlike the PY3 branch, this returns UTF-8 encoded bytes
        # for unicode input - presumably intentional since PY2 'str' is the
        # bytes type; confirm before changing.
        if isinstance(s, unicode):
            return s.encode('utf-8')
        return s

    import urllib
    unquote_to_wsgi_str = urllib.unquote
# The following code adapted from trollius.py33_exceptions
def _wrap_error(exc, mapping, key):
    """ Re-raise *exc* as the exception class that *mapping* assigns to *key*,
    preserving the original traceback. Does nothing if *key* has no mapping.
    """
    if key not in mapping:
        return
    new_err_cls = mapping[key]
    new_err = new_err_cls(*exc.args)

    # Raise the new exception with the original traceback attached
    tb = exc.__traceback__ if hasattr(exc, '__traceback__') else sys.exc_info()[2]
    six.reraise(new_err_cls, new_err, tb)
# On Python 3.3+ the specialized OSError subclasses (PEP 3151) are builtins,
# so they are simply re-exported and wrap_error() is a no-op passthrough.
# On older versions equivalent classes are defined here and wrap_error()
# translates errno values into them via _MAP_ERRNO.
if PY33:
    import builtins

    BlockingIOError = builtins.BlockingIOError
    BrokenPipeError = builtins.BrokenPipeError
    ChildProcessError = builtins.ChildProcessError
    ConnectionRefusedError = builtins.ConnectionRefusedError
    ConnectionResetError = builtins.ConnectionResetError
    InterruptedError = builtins.InterruptedError
    ConnectionAbortedError = builtins.ConnectionAbortedError
    PermissionError = builtins.PermissionError
    FileNotFoundError = builtins.FileNotFoundError
    ProcessLookupError = builtins.ProcessLookupError

    def wrap_error(func, *args, **kw):
        # The interpreter already raises the specialized exceptions natively
        return func(*args, **kw)
else:
    import errno
    import select
    import socket

    class BlockingIOError(OSError):
        pass

    class BrokenPipeError(OSError):
        pass

    class ChildProcessError(OSError):
        pass

    class ConnectionRefusedError(OSError):
        pass

    class InterruptedError(OSError):
        pass

    class ConnectionResetError(OSError):
        pass

    class ConnectionAbortedError(OSError):
        pass

    class PermissionError(OSError):
        pass

    class FileNotFoundError(OSError):
        pass

    class ProcessLookupError(OSError):
        pass

    # Maps errno values to the exception classes Python 3.3+ raises natively
    _MAP_ERRNO = {
        errno.EACCES: PermissionError,
        errno.EAGAIN: BlockingIOError,
        errno.EALREADY: BlockingIOError,
        errno.ECHILD: ChildProcessError,
        errno.ECONNABORTED: ConnectionAbortedError,
        errno.ECONNREFUSED: ConnectionRefusedError,
        errno.ECONNRESET: ConnectionResetError,
        errno.EINPROGRESS: BlockingIOError,
        errno.EINTR: InterruptedError,
        errno.ENOENT: FileNotFoundError,
        errno.EPERM: PermissionError,
        errno.EPIPE: BrokenPipeError,
        errno.ESHUTDOWN: BrokenPipeError,
        errno.EWOULDBLOCK: BlockingIOError,
        errno.ESRCH: ProcessLookupError,
    }

    def wrap_error(func, *args, **kw):
        """
        Wrap socket.error, IOError, OSError, select.error to raise new specialized
        exceptions of Python 3.3 like InterruptedError (PEP 3151).
        """
        try:
            return func(*args, **kw)
        except (socket.error, IOError, OSError) as exc:
            if hasattr(exc, 'winerror'):
                _wrap_error(exc, _MAP_ERRNO, exc.winerror)
            # _MAP_ERRNO does not contain all Windows errors.
            # For some errors like "file not found", exc.errno should
            # be used (ex: ENOENT).
            _wrap_error(exc, _MAP_ERRNO, exc.errno)
            raise
        except select.error as exc:
            if exc.args:
                _wrap_error(exc, _MAP_ERRNO, exc.args[0])
            raise
# Python 2.6's urlsplit is replaced with a fixed backport; all other versions
# take urlsplit straight from the standard library (through six.moves).
if PY26:
    from urlparse import (
        _parse_cache, MAX_CACHE_SIZE, clear_cache, _splitnetloc, SplitResult,
        scheme_chars,
    )

    def urlsplit(url, scheme='', allow_fragments=True):
        """Parse a URL into 5 components:
        <scheme>://<netloc>/<path>?<query>#<fragment>
        Return a 5-tuple: (scheme, netloc, path, query, fragment).
        Note that we don't break the components up in smaller bits
        (e.g. netloc is a single string) and we don't expand % escapes."""
        allow_fragments = bool(allow_fragments)
        # Cache key includes types so str/unicode inputs do not collide
        key = url, scheme, allow_fragments, type(url), type(scheme)
        cached = _parse_cache.get(key, None)
        if cached:
            return cached
        if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
            clear_cache()
        netloc = query = fragment = ''
        i = url.find(':')
        if i > 0:
            if url[:i] == 'http': # optimize the common case
                scheme = url[:i].lower()
                url = url[i+1:]
                if url[:2] == '//':
                    netloc, url = _splitnetloc(url, 2)
                    if (('[' in netloc and ']' not in netloc) or
                            (']' in netloc and '[' not in netloc)):
                        raise ValueError("Invalid IPv6 URL")
                if allow_fragments and '#' in url:
                    url, fragment = url.split('#', 1)
                if '?' in url:
                    url, query = url.split('?', 1)
                v = SplitResult(scheme, netloc, url, query, fragment)
                _parse_cache[key] = v
                return v
            for c in url[:i]:
                if c not in scheme_chars:
                    break
            else:
                # make sure "url" is not actually a port number (in which case
                # "scheme" is really part of the path)
                rest = url[i+1:]
                if not rest or any(c not in '0123456789' for c in rest):
                    # not a port number
                    scheme, url = url[:i].lower(), rest

        if url[:2] == '//':
            netloc, url = _splitnetloc(url, 2)
            if (('[' in netloc and ']' not in netloc) or
                    (']' in netloc and '[' not in netloc)):
                raise ValueError("Invalid IPv6 URL")
        if allow_fragments and '#' in url:
            url, fragment = url.split('#', 1)
        if '?' in url:
            url, query = url.split('?', 1)
        v = SplitResult(scheme, netloc, url, query, fragment)
        _parse_cache[key] = v
        return v

else:
    from zato.server.ext.zunicorn.six.moves.urllib.parse import urlsplit

# For pyflakes
urlsplit = urlsplit
import inspect

# get_arity() returns the number of positional parameters a callable declares.
# inspect.signature is preferred; inspect.getargspec is the fallback for
# interpreters that do not provide it.
if hasattr(inspect, 'signature'):
    positionals = (
        inspect.Parameter.POSITIONAL_ONLY,
        inspect.Parameter.POSITIONAL_OR_KEYWORD,
    )

    def get_arity(f):
        sig = inspect.signature(f)
        arity = 0
        for param in sig.parameters.values():
            # Count only parameters that can be supplied positionally
            if param.kind in positionals:
                arity += 1
        return arity
else:
    def get_arity(f):
        return len(inspect.getargspec(f)[0])
# html.escape is the modern API (3.2+); cgi.escape is the PY2-era equivalent.
# quote=True matches html.escape's default of also escaping quote characters.
try:
    import html

    def html_escape(s):
        return html.escape(s)
except ImportError:
    import cgi

    def html_escape(s):
        return cgi.escape(s, quote=True)
| 10,877
|
Python
|
.py
| 277
| 31.259928
| 99
| 0.632438
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,159
|
errors.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/errors.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# We don't need to call super() in __init__ methods of our
# BaseException and Exception classes because we also define
# our own __str__ methods so there is no need to pass 'message'
# to the base class to get a meaningful output from 'str(exc)'.
# pylint: disable=super-init-not-called
# we inherit from BaseException here to make sure to not be caught
# at application level
class HaltServer(BaseException):
    """ Raised to request a full shutdown of the arbiter process.

    Subclasses BaseException (not Exception) on purpose so that generic
    application-level 'except Exception' handlers do not swallow it.
    """
    def __init__(self, reason, exit_status=1):
        # Human-readable cause and the process exit code to use
        self.reason = reason
        self.exit_status = exit_status

    def __str__(self):
        details = (self.reason, self.exit_status)
        return "<HaltServer %r %d>" % details
class ConfigError(Exception):
    """ Raised when the server configuration is invalid or cannot be applied. """
class AppImportError(Exception):
    """ Raised when the WSGI application object cannot be imported or resolved. """
| 2,155
|
Python
|
.py
| 45
| 45.666667
| 69
| 0.768496
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,160
|
arbiter.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/arbiter.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import errno
import os
import random
import select
import signal
import sys
import time
import traceback
# Zato
from zato.common.api import OS_Env
from zato.common.util.platform_ import is_posix
from zato.common.version import get_version
from zato.server.ext.zunicorn import SERVER_SOFTWARE, sock, systemd, util
from zato.server.ext.zunicorn.errors import HaltServer, AppImportError
from zato.server.ext.zunicorn.pidfile import Pidfile
from zato.server.ext.zunicorn.util import is_forking
version = get_version()
class Arbiter:
    """
    Arbiter maintain the workers processes alive. It launches or
    kills them if needed. It also manages application reloading
    via SIGHUP/USR2.
    """

    # Deployment key assigned by Zato at startup — assumes it is set externally; TODO confirm
    zato_deployment_key: 'str'

    # A flag indicating if a worker failed to
    # to boot. If a worker process exist with
    # this error code, the arbiter will terminate.
    WORKER_BOOT_ERROR = 3

    # A flag indicating if an application failed to be loaded
    APP_LOAD_ERROR = 4

    # Saved argv/cwd so a re-exec (USR2) can relaunch identically
    START_CTX = {}

    LISTENERS = []   # bound server sockets
    WORKERS = {}     # pid -> worker object
    PIPE = []        # self-pipe used to wake the main loop from signal handlers
    SIG_QUEUE = []   # signals received but not yet processed (capped at 5)

    # Signal handling is only wired up on POSIX systems
    if is_posix:
        SIGNALS = [getattr(signal, "SIG%s" % x) for x in "HUP QUIT INT TERM TTIN TTOU USR1 USR2 WINCH".split()]
    else:
        SIGNALS = []

    # Map signal number -> lowercase name without the SIG prefix, e.g. 'term'
    SIG_NAMES = dict(
        (getattr(signal, name), name[3:].lower()) for name in dir(signal)
        if name[:3] == "SIG" and name[3] != "_"
    )

    def __init__(self, app):
        # 'app' is the application wrapper exposing .cfg and .wsgi()
        os.environ["SERVER_SOFTWARE"] = SERVER_SOFTWARE

        self._num_workers = None
        self._last_logged_active_worker_count = None
        self.log = None

        self.setup(app)

        self.pidfile = None
        self.systemd = False
        self.worker_age = 0
        self.reexec_pid = 0
        self.master_pid = 0
        self.master_name = "Main"

        cwd = util.getcwd()

        args = sys.argv[:]
        args.insert(0, sys.executable)

        # init start context
        self.START_CTX = {
            "args": args,
            "cwd": cwd,
            0: sys.executable
        }

    def _get_num_workers(self):
        return self._num_workers

    def _set_num_workers(self, value):
        # Notify the config hook about the change in worker count
        old_value = self._num_workers
        self._num_workers = value
        self.cfg.nworkers_changed(self, value, old_value)
    num_workers = property(_get_num_workers, _set_num_workers)

    def setup(self, app):
        """ Read configuration from *app* into instance attributes. Also
        called again on reload (SIGHUP). """
        self.app = app
        self.cfg = app.cfg

        if self.log is None:
            self.log = self.cfg.logger_class(app.cfg)

        # reopen files
        if 'GUNICORN_FD' in os.environ:
            self.log.reopen_files()

        self.worker_class = self.cfg.worker_class
        self.address = self.cfg.address
        self.num_workers = self.cfg.workers
        self.timeout = self.cfg.timeout
        self.proc_name = self.cfg.proc_name

        self.log.debug('Current configuration:\n{0}'.format(
            '\n'.join(
                ' {0}: {1}'.format(config, value.value)
                for config, value
                in sorted(self.cfg.settings.items(),
                          key=lambda setting: setting[1]))))

        # set enviroment' variables
        if self.cfg.env:
            for k, v in self.cfg.env.items():
                os.environ[k] = v

        if self.cfg.preload_app:
            self.app.wsgi()

    def start(self):
        """\
        Initialize the arbiter. Start listening and set pidfile if needed.
        """
        self.log.info('Starting %s', version)

        # GUNICORN_PID set means we are the child of a re-exec'd master
        if 'GUNICORN_PID' in os.environ:
            self.master_pid = int(os.environ.get('GUNICORN_PID'))
            self.proc_name = self.proc_name + ".2"
            self.master_name = "Main.2"

        self.pid = os.getpid()
        if self.cfg.pidfile is not None:
            pidname = self.cfg.pidfile
            if self.master_pid != 0:
                pidname += ".2"
            self.pidfile = Pidfile(pidname)
            self.pidfile.create(self.pid)
        self.cfg.on_starting(self)

        if is_posix:
            self.init_signals()

        # Acquire listening sockets: from systemd socket activation,
        # from a parent master during re-exec, or by binding fresh ones.
        if not self.LISTENERS:
            fds = None
            listen_fds = systemd.listen_fds()
            if listen_fds:
                self.systemd = True
                fds = range(systemd.SD_LISTEN_FDS_START,
                            systemd.SD_LISTEN_FDS_START + listen_fds)

            elif self.master_pid:
                fds = []
                for fd in os.environ.pop('GUNICORN_FD').split(','):
                    fds.append(int(fd))

            self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds)

        listeners_str = ",".join([str(l) for l in self.LISTENERS])
        self.log.debug("Arbiter booted")
        self.log.info("Listening at: %s (%s)", listeners_str, self.pid)

        # check worker class requirements
        if hasattr(self.worker_class, "check_config"):
            self.worker_class.check_config(self.cfg, self.log)

        self.cfg.when_ready(self)

    def init_signals(self):
        """\
        Initialize master signal handling. Most of the signals
        are queued. Child signals only wake up the master.
        """
        # close old PIPE
        for p in self.PIPE:
            os.close(p)

        # initialize the pipe
        self.PIPE = pair = os.pipe()
        for p in pair:
            util.set_non_blocking(p)
            util.close_on_exec(p)

        self.log.close_on_exec()

        # initialize all signals
        for s in self.SIGNALS:
            signal.signal(s, self.signal)
        signal.signal(signal.SIGCHLD, self.handle_chld)

    def signal(self, sig, frame):
        # Queue the signal (bounded) and wake the main loop via the pipe
        if len(self.SIG_QUEUE) < 5:
            self.SIG_QUEUE.append(sig)
            self.wakeup()

    def run(self):
        "Main master loop."
        self.start()
        util._setproctitle("master [%s]" % self.proc_name)

        try:
            self.manage_workers()

            while True:
                self.maybe_promote_master()

                # Drain one queued signal per iteration; with no signal
                # pending, sleep on the pipe and do periodic maintenance.
                sig = self.SIG_QUEUE.pop(0) if self.SIG_QUEUE else None
                if sig is None:
                    self.sleep()
                    self.murder_workers()
                    self.manage_workers()
                    continue

                if sig not in self.SIG_NAMES:
                    self.log.info("Ignoring unknown signal: %s", sig)
                    continue

                signame = self.SIG_NAMES.get(sig)
                handler = getattr(self, "handle_%s" % signame, None)
                if not handler:
                    self.log.error("Unhandled signal: %s", signame)
                    continue
                self.log.info("Handling signal: %s", signame)
                handler()
                self.wakeup()
        except StopIteration:
            self.halt()
        except KeyboardInterrupt:
            self.halt()
        except HaltServer as inst:
            self.halt(reason=inst.reason, exit_status=inst.exit_status)
        except SystemExit:
            raise
        except Exception:
            self.log.info("Unhandled exception in main loop", exc_info=True)
            self.stop(False)
            if self.pidfile is not None:
                self.pidfile.unlink()
            sys.exit(-1)

    def handle_chld(self, sig, frame):
        "SIGCHLD handling"
        self.reap_workers()
        self.wakeup()

    def handle_hup(self):
        """\
        HUP handling.
        - Reload configuration
        - Start the new worker processes with a new configuration
        - Gracefully shutdown the old worker processes
        """
        self.log.info("Hang up: %s", self.master_name)
        self.reload()

    def handle_term(self):
        "SIGTERM handling"
        raise StopIteration

    def handle_int(self):
        "SIGINT handling"
        self.stop(False)
        raise StopIteration

    def handle_quit(self):
        "SIGQUIT handling"
        self.stop(False)
        raise StopIteration

    def handle_ttin(self):
        """\
        SIGTTIN handling.
        Increases the number of workers by one.
        """
        self.num_workers += 1
        self.manage_workers()

    def handle_ttou(self):
        """\
        SIGTTOU handling.
        Decreases the number of workers by one.
        """
        if self.num_workers <= 1:
            return
        self.num_workers -= 1
        self.manage_workers()

    def handle_usr1(self):
        """\
        SIGUSR1 handling.
        Kill all workers by sending them a SIGUSR1
        """
        self.log.reopen_files()
        self.kill_workers(signal.SIGUSR1)

    def handle_usr2(self):
        """\
        SIGUSR2 handling.
        Creates a new master/worker set as a slave of the current
        master without affecting old workers. Use this to do live
        deployment with the ability to backout a change.
        """
        self.reexec()

    def handle_winch(self):
        """SIGWINCH handling"""
        if self.cfg.daemon:
            self.log.info("graceful stop of workers")
            self.num_workers = 0
            self.kill_workers(signal.SIGTERM)
        else:
            self.log.debug("SIGWINCH ignored. Not daemonized")

    def maybe_promote_master(self):
        """ After a USR2 re-exec, promote this process to the sole master
        once the old parent master has gone away. """
        if self.master_pid == 0:
            return

        if self.master_pid != os.getppid():
            self.log.info("Main has been promoted.")
            # reset master infos
            self.master_name = "Main"
            self.master_pid = 0
            self.proc_name = self.cfg.proc_name
            del os.environ['GUNICORN_PID']
            # rename the pidfile
            if self.pidfile is not None:
                self.pidfile.rename(self.cfg.pidfile)
            # reset proctitle
            util._setproctitle("master [%s]" % self.proc_name)

    def wakeup(self):
        """\
        Wake up the arbiter by writing to the PIPE
        """
        try:
            os.write(self.PIPE[1], b'.')
        except IOError as e:
            # EAGAIN just means the pipe is full, which already wakes us
            if e.errno not in [errno.EAGAIN, errno.EINTR]:
                raise

    def halt(self, reason=None, exit_status=0):
        """ halt arbiter """
        self.stop()
        self.log.info("Shutting down: %s", self.master_name)
        if reason is not None:
            self.log.info("Reason: %s", reason)
        if self.pidfile is not None:
            self.pidfile.unlink()
        self.cfg.on_exit(self)
        sys.exit(exit_status)

    def sleep(self):
        """\
        Sleep until PIPE is readable or we timeout.
        A readable PIPE means a signal occurred.
        """
        try:
            ready = select.select([self.PIPE[0]], [], [], 1.0)
            if not ready[0]:
                return
            # Drain the pipe so a burst of wakeups collapses into one
            while os.read(self.PIPE[0], 1):
                pass
        except (select.error, OSError) as e:
            # TODO: select.error is a subclass of OSError since Python 3.3.
            error_number = getattr(e, 'errno', e.args[0])
            if error_number not in [errno.EAGAIN, errno.EINTR]:
                raise
        except KeyboardInterrupt:
            sys.exit()

    def stop(self, graceful=True):
        """\
        Stop workers

        :attr graceful: boolean, If True (the default) workers will be
        killed gracefully  (ie. trying to wait for the current connection)
        """
        # Only unlink unix sockets when we own them (not systemd, not re-exec)
        unlink = self.reexec_pid == self.master_pid == 0 and not self.systemd
        sock.close_sockets(self.LISTENERS, unlink)

        self.LISTENERS = []
        sig = signal.SIGTERM
        if is_posix:
            if not graceful:
                sig = signal.SIGQUIT
        limit = time.time() + self.cfg.graceful_timeout
        # instruct the workers to exit
        self.kill_workers(sig)
        # wait until the graceful timeout
        while self.WORKERS and time.time() < limit:
            time.sleep(0.1)

        # Whatever is still alive after the timeout is killed outright
        if is_posix:
            self.kill_workers(signal.SIGKILL)
        else:
            self.kill_workers(signal.SIGTERM)

    def reexec(self):
        """\
        Relaunch the master and workers.
        """
        if self.reexec_pid != 0:
            self.log.warning("USR2 signal ignored. Child exists.")
            return

        if self.master_pid != 0:
            self.log.warning("USR2 signal ignored. Parent exists.")
            return

        master_pid = os.getpid()
        self.reexec_pid = os.fork()
        if self.reexec_pid != 0:
            return

        self.cfg.pre_exec(self)

        # Pass listener fds and our pid to the new master via the environment
        environ = self.cfg.env_orig.copy()
        environ['GUNICORN_PID'] = str(master_pid)

        if self.systemd:
            environ['LISTEN_PID'] = str(os.getpid())
            environ['LISTEN_FDS'] = str(len(self.LISTENERS))
        else:
            environ['GUNICORN_FD'] = ','.join(
                str(l.fileno()) for l in self.LISTENERS)

        os.chdir(self.START_CTX['cwd'])

        # exec the process using the original environment
        os.execvpe(self.START_CTX[0], self.START_CTX['args'], environ)

    def reload(self):
        old_address = self.cfg.address

        # reset old environment
        for k in self.cfg.env:
            if k in self.cfg.env_orig:
                # reset the key to the value it had before
                # we launched gunicorn
                os.environ[k] = self.cfg.env_orig[k]
            else:
                # delete the value set by gunicorn
                try:
                    del os.environ[k]
                except KeyError:
                    pass

        # reload conf
        self.app.reload()
        self.setup(self.app)

        # reopen log files
        self.log.reopen_files()

        # do we need to change listener ?
        if old_address != self.cfg.address:
            # close all listeners
            for l in self.LISTENERS:
                l.close()
            # init new listeners
            self.LISTENERS = sock.create_sockets(self.cfg, self.log)
            listeners_str = ",".join([str(l) for l in self.LISTENERS])
            self.log.info("Listening at: %s", listeners_str)

        # do some actions on reload
        self.cfg.on_reload(self)

        # unlink pidfile
        if self.pidfile is not None:
            self.pidfile.unlink()

        # create new pidfile
        if self.cfg.pidfile is not None:
            self.pidfile = Pidfile(self.cfg.pidfile)
            self.pidfile.create(self.pid)

        # set new proc_name
        util._setproctitle("main [%s]" % self.proc_name)

        # spawn new workers
        for idx, _ in enumerate(range(self.cfg.workers)):
            os.environ['ZATO_SERVER_WORKER_IDX'] = str(idx)
            self.spawn_worker()

        # manage workers
        self.manage_workers()

    def murder_workers(self):
        """\
        Kill unused/idle workers
        """
        if not self.timeout:
            return
        workers = list(self.WORKERS.items())
        for (pid, worker) in workers:
            try:
                # Workers heartbeat by touching their tmp file; a stale
                # mtime beyond the timeout means the worker is stuck.
                if time.time() - worker.tmp.last_update() <= self.timeout:
                    continue
            except (OSError, ValueError):
                continue

            if not worker.aborted:
                self.log.critical("WORKER TIMEOUT (pid:%s)", pid)
                worker.aborted = True
                self.kill_worker(pid, signal.SIGABRT)
            else:
                self.kill_worker(pid, signal.SIGKILL)

    def reap_workers(self):
        """\
        Reap workers to avoid zombie processes
        """
        try:
            while True:
                wpid, status = os.waitpid(-1, os.WNOHANG)
                if not wpid:
                    break
                if self.reexec_pid == wpid:
                    self.reexec_pid = 0
                else:
                    # A worker was terminated. If the termination reason was
                    # that it could not boot, we'll shut it down to avoid
                    # infinite start/stop cycles.
                    exitcode = status >> 8
                    if exitcode == self.WORKER_BOOT_ERROR:
                        sys.exit(self.WORKER_BOOT_ERROR)
                    if exitcode == self.APP_LOAD_ERROR:
                        sys.exit(self.APP_LOAD_ERROR)
                    worker = self.WORKERS.pop(wpid, None)
                    if not worker:
                        continue
                    worker.tmp.close()
                    self.cfg.child_exit(self, worker)
        except OSError as e:
            # ECHILD simply means there are no children left to reap
            if e.errno != errno.ECHILD:
                raise

    def manage_workers(self):
        """\
        Maintain the number of workers by spawning or killing
        as required.
        """
        if len(self.WORKERS.keys()) < self.num_workers:
            self.spawn_workers()

        workers = self.WORKERS.items()
        workers = sorted(workers, key=lambda w: w[1].age)
        # Terminate the oldest workers first when scaling down
        while len(workers) > self.num_workers:
            (pid, _) = workers.pop(0)
            self.kill_worker(pid, signal.SIGTERM)

        active_worker_count = len(workers)
        # Log only on change to avoid flooding the debug log
        if self._last_logged_active_worker_count != active_worker_count:
            self._last_logged_active_worker_count = active_worker_count
            self.log.debug("{0} workers".format(active_worker_count),
                           extra={"metric": "gunicorn.workers",
                                  "value": active_worker_count,
                                  "mtype": "gauge"})

    def spawn_worker(self):
        self.worker_age += 1
        worker = self.worker_class(self.worker_age, self.pid, self.LISTENERS, self.app, self.timeout / 2.0, self.cfg, self.log)
        self.cfg.pre_fork(self, worker)

        # Fork only if we can fork on this system
        # and if memory profiling is not enabled.
        needs_fork = is_forking and (not os.environ.get(OS_Env.Zato_Enable_Memory_Profiler))

        if needs_fork:
            pid = os.fork()
            # In the parent: record the child and return its pid
            if pid != 0:
                worker.pid = pid
                self.WORKERS[pid] = worker
                return pid

            # Do not inherit the temporary files of other workers
            for sibling in self.WORKERS.values():
                sibling.tmp.close()

            # Process Child
            worker.pid = os.getpid()

        # .. no forking on this system
        else:
            self.WORKERS[worker.pid] = worker

        try:
            util._setproctitle("zato [%s]" % self.proc_name)
            self.log.info("Booting process with pid: %s", worker.pid)
            self.cfg.post_fork(self, worker)
            worker.init_process()
            sys.exit(0)
        except SystemExit:
            raise
        except AppImportError as e:
            self.log.debug("Exception while loading the application",
                           exc_info=True)
            print("%s" % e, file=sys.stderr)
            sys.stderr.flush()
            sys.exit(self.APP_LOAD_ERROR)
        except Exception as e:
            self.log.exception("Exception in worker process -> `%s`", e.args)
            if not worker.booted:
                sys.exit(self.WORKER_BOOT_ERROR)
            sys.exit(-1)
        finally:
            # We go here only if we are forking because otherwise
            # we would run this piece of code immediately during startup.
            if is_forking:
                self.log.info("Worker exiting (pid: %s)", worker.pid)
                try:
                    worker.tmp.close()
                    self.cfg.worker_exit(self, worker)
                except Exception as e:
                    self.log.warning("Exception during worker exit -> %s", e.args[0])

    def spawn_workers(self):
        """\
        Spawn new workers as needed.

        This is where a worker process leaves the main loop
        of the master process.
        """
        noun = 'process' if self.num_workers == 1 else 'processes'
        msg = f'Booting {self.num_workers} {noun}'
        self.log.info(msg)

        for idx, _ in enumerate(range(self.num_workers - len(self.WORKERS.keys()))):
            os.environ['ZATO_SERVER_WORKER_IDX'] = str(idx)
            self.spawn_worker()
            # Stagger startups slightly so workers do not thunder in together
            time.sleep(0.1 * random.random())

    def kill_workers(self, sig):
        """\
        Kill all workers with the signal `sig`
        :attr sig: `signal.SIG*` value
        """
        worker_pids = list(self.WORKERS.keys())
        for pid in worker_pids:
            self.kill_worker(pid, sig)

    def kill_worker(self, pid, sig):
        """\
        Kill a worker

        :attr pid: int, worker pid
        :attr sig: `signal.SIG*` value
        """
        worker = self.WORKERS.get(pid)
        self.cfg.before_pid_kill(self, worker)
        try:
            os.kill(pid, sig)
        except OSError as e:
            if e.errno == errno.ESRCH:
                # Process is already gone - clean up our bookkeeping
                try:
                    worker = self.WORKERS.pop(pid)
                    worker.tmp.close()
                    self.cfg.worker_exit(self, worker)
                    return
                except (KeyError, OSError):
                    return
            raise
| 22,412
|
Python
|
.py
| 591
| 27.142132
| 127
| 0.564568
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,161
|
util.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/util.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import email.utils
import io
import os
import pkg_resources
import random
import socket
import sys
import textwrap
import time
import traceback
import inspect
import errno
import warnings
import logging
import re
from platform import system as platform_system
from time import gmtime
from zato.server.ext.zunicorn import _compat
from zato.server.ext.zunicorn.errors import AppImportError
from zato.server.ext.zunicorn.six import text_type
from zato.server.ext.zunicorn.workers import SUPPORTED_WORKERS
REDIRECT_TO = getattr(os, 'devnull', '/dev/null')
# Forking to child processes is used only on Linux
is_linux = 'linux' in platform_system().lower()
# Forking to child processes is used only on Linux
is_forking = is_linux
days = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
months = (None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')
# Server and Date aren't technically hop-by-hop
# headers, but they are in the purview of the
# origin server which the WSGI spec says we should
# act like. So we drop them and add our own.
#
# In the future, concatenation server header values
# might be better, but nothing else does it and
# dropping them is easier.
hop_headers = set("""
connection keep-alive proxy-authenticate proxy-authorization
te trailers transfer-encoding upgrade
server date
""".split())
try:
from setproctitle import setproctitle
def _setproctitle(title):
setproctitle("gunicorn: %s" % title)
except ImportError:
def _setproctitle(title):
return
try:
    # Python 2.7+/3.x: use the stdlib implementation directly
    from importlib import import_module
except ImportError:
    # Ancient interpreters only: minimal reimplementation of importlib

    def _resolve_name(name, package, level):
        """Return the absolute name of the module to be imported."""
        if not hasattr(package, 'rindex'):
            raise ValueError("'package' not set to a string")
        dot = len(package)
        # Walk up 'level - 1' package levels to find the anchor package
        for _ in range(level, 1, -1):
            try:
                dot = package.rindex('.', 0, dot)
            except ValueError:
                msg = "attempted relative import beyond top-level package"
                raise ValueError(msg)
        return "%s.%s" % (package[:dot], name)

    def import_module(name, package=None):
        """Import a module.

        The 'package' argument is required when performing a relative import. It
        specifies the package to use as the anchor point from which to resolve the
        relative import to an absolute import.
        """
        if name.startswith('.'):
            if not package:
                raise TypeError("relative imports require the 'package' argument")
            # Count the leading dots to determine the relative import level
            level = 0
            for character in name:
                if character != '.':
                    break
                level += 1
            name = _resolve_name(name[level:], package, level)
        __import__(name)
        # __import__ returns the top-level package; fetch the actual module
        return sys.modules[name]
def load_class(uri, default="zato.server.ext.zunicorn.workers.sync.SyncWorker", section="zato.server.ext.zunicorn.workers"):
    """Resolve *uri* to a worker class.

    Accepts: an actual class (returned as-is), an 'egg:dist#name' entry-point
    specifier, a bare alias listed in SUPPORTED_WORKERS, a bare gunicorn
    entry-point name, or a fully dotted 'pkg.module.Class' path.
    Raises RuntimeError with the captured traceback when resolution fails.
    """
    if inspect.isclass(uri):
        return uri
    if uri.startswith("egg:"):
        # uses entry points
        entry_str = uri.split("egg:")[1]
        try:
            dist, name = entry_str.rsplit("#", 1)
        except ValueError:
            # No '#name' part - fall back to the default worker name
            dist = entry_str
            name = default

        try:
            return pkg_resources.load_entry_point(dist, section, name)
        except:
            exc = traceback.format_exc()
            msg = "class uri %r invalid or not found: \n\n[%s]"
            raise RuntimeError(msg % (uri, exc))
    else:
        components = uri.split('.')
        if len(components) == 1:
            # Bare name: either a supported worker alias or a gunicorn entry point
            while True:
                if uri.startswith("#"):
                    uri = uri[1:]

                if uri in SUPPORTED_WORKERS:
                    components = SUPPORTED_WORKERS[uri].split(".")
                    break

                try:
                    return pkg_resources.load_entry_point("gunicorn",
                                                          section, uri)
                except:
                    exc = traceback.format_exc()
                    msg = "class uri %r invalid or not found: \n\n[%s]"
                    raise RuntimeError(msg % (uri, exc))

        # Dotted path: import the module, then fetch the class attribute
        klass = components.pop(-1)
        try:
            mod = import_module('.'.join(components))
        except:
            exc = traceback.format_exc()
            msg = "class uri %r invalid or not found: \n\n[%s]"
            raise RuntimeError(msg % (uri, exc))
        return getattr(mod, klass)
def get_username(uid):
    """Resolve the numeric user id *uid* to its account name.

    Raises KeyError when the uid is not present in the password database.
    """
    # stdlib
    import pwd

    record = pwd.getpwuid(uid)
    return record.pw_name
def set_owner_process(uid, gid, initgroups=False):
    """ set user and group of workers processes """
    if gid:
        if uid:
            try:
                username = get_username(uid)
            except KeyError:
                # Unknown uid - cannot initialize supplementary groups
                initgroups = False

        # versions of python < 2.6.2 don't manage unsigned int for
        # groups like on osx or fedora
        gid = abs(gid) & 0x7FFFFFFF

        # NOTE(review): if gid is truthy, initgroups is True and uid is falsy,
        # 'username' is never assigned and os.initgroups raises NameError -
        # confirm callers always pass uid together with initgroups.
        if initgroups:
            os.initgroups(username, gid)
        elif gid != os.getgid():
            os.setgid(gid)

    if uid:
        os.setuid(uid)
def chown(path, uid, gid):
    """Change ownership of *path*, masking gid to a positive 31-bit value
    (older interpreters mishandled unsigned group ids on some platforms)."""
    safe_gid = abs(gid) & 0x7FFFFFFF
    os.chown(path, uid, safe_gid)
if sys.platform.startswith("win"):
def _waitfor(func, pathname, waitall=False):
# Peform the operation
func(pathname)
# Now setup the wait loop
if waitall:
dirname = pathname
else:
dirname, name = os.path.split(pathname)
dirname = dirname or '.'
# Check for `pathname` to be removed from the filesystem.
# The exponential backoff of the timeout amounts to a total
# of ~1 second after which the deletion is probably an error
# anyway.
# Testing on a i7@4.3GHz shows that usually only 1 iteration is
# required when contention occurs.
timeout = 0.001
while timeout < 1.0:
# Note we are only testing for the existence of the file(s) in
# the contents of the directory regardless of any security or
# access rights. If we have made it this far, we have sufficient
# permissions to do that much using Python's equivalent of the
# Windows API FindFirstFile.
# Other Windows APIs can fail or give incorrect results when
# dealing with files that are pending deletion.
L = os.listdir(dirname)
if not L if waitall else name in L:
return
# Increase the timeout and try again
time.sleep(timeout)
timeout *= 2
warnings.warn('tests may fail, delete still pending for ' + pathname,
RuntimeWarning, stacklevel=4)
def _unlink(filename):
_waitfor(os.unlink, filename)
else:
_unlink = os.unlink
def unlink(filename):
    """Remove *filename*, silently ignoring the case where it (or a parent
    path component) does not exist."""
    try:
        _unlink(filename)
    except OSError as error:
        # The filename need not exist - anything else is a real error.
        if error.errno not in (errno.ENOENT, errno.ENOTDIR):
            raise
def is_ipv6(addr):
    """Return True if *addr* is a syntactically valid IPv6 address."""
    try:
        socket.inet_pton(socket.AF_INET6, addr)
    except socket.error:
        # not a valid address
        return False
    except ValueError:
        # ipv6 not supported on this platform
        return False
    return True
def parse_address(netloc, default_port=8000):
    """Parse a bind specifier into an address.

    Supports 'host:port', '[ipv6]:port', 'tcp://host:port' and 'unix:path'
    forms. Returns a (host, port) tuple, or a bare filesystem path string
    for unix domain sockets. Raises RuntimeError for a non-numeric port.
    """
    # Unix domain sockets come back as a plain path.
    if re.match(r'unix:(//)?', netloc):
        return re.split(r'unix:(//)?', netloc)[-1]

    if netloc.startswith("tcp://"):
        netloc = netloc.split("tcp://")[1]

    # Host part: bracketed IPv6, host:port, empty (bind all), or bare host.
    if '[' in netloc and ']' in netloc:
        host = netloc.split(']')[0][1:].lower()
    elif ':' in netloc:
        host = netloc.split(':')[0].lower()
    elif netloc == "":
        host = "0.0.0.0"
    else:
        host = netloc.lower()

    # Port part: whatever follows the closing bracket, if any.
    tail = netloc.split(']')[-1]
    if ":" not in tail:
        return (host, default_port)

    port_text = tail.split(':', 1)[1]
    if not port_text.isdigit():
        raise RuntimeError("%r is not a valid port number." % port_text)
    return (host, int(port_text))
def close_on_exec(fd):
    """Mark descriptor *fd* so it is closed automatically across exec()."""
    # stdlib
    import fcntl

    current = fcntl.fcntl(fd, fcntl.F_GETFD)
    fcntl.fcntl(fd, fcntl.F_SETFD, current | fcntl.FD_CLOEXEC)
def set_non_blocking(fd):
    """Switch descriptor *fd* into non-blocking mode."""
    # stdlib
    import fcntl

    new_flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
    fcntl.fcntl(fd, fcntl.F_SETFL, new_flags)
def close(sock):
    """Close *sock*, swallowing socket-level errors (best effort)."""
    try:
        sock.close()
    except socket.error:
        # Already closed or torn down by the peer - nothing left to do.
        pass
try:
    # Available in the stdlib since Python 2.6.
    from os import closerange
except ImportError:
    def closerange(fd_low, fd_high):
        """Close every descriptor in [fd_low, fd_high), ignoring unopened ones."""
        for fd in range(fd_low, fd_high):
            try:
                os.close(fd)
            except OSError:
                # fd wasn't open to begin with (ignored)
                pass
def write_chunk(sock, data):
    """Send *data* on *sock* framed as one HTTP/1.1 transfer-encoding chunk
    (hex length, CRLF, payload, CRLF). Text input is encoded as UTF-8."""
    if isinstance(data, text_type):
        data = data.encode('utf-8')
    header = ("%X\r\n" % len(data)).encode('utf-8')
    sock.sendall(header + data + b"\r\n")
def write(sock, data, chunked=False):
    """Send *data* on *sock*, using chunked framing when *chunked* is true."""
    if not chunked:
        sock.sendall(data)
        return
    return write_chunk(sock, data)
def write_nonblock(sock, data, chunked=False):
    """Send without blocking: if *sock* currently has a (possibly infinite)
    timeout, flip it to non-blocking for the send and restore blocking mode
    afterwards; an already non-blocking socket is written to as-is."""
    if sock.gettimeout() == 0.0:
        # Already non-blocking - nothing to toggle.
        return write(sock, data, chunked)
    try:
        sock.setblocking(0)
        return write(sock, data, chunked)
    finally:
        sock.setblocking(1)
def write_error(sock, status_int, reason, mesg):
    """Send a complete minimal HTTP error response on *sock*.

    *mesg* is HTML-escaped before being embedded in the body; the whole
    response (status line, headers, body) is written non-blockingly.
    """
    body = textwrap.dedent("""\
    <html>
      <head>
        <title>%(reason)s</title>
      </head>
      <body>
        <h1><p>%(reason)s</p></h1>
        %(mesg)s
      </body>
    </html>
    """) % {"reason": reason, "mesg": _compat.html_escape(mesg)}

    payload = textwrap.dedent("""\
    HTTP/1.1 %s %s\r
    Connection: close\r
    Content-Type: text/html\r
    Content-Length: %d\r
    \r
    %s""") % (str(status_int), reason, len(body), body)
    write_nonblock(sock, payload.encode('latin1'))
def import_app(module):
    """Load a WSGI application from a 'module[:name]' specifier.

    *name* defaults to 'application'. Raises ImportError with a hint when a
    filesystem path was passed instead of a module, and AppImportError when
    the object is missing, None or not callable.
    """
    parts = module.split(":", 1)
    if len(parts) == 1:
        obj = "application"
    else:
        module, obj = parts

    try:
        __import__(module)
    except ImportError:
        # A common mistake: passing 'app.py' instead of 'app'.
        if module.endswith(".py") and os.path.exists(module):
            msg = "Failed to find application, did you mean '%s:%s'?"
            raise ImportError(msg % (module.rsplit(".", 1)[0], obj))
        raise

    mod = sys.modules[module]
    is_debug = logging.root.level == logging.DEBUG

    try:
        # NOTE: eval on the module namespace allows dotted/indexed
        # expressions after ':' - the specifier must be trusted input.
        app = eval(obj, vars(mod))
    except NameError:
        if is_debug:
            traceback.print_exception(*sys.exc_info())
        raise AppImportError("Failed to find application object %r in %r" % (obj, module))

    if app is None:
        raise AppImportError("Failed to find application object: %r" % obj)
    if not callable(app):
        raise AppImportError("Application object must be callable.")
    return app
def getcwd():
    """Return the current working directory.

    Prefers $PWD when it refers to the same directory as os.getcwd(), which
    preserves symlinked paths the user navigated through; otherwise falls
    back to the kernel's idea of the cwd.
    """
    # get current path, try to use PWD env first
    try:
        a = os.stat(os.environ['PWD'])
        b = os.stat(os.getcwd())
        if a.st_ino == b.st_ino and a.st_dev == b.st_dev:
            cwd = os.environ['PWD']
        else:
            cwd = os.getcwd()
    except (KeyError, OSError):
        # BUG FIX: this was a bare 'except:', which also swallowed
        # SystemExit/KeyboardInterrupt. Only $PWD being unset (KeyError)
        # or unstat-able paths (OSError) are expected here.
        cwd = os.getcwd()
    return cwd
def http_date(_gmtime=gmtime, _days=days, _months=months):
    """ Return the current date and time formatted for a message header.

    Produces an RFC 7231 IMF-fixdate, e.g. 'Mon, 02 Jan 2023 15:04:05 GMT'.
    The default arguments exist only to bind the helpers as locals.
    """
    _time_tuple = _gmtime()
    # BUG FIX: the year component was missing from the format string,
    # yielding e.g. 'Mon, 02 Jan 15:04:05 GMT' - not a valid HTTP-date.
    return '%s, %02d %s %4d %02d:%02d:%02d GMT' % (
        _days[_time_tuple.tm_wday],
        _time_tuple.tm_mday,
        _months[_time_tuple.tm_mon],
        _time_tuple.tm_year,
        _time_tuple.tm_hour,
        _time_tuple.tm_min,
        _time_tuple.tm_sec
    )
def is_hoppish(header):
    """True if *header* is one we treat as hop-by-hop and must not forward."""
    normalized = header.strip().lower()
    return normalized in hop_headers
def daemonize(enable_stdio_inheritance=False):
    """\
    Standard daemonization of a process.
    http://www.svbug.com/documentation/comp.unix.programmer-FAQ/faq_2.html#SEC16

    Uses the classic double-fork + setsid sequence; a no-op when GUNICORN_FD
    is present (i.e. when we were re-exec'd by an existing master).
    """
    if 'GUNICORN_FD' not in os.environ:
        # First fork: detach from the launching process
        if os.fork():
            os._exit(0)
        # Become session leader, then fork again so we can never
        # re-acquire a controlling terminal
        os.setsid()

        if os.fork():
            os._exit(0)

        os.umask(0o22)

        # In both the following any file descriptors above stdin
        # stdout and stderr are left untouched. The inheritance
        # option simply allows one to have output go to a file
        # specified by way of shell redirection when not wanting
        # to use --error-log option.

        if not enable_stdio_inheritance:
            # Remap all of stdin, stdout and stderr on to
            # /dev/null. The expectation is that users have
            # specified the --error-log option.

            closerange(0, 3)

            fd_null = os.open(REDIRECT_TO, os.O_RDWR)

            if fd_null != 0:
                os.dup2(fd_null, 0)

            os.dup2(fd_null, 1)
            os.dup2(fd_null, 2)
        else:
            fd_null = os.open(REDIRECT_TO, os.O_RDWR)

            # Always redirect stdin to /dev/null as we would
            # never expect to need to read interactive input.

            if fd_null != 0:
                os.close(0)
                os.dup2(fd_null, 0)

            # If stdout and stderr are still connected to
            # their original file descriptors we check to see
            # if they are associated with terminal devices.
            # When they are we map them to /dev/null so that
            # are still detached from any controlling terminal
            # properly. If not we preserve them as they are.
            #
            # If stdin and stdout were not hooked up to the
            # original file descriptors, then all bets are
            # off and all we can really do is leave them as
            # they were.
            #
            # This will allow 'gunicorn ... > output.log 2>&1'
            # to work with stdout/stderr going to the file
            # as expected.
            #
            # Note that if using --error-log option, the log
            # file specified through shell redirection will
            # only be used up until the log file specified
            # by the option takes over. As it replaces stdout
            # and stderr at the file descriptor level, then
            # anything using stdout or stderr, including having
            # cached a reference to them, will still work.

            def redirect(stream, fd_expect):
                try:
                    fd = stream.fileno()
                    if fd == fd_expect and stream.isatty():
                        os.close(fd)
                        os.dup2(fd_null, fd)
                except AttributeError:
                    pass

            redirect(sys.stdout, 1)
            redirect(sys.stderr, 2)
def seed():
    """Seed the ``random`` module, preferring OS-provided entropy.

    Falls back to a time/PID derived string on platforms where
    ``os.urandom`` is not implemented.
    """
    try:
        entropy = os.urandom(64)
    except NotImplementedError:
        random.seed('%s.%s' % (time.time(), os.getpid()))
    else:
        random.seed(entropy)
def check_is_writeable(path):
    """Raise RuntimeError unless *path* can be opened for appending.

    Note that opening in append mode creates the file when it is absent.
    """
    try:
        handle = open(path, 'a')
    except IOError as exc:
        raise RuntimeError("Error: '%s' isn't writable [%r]" % (path, exc))
    handle.close()
def to_bytestring(value, encoding="utf8"):
    """Convert a string argument to a byte string.

    :param value: a byte string (returned unchanged) or a text string
    :param encoding: codec used to encode text input, default ``utf8``
    :raises TypeError: if *value* is neither bytes nor text
    """
    if isinstance(value, bytes):
        return value
    if not isinstance(value, text_type):
        raise TypeError('%r is not a string' % value)
    # Bug fix: honour the caller-supplied encoding instead of the
    # previously hard-coded 'utf8'.
    return value.encode(encoding)
def has_fileno(obj):
    """Return True if *obj* exposes a working ``fileno()`` method."""
    if not hasattr(obj, "fileno"):
        return False
    # Some file-like objects (e.g. BytesIO) define fileno() but raise
    # when it is actually invoked, so probe with a real call.
    try:
        obj.fileno()
        return True
    except (AttributeError, IOError, io.UnsupportedOperation):
        return False
def warn(msg):
    """Print *msg* to stderr framed by '!!!' markers, then flush."""
    print("!!!", file=sys.stderr)
    for idx, line in enumerate(msg.splitlines()):
        # Only the first line carries the WARNING: prefix.
        prefix = "WARNING: " if idx == 0 else ""
        print("!!! %s%s" % (prefix, line), file=sys.stderr)
    print("!!!\n", file=sys.stderr)
    sys.stderr.flush()
def make_fail_app(msg):
    """Build a minimal WSGI app that always answers 500 with *msg* as body."""
    msg = to_bytestring(msg)

    def app(environ, start_response):
        headers = [
            ("Content-Type", "text/plain"),
            ("Content-Length", str(len(msg)))
        ]
        start_response("500 Internal Server Error", headers)
        return [msg]

    return app
def split_request_uri(uri):
    """Split *uri* like ``urlsplit`` while forcing a leading ``//`` to be
    treated as an abs_path.

    urlsplit would otherwise parse a ``//``-prefixed URI as a relative URI,
    whereas RFC 2616 sec. 5.1.2 mandates abs_path semantics here; a
    temporary ``.`` prefix works around that behaviour.
    """
    if not uri.startswith("//"):
        return _compat.urlsplit(uri)
    parts = _compat.urlsplit("." + uri)
    return parts._replace(path=parts.path[1:])
| 18,201
|
Python
|
.py
| 492
| 29.006098
| 124
| 0.610805
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,162
|
config.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/config.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
# Please remember to run "make -C docs html" after update "desc" attributes.
import copy
import inspect
try:
import argparse
except ImportError: # python 2.6
from . import argparse_compat as argparse
import os
import re
import ssl
import sys
import textwrap
import shlex
from zato.common.util.platform_ import is_posix
from zato.server.ext.zunicorn import _compat
from zato.server.ext.zunicorn.errors import ConfigError
from zato.server.ext.zunicorn.reloader import reloader_engines
from zato.server.ext.zunicorn import six
from zato.server.ext.zunicorn import util
# Registry of all concrete Setting subclasses; populated by SettingMeta
# as each settings class is defined.
KNOWN_SETTINGS = []
PLATFORM = sys.platform


def make_settings(ignore=None):
    """Instantiate every registered setting and return them keyed by name.

    :param ignore: optional iterable of setting names to leave out
    """
    ignored = ignore or ()
    settings = {}
    for setting_class in KNOWN_SETTINGS:
        instance = setting_class()
        if instance.name in ignored:
            continue
        settings[instance.name] = instance.copy()
    return settings
def auto_int(_, x):
    """Parse *x* as an int, accepting legacy C-style octal such as ``0777``.

    ``int(x, 0)`` only understands the modern ``0o`` prefix, so a bare
    leading zero followed by a digit is rewritten first.
    """
    if re.match(r'0(\d)', x, re.IGNORECASE):
        # The match is anchored at the start, so x[0] is the leading '0'.
        x = '0o' + x[1:]
    return int(x, 0)
class Config:
    """Aggregates all Setting instances and exposes their values as attributes.

    Values are read through ``__getattr__`` (e.g. ``cfg.workers``) and must
    be written through :meth:`set`; direct assignment to a known setting is
    blocked by ``__setattr__``.
    """

    def __init__(self, usage=None, prog=None):
        self.settings = make_settings()
        self.usage = usage
        self.prog = prog or os.path.basename(sys.argv[0])
        # Snapshot of the process environment, used by get_cmd_args_from_env.
        self.env_orig = os.environ.copy()
        from zato.server.ext.zunicorn import SERVER_SOFTWARE
        self.server_software = SERVER_SOFTWARE

    def __getattr__(self, name):
        # Only reached for names not found through normal lookup:
        # delegate to the settings registry.
        if name not in self.settings:
            raise AttributeError("No configuration setting for: %s" % name)
        return self.settings[name].get()

    def __setattr__(self, name, value):
        # Known settings must be changed via set() so their validator runs.
        if name != "settings" and name in self.settings:
            raise AttributeError("Invalid access!")
        super(Config, self).__setattr__(name, value)

    def set(self, name, value):
        """Assign *value* to the setting *name*, running its validator."""
        if name not in self.settings:
            raise AttributeError("No configuration setting for: %s" % name)
        self.settings[name].set(value)

    def get_cmd_args_from_env(self):
        """Return extra CLI arguments from $GUNICORN_CMD_ARGS, if set."""
        if 'GUNICORN_CMD_ARGS' in self.env_orig:
            return shlex.split(self.env_orig['GUNICORN_CMD_ARGS'])
        return []

    def parser(self):
        """Build an argparse parser with one option per registered setting."""
        kwargs = {
            "usage": self.usage,
            "prog": self.prog
        }
        parser = argparse.ArgumentParser(**kwargs)
        parser.add_argument("-v", "--version",
                            action="version", default=argparse.SUPPRESS,
                            version="%(prog)s (version " + 'n/a' + ")\n",
                            help="show program's version number and exit")
        parser.add_argument("args", nargs="*", help=argparse.SUPPRESS)
        # Sorting by the Setting instances themselves uses Setting.__lt__,
        # grouping options by section and declaration order.
        keys = sorted(self.settings, key=self.settings.__getitem__)
        for k in keys:
            self.settings[k].add_option(parser)
        return parser

    @property
    def worker_class_str(self):
        uri = self.settings['worker_class'].get()
        ## are we using a threaded worker?
        is_sync = uri.endswith('SyncWorker') or uri == 'sync'
        if is_sync and self.threads > 1:
            return "threads"
        return uri

    @property
    def worker_class(self):
        """Resolve and return the worker class object to instantiate."""
        uri = self.settings['worker_class'].get()
        ## are we using a threaded worker?
        is_sync = uri.endswith('SyncWorker') or uri == 'sync'
        if is_sync and self.threads > 1:
            # sync worker with threads > 1 silently upgrades to gthread.
            uri = "gunicorn.workers.gthread.ThreadWorker"
        worker_class = util.load_class(uri)
        if hasattr(worker_class, "setup"):
            worker_class.setup()
        return worker_class

    @property
    def address(self):
        # One parsed (host, port) / path entry per configured bind address.
        s = self.settings['bind'].get()
        return [util.parse_address(_compat.bytes_to_str(bind)) for bind in s]

    @property
    def uid(self):
        return self.settings['user'].get()

    @property
    def gid(self):
        return self.settings['group'].get()

    @property
    def proc_name(self):
        pn = self.settings['proc_name'].get()
        if pn is not None:
            return pn
        else:
            return self.settings['default_proc_name'].get()

    @property
    def logger_class(self):
        """Resolve the logger class, auto-selecting statsd when configured."""
        uri = self.settings['logger_class'].get()
        if uri == "simple":
            # support the default
            uri = LoggerClass.default
        # if default logger is in use, and statsd is on, automagically switch
        # to the statsd logger
        if uri == LoggerClass.default:
            if 'statsd_host' in self.settings and self.settings['statsd_host'].value is not None:
                uri = "zato.server.ext.zunicorn.instrument.statsd.Statsd"
        logger_class = util.load_class(
            uri,
            default="zato.server.ext.zunicorn.glogging.Logger",
            section="zato.server.ext.zunicorn.loggers")
        if hasattr(logger_class, "install"):
            logger_class.install()
        return logger_class

    @property
    def is_ssl(self):
        # Truthy when either half of the cert/key pair is configured.
        return self.certfile or self.keyfile

    @property
    def ssl_options(self):
        # All settings declared in the 'SSL' section, keyed by name.
        opts = {}
        for name, value in self.settings.items():
            if value.section == 'SSL':
                opts[name] = value.get()
        return opts

    @property
    def env(self):
        """Parse raw_env entries of the form KEY=VALUE into a dict."""
        raw_env = self.settings['raw_env'].get()
        env = {}
        if not raw_env:
            return env
        for e in raw_env:
            s = _compat.bytes_to_str(e)
            try:
                k, v = s.split('=', 1)
            except ValueError:
                raise RuntimeError("environment setting %r invalid" % s)
            env[k] = v
        return env

    @property
    def sendfile(self):
        # Explicit --no-sendfile wins; otherwise honour $SENDFILE; default on.
        if self.settings['sendfile'].get() is not None:
            return False
        if 'SENDFILE' in os.environ:
            sendfile = os.environ['SENDFILE'].lower()
            return sendfile in ['y', '1', 'yes', 'true']
        return True

    @property
    def reuse_port(self):
        return self.settings['reuse_port'].get()

    @property
    def paste_global_conf(self):
        """Parse raw_paste_global_conf KEY=VALUE pairs (``\\=`` escapes '=')."""
        raw_global_conf = self.settings['raw_paste_global_conf'].get()
        if raw_global_conf is None:
            return None
        global_conf = {}
        for e in raw_global_conf:
            s = _compat.bytes_to_str(e)
            try:
                # Split on the first '=' that is not escaped with a backslash.
                k, v = re.split(r'(?<!\\)=', s, 1)
            except ValueError:
                raise RuntimeError("environment setting %r invalid" % s)
            k = k.replace('\\=', '=')
            v = v.replace('\\=', '=')
            global_conf[k] = v
        return global_conf
class SettingMeta(type):
    """Metaclass that auto-registers every concrete Setting subclass.

    Concrete subclasses (those whose bases include a SettingMeta instance)
    receive an ``order`` index, a static ``validator``, dedented ``desc`` /
    ``short`` attributes, and are appended to ``KNOWN_SETTINGS``.
    """

    def __new__(cls, name, bases, attrs):
        build = super(SettingMeta, cls).__new__
        has_setting_base = any(isinstance(base, SettingMeta) for base in bases)
        if not has_setting_base:
            # The abstract Setting base itself: create it unregistered.
            return build(cls, name, bases, attrs)
        attrs["order"] = len(KNOWN_SETTINGS)
        attrs["validator"] = staticmethod(attrs["validator"])
        setting_class = build(cls, name, bases, attrs)
        setting_class.fmt_desc(attrs.get("desc", ""))
        KNOWN_SETTINGS.append(setting_class)
        return setting_class

    def fmt_desc(cls, desc):
        """Dedent *desc*; expose it and its first line as class attributes."""
        cleaned = textwrap.dedent(desc).strip()
        setattr(cls, "desc", cleaned)
        setattr(cls, "short", cleaned.splitlines()[0])
class Setting:
    """Base class describing a single configuration knob.

    Subclasses declare the option's name, CLI flags, validator, default
    value and documentation; instances hold the current (validated) value.
    """

    name = None
    value = None
    section = None
    cli = None
    validator = None
    type = None
    meta = None
    action = None
    default = None
    short = None
    desc = None
    nargs = None
    const = None

    def __init__(self):
        # Run the declared default through the validator so it is
        # normalized exactly like a user-supplied value.
        if self.default is not None:
            self.set(self.default)

    def add_option(self, parser):
        """Register this setting's command-line option on *parser*."""
        if not self.cli:
            return
        # Escape '%' so argparse's help formatter does not interpolate it.
        help_txt = ("%s [%s]" % (self.short, self.default)).replace("%", "%%")
        kwargs = {
            "dest": self.name,
            "action": self.action or "store",
            "type": self.type or str,
            "default": None,
            "help": help_txt
        }
        if self.meta is not None:
            kwargs['metavar'] = self.meta
        if kwargs["action"] != "store":
            # Non-store actions (store_true, append, ...) reject "type".
            kwargs.pop("type")
        if self.nargs is not None:
            kwargs["nargs"] = self.nargs
        if self.const is not None:
            kwargs["const"] = self.const
        parser.add_argument(*tuple(self.cli), **kwargs)

    def copy(self):
        return copy.copy(self)

    def get(self):
        return self.value

    def set(self, val):
        if not six.callable(self.validator):
            raise TypeError('Invalid validator: %s' % self.name)
        self.value = self.validator(val)

    def __lt__(self, other):
        # Orders settings of the same section by declaration order.
        return (self.section == other.section and
                self.order < other.order)
    __cmp__ = __lt__
Setting = SettingMeta('Setting', (Setting,), {})
def validate_bool(val):
    """Coerce *val* (bool, "true"/"false" string, or None) to a bool."""
    if val is None:
        return
    if isinstance(val, bool):
        return val
    if not isinstance(val, six.string_types):
        raise TypeError("Invalid type for casting: %s" % val)
    lowered = val.lower().strip()
    if lowered == "true":
        return True
    if lowered == "false":
        return False
    raise ValueError("Invalid boolean: %s" % val)
def validate_dict(val):
    """Return *val* unchanged if it is a dict, else raise TypeError."""
    if isinstance(val, dict):
        return val
    raise TypeError("Value is not a dictionary: %s " % val)
def validate_pos_int(val):
    """Coerce *val* to a non-negative int, raising ValueError if negative."""
    if isinstance(val, six.integer_types):
        # Booleans are ints!
        val = int(val)
    else:
        # Strings: base 0 accepts decimal, hex (0x) and octal (0o) forms.
        val = int(val, 0)
    if val < 0:
        raise ValueError("Value must be positive: %s" % val)
    return val
def validate_string(val):
    """Strip and return *val*; None passes through, non-strings raise."""
    if val is None:
        return None
    if isinstance(val, six.string_types):
        return val.strip()
    raise TypeError("Not a string: %s" % val)
def validate_file_exists(val):
    """Return *val* if it is None or an existing path, else raise ValueError."""
    if val is None:
        return None
    if os.path.exists(val):
        return val
    raise ValueError("File %s does not exists." % val)
def validate_list_string(val):
    """Validate a list of strings; a lone string becomes a one-item list."""
    if not val:
        return []
    # legacy syntax: a single string instead of a list
    items = [val] if isinstance(val, six.string_types) else val
    return [validate_string(item) for item in items]
def validate_list_of_existing_files(val):
    # Validate as a list of strings first, then require each path to exist.
    return [validate_file_exists(v) for v in validate_list_string(val)]
def validate_string_to_list(val):
    """Split a comma-separated string into a list of stripped items."""
    val = validate_string(val)
    if not val:
        return []
    # Filter out empty segments before stripping, as the original did.
    return [item.strip() for item in val.split(",") if item]
def validate_class(val):
    """Accept a class, a factory callable returning one, or an import string."""
    if inspect.isfunction(val) or inspect.ismethod(val):
        # A zero-argument factory: call it to obtain the real value.
        val = val()
    return val if inspect.isclass(val) else validate_string(val)
def validate_callable(arity):
    """Return a validator accepting a callable (or import string) of *arity*.

    Pass ``arity == -1`` to skip the arity check entirely.
    """
    def _validate_callable(val):
        if isinstance(val, six.string_types):
            # Resolve "module[.submodules...].object" to the object itself.
            try:
                mod_name, obj_name = val.rsplit(".", 1)
            except ValueError:
                raise TypeError("Value '%s' is not import string. "
                                "Format: module[.submodules...].object" % val)
            try:
                mod = __import__(mod_name, fromlist=[obj_name])
                val = getattr(mod, obj_name)
            except ImportError as e:
                raise TypeError(str(e))
            except AttributeError:
                raise TypeError("Can not load '%s' from '%s'"
                    "" % (obj_name, mod_name))
        if not six.callable(val):
            raise TypeError("Value is not six.callable: %s" % val)
        if arity != -1 and arity != _compat.get_arity(val):
            raise TypeError("Value must have an arity of: %s" % arity)
        return val
    return _validate_callable
def validate_user(val):
    """Resolve *val* (None, uid int, digit string, or user name) to a uid."""
    # stdlib
    import pwd
    if val is None:
        # Default to the effective uid of the current process.
        return os.geteuid()
    if isinstance(val, int):
        return val
    if val.isdigit():
        return int(val)
    try:
        return pwd.getpwnam(val).pw_uid
    except KeyError:
        raise ConfigError("No such user: '%s'" % val)
def validate_group(val):
    """Resolve *val* (None, gid int, digit string, or group name) to a gid."""
    # stdlib
    import grp
    if val is None:
        # Default to the effective gid of the current process.
        return os.getegid()
    if isinstance(val, int):
        return val
    if val.isdigit():
        return int(val)
    try:
        return grp.getgrnam(val).gr_gid
    except KeyError:
        raise ConfigError("No such group: '%s'" % val)
def validate_post_request(val):
    """Validate a post_request hook, adapting 2- or 3-arg callables to 4."""
    val = validate_callable(-1)(val)
    largs = _compat.get_arity(val)
    if largs == 4:
        return val
    if largs == 3:
        # Older hook signature lacking the response argument.
        return lambda worker, req, env, _r: val(worker, req, env)
    if largs == 2:
        # Oldest hook signature: worker and request only.
        return lambda worker, req, _e, _r: val(worker, req)
    raise TypeError("Value must have an arity of: 4")
def validate_chdir(val):
    """Normalize *val* to an absolute path and require that it exists."""
    # valid if the value is a string
    val = validate_string(val)
    # transform relative paths
    joined = os.path.join(util.getcwd(), val)
    path = os.path.abspath(os.path.normpath(joined))
    # test if the path exists
    if os.path.exists(path):
        return path
    raise ConfigError("can't chdir to %r" % val)
def validate_hostport(val):
    """Parse "host:port" into a ``(host, int(port))`` tuple; None passes through."""
    val = validate_string(val)
    if val is None:
        return None
    host, sep, port = val.partition(":")
    # Require exactly one ':' separator, like the original split-based check.
    if not sep or ":" in port:
        raise TypeError("Value must consist of: hostname:port")
    return (host, int(port))
def validate_reload_engine(val):
    """Ensure *val* names one of the registered reloader engines."""
    if val in reloader_engines:
        return val
    raise ConfigError("Invalid reload_engine: %r" % val)
def get_default_config_file():
    """Return ./gunicorn.conf.py as an absolute path if present, else None."""
    candidate = os.path.join(os.path.abspath(os.getcwd()),
                             'gunicorn.conf.py')
    return candidate if os.path.exists(candidate) else None
# ---------------------------------------------------------------------------
# Declarative Setting subclasses. Each class describes exactly one
# configuration option; SettingMeta registers it in KNOWN_SETTINGS at class
# creation time, so defining the class is all that is required.
# ---------------------------------------------------------------------------

# Path (or python: module) of the Gunicorn configuration file.
class ConfigFile(Setting):
    name = "config"
    section = "Config File"
    cli = ["-c", "--config"]
    meta = "CONFIG"
    validator = validate_string
    default = None
    desc = """\
        The Gunicorn config file.
        A string of the form ``PATH``, ``file:PATH``, or ``python:MODULE_NAME``.
        Only has an effect when specified on the command line or as part of an
        application specific configuration.
        .. versionchanged:: 19.4
           Loading the config from a Python module requires the ``python:``
           prefix.
        """

# Listen address(es); honours the PORT environment variable when set.
class Bind(Setting):
    name = "bind"
    action = "append"
    section = "Server Socket"
    cli = ["-b", "--bind"]
    meta = "ADDRESS"
    validator = validate_list_string
    if 'PORT' in os.environ:
        default = ['0.0.0.0:{0}'.format(os.environ.get('PORT'))]
    else:
        default = ['127.0.0.1:8000']
    desc = """\
        The socket to bind.
        A string of the form: ``HOST``, ``HOST:PORT``, ``unix:PATH``. An IP is
        a valid ``HOST``.
        Multiple addresses can be bound. ex.::
            $ gunicorn -b 127.0.0.1:8000 -b [::1]:8000 test:app
        will bind the `test:app` application on localhost both on ipv6
        and ipv4 interfaces.
        """

# listen(2) backlog size for the server socket.
class Backlog(Setting):
    name = "backlog"
    section = "Server Socket"
    cli = ["--backlog"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 2048
    desc = """\
        The maximum number of pending connections.
        This refers to the number of clients that can be waiting to be served.
        Exceeding this number results in the client getting an error when
        attempting to connect. It should only affect servers under significant
        load.
        Must be a positive integer. Generally set in the 64-2048 range.
        """

# Number of worker processes; defaults to $WEB_CONCURRENCY or 1.
class Workers(Setting):
    name = "workers"
    section = "Worker Processes"
    cli = ["-w", "--workers"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = int(os.environ.get("WEB_CONCURRENCY", 1))
    desc = """\
        The number of worker processes for handling requests.
        A positive integer generally in the ``2-4 x $(NUM_CORES)`` range.
        You'll want to vary this a bit to find the best for your particular
        application's work load.
        By default, the value of the ``WEB_CONCURRENCY`` environment variable.
        If it is not defined, the default is ``1``.
        """

# Worker implementation: a bundled name or an import path.
class WorkerClass(Setting):
    name = "worker_class"
    section = "Worker Processes"
    cli = ["-k", "--worker-class"]
    meta = "STRING"
    validator = validate_class
    default = "sync"
    desc = """\
        The type of workers to use.
        The default class (``sync``) should handle most "normal" types of
        workloads. You'll want to read :doc:`design` for information on when
        you might want to choose one of the other worker classes. Required
        libraries may be installed using setuptools' ``extra_require`` feature.
        A string referring to one of the following bundled classes:
        * ``sync``
        * ``eventlet`` - Requires eventlet >= 0.9.7 (or install it via
          ``pip install gunicorn[eventlet]``)
        * ``gevent`` - Requires gevent >= 0.13 (or install it via
          ``pip install gunicorn[gevent]``)
        * ``tornado`` - Requires tornado >= 0.2 (or install it via
          ``pip install gunicorn[tornado]``)
        * ``gthread`` - Python 2 requires the futures package to be installed
          (or install it via ``pip install gunicorn[gthread]``)
        * ``gaiohttp`` - Deprecated.
        Optionally, you can provide your own worker by giving Gunicorn a
        Python path to a subclass of ``gunicorn.workers.base.Worker``.
        This alternative syntax will load the gevent class:
        ``gunicorn.workers.ggevent.GeventWorker``.
        .. deprecated:: 19.8
           The ``gaiohttp`` worker is deprecated. Please use
           ``aiohttp.worker.GunicornWebWorker`` instead. See
           :ref:`asyncio-workers` for more information on how to use it.
        """

# Threads per worker; only meaningful for the gthread worker type.
class WorkerThreads(Setting):
    name = "threads"
    section = "Worker Processes"
    cli = ["--threads"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 1
    desc = """\
        The number of worker threads for handling requests.
        Run each worker with the specified number of threads.
        A positive integer generally in the ``2-4 x $(NUM_CORES)`` range.
        You'll want to vary this a bit to find the best for your particular
        application's work load.
        If it is not defined, the default is ``1``.
        This setting only affects the Gthread worker type.
        .. note::
           If you try to use the ``sync`` worker type and set the ``threads``
           setting to more than 1, the ``gthread`` worker type will be used
           instead.
        """

# Per-worker connection cap for async (eventlet/gevent) workers.
class WorkerConnections(Setting):
    name = "worker_connections"
    section = "Worker Processes"
    cli = ["--worker-connections"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 1000
    desc = """\
        The maximum number of simultaneous clients.
        This setting only affects the Eventlet and Gevent worker types.
        """

# Requests served before a worker is recycled; 0 disables recycling.
class MaxRequests(Setting):
    name = "max_requests"
    section = "Worker Processes"
    cli = ["--max-requests"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 0
    desc = """\
        The maximum number of requests a worker will process before restarting.
        Any value greater than zero will limit the number of requests a work
        will process before automatically restarting. This is a simple method
        to help limit the damage of memory leaks.
        If this is set to zero (the default) then the automatic worker
        restarts are disabled.
        """

# Random jitter applied to max_requests so workers don't restart together.
class MaxRequestsJitter(Setting):
    name = "max_requests_jitter"
    section = "Worker Processes"
    cli = ["--max-requests-jitter"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 0
    desc = """\
        The maximum jitter to add to the *max_requests* setting.
        The jitter causes the restart per worker to be randomized by
        ``randint(0, max_requests_jitter)``. This is intended to stagger worker
        restarts to avoid all workers restarting at the same time.
        .. versionadded:: 19.2
        """

# Seconds of silence before a worker is killed and restarted.
class Timeout(Setting):
    name = "timeout"
    section = "Worker Processes"
    cli = ["-t", "--timeout"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 30
    desc = """\
        Workers silent for more than this many seconds are killed and restarted.
        Generally set to thirty seconds. Only set this noticeably higher if
        you're sure of the repercussions for sync workers. For the non sync
        workers it just means that the worker process is still communicating and
        is not tied to the length of time required to handle a single request.
        """

# Grace period for workers to finish in-flight requests on restart.
class GracefulTimeout(Setting):
    name = "graceful_timeout"
    section = "Worker Processes"
    cli = ["--graceful-timeout"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 30
    desc = """\
        Timeout for graceful workers restart.
        After receiving a restart signal, workers have this much time to finish
        serving requests. Workers still alive after the timeout (starting from
        the receipt of the restart signal) are force killed.
        """

# Keep-alive wait in seconds; ignored by the sync worker.
class Keepalive(Setting):
    name = "keepalive"
    section = "Worker Processes"
    cli = ["--keep-alive"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 2
    desc = """\
        The number of seconds to wait for requests on a Keep-Alive connection.
        Generally set in the 1-5 seconds range for servers with direct connection
        to the client (e.g. when you don't have separate load balancer). When
        Gunicorn is deployed behind a load balancer, it often makes sense to
        set this to a higher value.
        .. note::
           ``sync`` worker does not support persistent connections and will
           ignore this option.
        """
# Cap on the HTTP request-line length (method + URI + version), in bytes.
class LimitRequestLine(Setting):
    name = "limit_request_line"
    section = "Security"
    cli = ["--limit-request-line"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 4094
    desc = """\
        The maximum size of HTTP request line in bytes.
        This parameter is used to limit the allowed size of a client's
        HTTP request-line. Since the request-line consists of the HTTP
        method, URI, and protocol version, this directive places a
        restriction on the length of a request-URI allowed for a request
        on the server. A server needs this value to be large enough to
        hold any of its resource names, including any information that
        might be passed in the query part of a GET request. Value is a number
        from 0 (unlimited) to 8190.
        This parameter can be used to prevent any DDOS attack.
        """

# Cap on the number of header fields per request.
class LimitRequestFields(Setting):
    name = "limit_request_fields"
    section = "Security"
    cli = ["--limit-request-fields"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 100
    desc = """\
        Limit the number of HTTP headers fields in a request.
        This parameter is used to limit the number of headers in a request to
        prevent DDOS attack. Used with the *limit_request_field_size* it allows
        more safety. By default this value is 100 and can't be larger than
        32768.
        """

# Cap on the size of a single header field; 0 means unlimited.
class LimitRequestFieldSize(Setting):
    name = "limit_request_field_size"
    section = "Security"
    cli = ["--limit-request-field_size"]
    meta = "INT"
    validator = validate_pos_int
    type = int
    default = 8190
    desc = """\
        Limit the allowed size of an HTTP request header field.
        Value is a positive number or 0. Setting it to 0 will allow unlimited
        header field sizes.
        .. warning::
           Setting this parameter to a very high or unlimited value can open
           up for DDOS attacks.
        """

# Development aid: restart workers when application code changes.
class Reload(Setting):
    name = "reload"
    section = 'Debugging'
    cli = ['--reload']
    validator = validate_bool
    action = 'store_true'
    default = False
    desc = '''\
        Restart workers when code changes.
        This setting is intended for development. It will cause workers to be
        restarted whenever application code changes.
        The reloader is incompatible with application preloading. When using a
        paste configuration be sure that the server block does not import any
        application code or the reload will not work as designed.
        The default behavior is to attempt inotify with a fallback to file
        system polling. Generally, inotify should be preferred if available
        because it consumes less system resources.
        .. note::
           In order to use the inotify reloader, you must have the ``inotify``
           package installed.
        '''

# Which file-watching backend powers the reloader.
class ReloadEngine(Setting):
    name = "reload_engine"
    section = "Debugging"
    cli = ["--reload-engine"]
    meta = "STRING"
    validator = validate_reload_engine
    default = "auto"
    desc = """\
        The implementation that should be used to power :ref:`reload`.
        Valid engines are:
        * 'auto'
        * 'poll'
        * 'inotify' (requires inotify)
        .. versionadded:: 19.7
        """

# Additional (non-code) files the reloader should also watch.
class ReloadExtraFiles(Setting):
    name = "reload_extra_files"
    action = "append"
    section = "Debugging"
    cli = ["--reload-extra-file"]
    meta = "FILES"
    validator = validate_list_of_existing_files
    default = []
    desc = """\
        Extends :ref:`reload` option to also watch and reload on additional files
        (e.g., templates, configurations, specifications, etc.).
        .. versionadded:: 19.8
        """

# Trace every executed line — extremely verbose debugging aid.
class Spew(Setting):
    name = "spew"
    section = "Debugging"
    cli = ["--spew"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Install a trace function that spews every line executed by the server.
        This is the nuclear option.
        """

# Validate configuration and exit without serving.
class ConfigCheck(Setting):
    name = "check_config"
    section = "Debugging"
    cli = ["--check-config"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Check the configuration.
        """

# Load the application in the arbiter before forking workers.
class PreloadApp(Setting):
    name = "preload_app"
    section = "Server Mechanics"
    cli = ["--preload"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Load application code before the worker processes are forked.
        By preloading an application you can save some RAM resources as well as
        speed up server boot times. Although, if you defer application loading
        to each worker process, you can reload your application code easily by
        restarting workers.
        """

# --no-sendfile stores const False; unset (None) defers to $SENDFILE.
class Sendfile(Setting):
    name = "sendfile"
    section = "Server Mechanics"
    cli = ["--no-sendfile"]
    validator = validate_bool
    action = "store_const"
    const = False
    desc = """\
        Disables the use of ``sendfile()``.
        If not set, the value of the ``SENDFILE`` environment variable is used
        to enable or disable its usage.
        .. versionadded:: 19.2
        .. versionchanged:: 19.4
           Swapped ``--sendfile`` with ``--no-sendfile`` to actually allow
           disabling.
        .. versionchanged:: 19.6
           added support for the ``SENDFILE`` environment variable
        """

# Set SO_REUSEPORT on the listening socket.
class ReusePort(Setting):
    name = "reuse_port"
    section = "Server Mechanics"
    cli = ["--reuse-port"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Set the ``SO_REUSEPORT`` flag on the listening socket.
        .. versionadded:: 19.8
        """
# Working directory to switch to before loading the application.
class Chdir(Setting):
    name = "chdir"
    section = "Server Mechanics"
    cli = ["--chdir"]
    validator = validate_chdir
    default = util.getcwd()
    desc = """\
        Chdir to specified directory before apps loading.
        """

# Detach from the terminal and run in the background.
class Daemon(Setting):
    name = "daemon"
    section = "Server Mechanics"
    cli = ["-D", "--daemon"]
    validator = validate_bool
    action = "store_true"
    default = False
    desc = """\
        Daemonize the Gunicorn process.
        Detaches the server from the controlling terminal and enters the
        background.
        """

# KEY=VALUE pairs injected into the execution environment.
class Env(Setting):
    name = "raw_env"
    action = "append"
    section = "Server Mechanics"
    cli = ["-e", "--env"]
    meta = "ENV"
    validator = validate_list_string
    default = []
    desc = """\
        Set environment variable (key=value).
        Pass variables to the execution environment. Ex.::
            $ gunicorn -b 127.0.0.1:8000 --env FOO=1 test:app
        and test for the foo variable environment in your application.
        """

# Optional PID file location.
class Pidfile(Setting):
    name = "pidfile"
    section = "Server Mechanics"
    cli = ["-p", "--pid"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
        A filename to use for the PID file.
        If not set, no PID file will be written.
        """

# Directory for the worker heartbeat temporary file.
class WorkerTmpDir(Setting):
    name = "worker_tmp_dir"
    section = "Server Mechanics"
    cli = ["--worker-tmp-dir"]
    meta = "DIR"
    validator = validate_string
    default = None
    desc = """\
        A directory to use for the worker heartbeat temporary file.
        If not set, the default temporary directory will be used.
        .. note::
           The current heartbeat system involves calling ``os.fchmod`` on
           temporary file handlers and may block a worker for arbitrary time
           if the directory is on a disk-backed filesystem.
           See :ref:`blocking-os-fchmod` for more detailed information
           and a solution for avoiding this problem.
        """

# User to run workers as; disabled (None) on non-POSIX platforms.
class User(Setting):
    name = "user"
    section = "Server Mechanics"
    cli = ["-u", "--user"]
    meta = "USER"
    if is_posix:
        default = os.geteuid()
        validator = validate_user
    else:
        # Under Windows, We do not use this functionality
        default = None
        validator = None
    desc = """\
        Switch worker processes to run as this user.
        A valid user id (as an integer) or the name of a user that can be
        retrieved with a call to ``pwd.getpwnam(value)`` or ``None`` to not
        change the worker process user.
        """

# Group to run workers as; disabled (None) on non-POSIX platforms.
class Group(Setting):
    name = "group"
    section = "Server Mechanics"
    cli = ["-g", "--group"]
    meta = "GROUP"
    if is_posix:
        default = os.getegid()
        validator = validate_group
    else:
        # Under Windows, We do not use this functionality
        default = None
        validator = None
    desc = """\
        Switch worker process to run as this group.
        A valid group id (as an integer) or the name of a user that can be
        retrieved with a call to ``pwd.getgrnam(value)`` or ``None`` to not
        change the worker processes group.
        """

# File-mode mask applied to files Gunicorn creates (incl. unix sockets).
class Umask(Setting):
    name = "umask"
    section = "Server Mechanics"
    cli = ["-m", "--umask"]
    meta = "INT"
    validator = validate_pos_int
    type = auto_int
    default = 0
    desc = """\
        A bit mask for the file mode on files written by Gunicorn.
        Note that this affects unix socket permissions.
        A valid value for the ``os.umask(mode)`` call or a string compatible
        with ``int(value, 0)`` (``0`` means Python guesses the base, so values
        like ``0``, ``0xFF``, ``0022`` are valid for decimal, hex, and octal
        representations)
        """

# Populate the worker's supplementary group access list.
class Initgroups(Setting):
    name = "initgroups"
    section = "Server Mechanics"
    cli = ["--initgroups"]
    validator = validate_bool
    action = 'store_true'
    default = False
    desc = """\
        If true, set the worker process's group access list with all of the
        groups of which the specified username is a member, plus the specified
        group id.
        .. versionadded:: 19.7
        """

# Spool directory for request bodies while they are read.
class TmpUploadDir(Setting):
    name = "tmp_upload_dir"
    section = "Server Mechanics"
    meta = "DIR"
    validator = validate_string
    default = None
    desc = """\
        Directory to store temporary request data as they are read.
        This may disappear in the near future.
        This path should be writable by the process permissions set for Gunicorn
        workers. If not specified, Gunicorn will choose a system generated
        temporary directory.
        """

# Proxy headers (and exact values) that mark a request as HTTPS.
class SecureSchemeHeader(Setting):
    name = "secure_scheme_headers"
    section = "Server Mechanics"
    validator = validate_dict
    default = {
        "X-FORWARDED-PROTOCOL": "ssl",
        "X-FORWARDED-PROTO": "https",
        "X-FORWARDED-SSL": "on"
    }
    desc = """\
        A dictionary containing headers and values that the front-end proxy
        uses to indicate HTTPS requests. These tell Gunicorn to set
        ``wsgi.url_scheme`` to ``https``, so your application can tell that the
        request is secure.
        The dictionary should map upper-case header names to exact string
        values. The value comparisons are case-sensitive, unlike the header
        names, so make sure they're exactly what your front-end proxy sends
        when handling HTTPS requests.
        It is important that your front-end proxy configuration ensures that
        the headers defined here can not be passed directly from the client.
        """

# Proxies trusted to set the secure-scheme headers above.
class ForwardedAllowIPS(Setting):
    name = "forwarded_allow_ips"
    section = "Server Mechanics"
    cli = ["--forwarded-allow-ips"]
    meta = "STRING"
    validator = validate_string_to_list
    default = os.environ.get("FORWARDED_ALLOW_IPS", "127.0.0.1")
    desc = """\
        Front-end's IPs from which allowed to handle set secure headers.
        (comma separate).
        Set to ``*`` to disable checking of Front-end IPs (useful for setups
        where you don't know in advance the IP address of Front-end, but
        you still trust the environment).
        By default, the value of the ``FORWARDED_ALLOW_IPS`` environment
        variable. If it is not defined, the default is ``"127.0.0.1"``.
        """

# Access-log destination; '-' means stdout.
class AccessLog(Setting):
    name = "accesslog"
    section = "Logging"
    cli = ["--access-logfile"]
    meta = "FILE"
    validator = validate_string
    default = None
    desc = """\
        The Access log file to write to.
        ``'-'`` means log to stdout.
        """

# Opt out of redirecting access logs to syslog.
class DisableRedirectAccessToSyslog(Setting):
    name = "disable_redirect_access_to_syslog"
    section = "Logging"
    cli = ["--disable-redirect-access-to-syslog"]
    validator = validate_bool
    action = 'store_true'
    default = False
    desc = """\
        Disable redirect access logs to syslog.
        .. versionadded:: 19.8
        """

# printf-style template used to format access-log lines.
class AccessLogFormat(Setting):
    name = "access_log_format"
    section = "Logging"
    cli = ["--access-logformat"]
    meta = "STRING"
    validator = validate_string
    default = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
    desc = """\
        The access log format.
        ===========  ===========
        Identifier   Description
        ===========  ===========
        h            remote address
        l            ``'-'``
        u            user name
        t            date of the request
        r            status line (e.g. ``GET / HTTP/1.1``)
        m            request method
        U            URL path without query string
        q            query string
        H            protocol
        s            status
        B            response length
        b            response length or ``'-'`` (CLF format)
        f            referer
        a            user agent
        T            request time in seconds
        D            request time in microseconds
        L            request time in decimal seconds
        p            process ID
        {Header}i    request header
        {Header}o    response header
        {Variable}e  environment variable
        ===========  ===========
        """

# Error-log destination; '-' means stderr.
class ErrorLog(Setting):
    name = "errorlog"
    section = "Logging"
    cli = ["--error-logfile", "--log-file"]
    meta = "FILE"
    validator = validate_string
    default = '-'
    desc = """\
        The Error log file to write to.
        Using ``'-'`` for FILE makes gunicorn log to stderr.
        .. versionchanged:: 19.2
           Log to stderr by default.
        """

# Minimum severity recorded in the error log.
class Loglevel(Setting):
    name = "loglevel"
    section = "Logging"
    cli = ["--log-level"]
    meta = "LEVEL"
    validator = validate_string
    default = "info"
    desc = """\
        The granularity of Error log outputs.
        Valid level names are:
        * debug
        * info
        * warning
        * error
        * critical
        """

# Route stdout/stderr into the error log.
class CaptureOutput(Setting):
    name = "capture_output"
    section = "Logging"
    cli = ["--capture-output"]
    validator = validate_bool
    action = 'store_true'
    default = False
    desc = """\
        Redirect stdout/stderr to specified file in :ref:`errorlog`.
        .. versionadded:: 19.6
        """
class LoggerClass(Setting):
name = "logger_class"
section = "Logging"
cli = ["--logger-class"]
meta = "STRING"
validator = validate_class
default = "zato.server.ext.zunicorn.glogging.Logger"
desc = """\
The logger you want to use to log events in Gunicorn.
The default class (``zato.server.ext.zunicorn.glogging.Logger``) handle most of
normal usages in logging. It provides error and access logging.
You can provide your own logger by giving Gunicorn a
Python path to a subclass like ``zato.server.ext.zunicorn.glogging.Logger``.
"""
class LogConfig(Setting):
name = "logconfig"
section = "Logging"
cli = ["--log-config"]
meta = "FILE"
validator = validate_string
default = None
desc = """\
The log config file to use.
Gunicorn uses the standard Python logging module's Configuration
file format.
"""
class LogConfigDict(Setting):
name = "logconfig_dict"
section = "Logging"
cli = ["--log-config-dict"]
validator = validate_dict
default = {}
desc = """\
The log config dictionary to use, using the standard Python
logging module's dictionary configuration format. This option
takes precedence over the :ref:`logconfig` option, which uses the
older file configuration format.
Format: https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig
.. versionadded:: 19.8
"""
class SyslogTo(Setting):
    # Destination address for syslog messages. The default depends on
    # where the platform's syslog daemon conventionally listens: a unix
    # domain socket on macOS/BSDs, UDP port 514 everywhere else.
    name = "syslog_addr"
    section = "Logging"
    cli = ["--log-syslog-to"]
    meta = "SYSLOG_ADDR"
    validator = validate_string

    # PLATFORM is resolved at import time; the branch below therefore
    # bakes the platform-appropriate default into the class.
    if PLATFORM == "darwin":
        default = "unix:///var/run/syslog"
    elif PLATFORM in ('freebsd', 'dragonfly', ):
        default = "unix:///var/run/log"
    elif PLATFORM == "openbsd":
        default = "unix:///dev/log"
    else:
        default = "udp://localhost:514"

    desc = """\
Address to send syslog messages.
Address is a string of the form:
* ``unix://PATH#TYPE`` : for unix domain socket. ``TYPE`` can be ``stream``
for the stream driver or ``dgram`` for the dgram driver.
``stream`` is the default.
* ``udp://HOST:PORT`` : for UDP sockets
* ``tcp://HOST:PORT`` : for TCP sockets
"""
class Syslog(Setting):
name = "syslog"
section = "Logging"
cli = ["--log-syslog"]
validator = validate_bool
action = 'store_true'
default = False
desc = """\
Send *Gunicorn* logs to syslog.
.. versionchanged:: 19.8
You can now disable sending access logs by using the
:ref:`disable-redirect-access-to-syslog` setting.
"""
class SyslogPrefix(Setting):
name = "syslog_prefix"
section = "Logging"
cli = ["--log-syslog-prefix"]
meta = "SYSLOG_PREFIX"
validator = validate_string
default = None
desc = """\
Makes Gunicorn use the parameter as program-name in the syslog entries.
All entries will be prefixed by ``gunicorn.<prefix>``. By default the
program name is the name of the process.
"""
class SyslogFacility(Setting):
name = "syslog_facility"
section = "Logging"
cli = ["--log-syslog-facility"]
meta = "SYSLOG_FACILITY"
validator = validate_string
default = "user"
desc = """\
Syslog facility name
"""
class EnableStdioInheritance(Setting):
name = "enable_stdio_inheritance"
section = "Logging"
cli = ["-R", "--enable-stdio-inheritance"]
validator = validate_bool
default = False
action = "store_true"
desc = """\
Enable stdio inheritance.
Enable inheritance for stdio file descriptors in daemon mode.
Note: To disable the Python stdout buffering, you can to set the user
environment variable ``PYTHONUNBUFFERED`` .
"""
# statsD monitoring
class StatsdHost(Setting):
name = "statsd_host"
section = "Logging"
cli = ["--statsd-host"]
meta = "STATSD_ADDR"
default = None
validator = validate_hostport
desc = """\
``host:port`` of the statsd server to log to.
.. versionadded:: 19.1
"""
class StatsdPrefix(Setting):
name = "statsd_prefix"
section = "Logging"
cli = ["--statsd-prefix"]
meta = "STATSD_PREFIX"
default = ""
validator = validate_string
desc = """\
Prefix to use when emitting statsd metrics (a trailing ``.`` is added,
if not provided).
.. versionadded:: 19.2
"""
class Procname(Setting):
name = "proc_name"
section = "Process Naming"
cli = ["-n", "--name"]
meta = "STRING"
validator = validate_string
default = None
desc = """\
A base to use with setproctitle for process naming.
This affects things like ``ps`` and ``top``. If you're going to be
running more than one instance of Gunicorn you'll probably want to set a
name to tell them apart. This requires that you install the setproctitle
module.
If not set, the *default_proc_name* setting will be used.
"""
class DefaultProcName(Setting):
name = "default_proc_name"
section = "Process Naming"
validator = validate_string
default = "gunicorn"
desc = """\
Internal setting that is adjusted for each type of application.
"""
class PythonPath(Setting):
name = "pythonpath"
section = "Server Mechanics"
cli = ["--pythonpath"]
meta = "STRING"
validator = validate_string
default = None
desc = """\
A comma-separated list of directories to add to the Python path.
e.g.
``'/home/djangoprojects/myproject,/home/python/mylibrary'``.
"""
class Paste(Setting):
name = "paste"
section = "Server Mechanics"
cli = ["--paste", "--paster"]
meta = "STRING"
validator = validate_string
default = None
desc = """\
Load a PasteDeploy config file. The argument may contain a ``#``
symbol followed by the name of an app section from the config file,
e.g. ``production.ini#admin``.
At this time, using alternate server blocks is not supported. Use the
command line arguments to control server configuration instead.
"""
class OnStarting(Setting):
name = "on_starting"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
def on_starting(server):
pass
default = staticmethod(on_starting)
desc = """\
Called just before the master process is initialized.
The callable needs to accept a single instance variable for the Arbiter.
"""
class OnReload(Setting):
name = "on_reload"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
def on_reload(server):
pass
default = staticmethod(on_reload)
desc = """\
Called to recycle workers during a reload via SIGHUP.
The callable needs to accept a single instance variable for the Arbiter.
"""
class WhenReady(Setting):
name = "when_ready"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
def when_ready(server):
pass
default = staticmethod(when_ready)
desc = """\
Called just after the server is started.
The callable needs to accept a single instance variable for the Arbiter.
"""
class Prefork(Setting):
name = "pre_fork"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
def pre_fork(server, worker):
pass
default = staticmethod(pre_fork)
desc = """\
Called just before a worker is forked.
The callable needs to accept two instance variables for the Arbiter and
new Worker.
"""
class Postfork(Setting):
name = "post_fork"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
def post_fork(server, worker):
pass
default = staticmethod(post_fork)
desc = """\
Called just after a worker has been forked.
The callable needs to accept two instance variables for the Arbiter and
new Worker.
"""
class PostWorkerInit(Setting):
name = "post_worker_init"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
def post_worker_init(worker):
pass
default = staticmethod(post_worker_init)
desc = """\
Called just after a worker has initialized the application.
The callable needs to accept one instance variable for the initialized
Worker.
"""
class WorkerInt(Setting):
name = "worker_int"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
def worker_int(worker):
pass
default = staticmethod(worker_int)
desc = """\
Called just after a worker exited on SIGINT or SIGQUIT.
The callable needs to accept one instance variable for the initialized
Worker.
"""
class WorkerAbort(Setting):
name = "worker_abort"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
def worker_abort(worker):
pass
default = staticmethod(worker_abort)
desc = """\
Called when a worker received the SIGABRT signal.
This call generally happens on timeout.
The callable needs to accept one instance variable for the initialized
Worker.
"""
class PreExec(Setting):
name = "pre_exec"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
def pre_exec(server):
pass
default = staticmethod(pre_exec)
desc = """\
Called just before a new master process is forked.
The callable needs to accept a single instance variable for the Arbiter.
"""
class PreRequest(Setting):
    # Server hook invoked before each request is processed by a worker.
    name = "pre_request"
    section = "Server Hooks"
    validator = validate_callable(2)
    type = six.callable

    def pre_request(worker, req):
        # Pass the arguments lazily (%-style args) instead of eagerly
        # formatting the string: this hook runs once per request, and the
        # message only needs to be built when DEBUG logging is enabled.
        worker.log.debug("%s %s", req.method, req.path)
    default = staticmethod(pre_request)
    desc = """\
Called just before a worker processes the request.
The callable needs to accept two instance variables for the Worker and
the Request.
"""
class PostRequest(Setting):
name = "post_request"
section = "Server Hooks"
validator = validate_post_request
type = six.callable
def post_request(worker, req, environ, resp):
pass
default = staticmethod(post_request)
desc = """\
Called after a worker processes the request.
The callable needs to accept two instance variables for the Worker and
the Request.
"""
class ChildExit(Setting):
name = "child_exit"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
def child_exit(server, worker):
pass
default = staticmethod(child_exit)
desc = """\
Called just after a worker has been exited, in the master process.
The callable needs to accept two instance variables for the Arbiter and
the just-exited Worker.
.. versionadded:: 19.7
"""
class WorkerExit(Setting):
name = "worker_exit"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
def worker_exit(server, worker):
pass
default = staticmethod(worker_exit)
desc = """\
Called just after a worker has been exited, in the worker process.
The callable needs to accept two instance variables for the Arbiter and
the just-exited Worker.
"""
class BeforePidKill(Setting):
name = "before_pid_kill"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
def before_pid_kill(server, worker):
pass
default = staticmethod(before_pid_kill)
desc = """\
Called just before a worker is to exit.
"""
class NumWorkersChanged(Setting):
name = "nworkers_changed"
section = "Server Hooks"
validator = validate_callable(3)
type = six.callable
def nworkers_changed(server, new_value, old_value):
pass
default = staticmethod(nworkers_changed)
desc = """\
Called just after *num_workers* has been changed.
The callable needs to accept an instance variable of the Arbiter and
two integers of number of workers after and before change.
If the number of workers is set for the first time, *old_value* would
be ``None``.
"""
class OnExit(Setting):
name = "on_exit"
section = "Server Hooks"
validator = validate_callable(1)
def on_exit(server):
pass
default = staticmethod(on_exit)
desc = """\
Called just before exiting Gunicorn.
The callable needs to accept a single instance variable for the Arbiter.
"""
class ProxyProtocol(Setting):
name = "proxy_protocol"
section = "Server Mechanics"
cli = ["--proxy-protocol"]
validator = validate_bool
default = False
action = "store_true"
desc = """\
Enable detect PROXY protocol (PROXY mode).
Allow using HTTP and Proxy together. It may be useful for work with
stunnel as HTTPS frontend and Gunicorn as HTTP server.
PROXY protocol: http://haproxy.1wt.eu/download/1.5/doc/proxy-protocol.txt
Example for stunnel config::
[https]
protocol = proxy
accept = 443
connect = 80
cert = /etc/ssl/certs/stunnel.pem
key = /etc/ssl/certs/stunnel.key
"""
class ProxyAllowFrom(Setting):
name = "proxy_allow_ips"
section = "Server Mechanics"
cli = ["--proxy-allow-from"]
validator = validate_string_to_list
default = "127.0.0.1"
desc = """\
Front-end's IPs from which allowed accept proxy requests (comma separate).
Set to ``*`` to disable checking of Front-end IPs (useful for setups
where you don't know in advance the IP address of Front-end, but
you still trust the environment)
"""
class KeyFile(Setting):
name = "keyfile"
section = "SSL"
cli = ["--keyfile"]
meta = "FILE"
validator = validate_string
default = None
desc = """\
SSL key file
"""
class CertFile(Setting):
name = "certfile"
section = "SSL"
cli = ["--certfile"]
meta = "FILE"
validator = validate_string
default = None
desc = """\
SSL certificate file
"""
class SSLVersion(Setting):
name = "ssl_version"
section = "SSL"
cli = ["--ssl-version"]
validator = validate_pos_int
default = ssl.PROTOCOL_SSLv23
desc = """\
SSL version to use (see stdlib ssl module's)
.. versionchanged:: 19.7
The default value has been changed from ``ssl.PROTOCOL_TLSv1`` to
``ssl.PROTOCOL_SSLv23``.
"""
class CertReqs(Setting):
name = "cert_reqs"
section = "SSL"
cli = ["--cert-reqs"]
validator = validate_pos_int
default = ssl.CERT_NONE
desc = """\
Whether client certificate is required (see stdlib ssl module's)
"""
class CACerts(Setting):
name = "ca_certs"
section = "SSL"
cli = ["--ca-certs"]
meta = "FILE"
validator = validate_string
default = None
desc = """\
CA certificates file
"""
class SuppressRaggedEOFs(Setting):
name = "suppress_ragged_eofs"
section = "SSL"
cli = ["--suppress-ragged-eofs"]
action = "store_true"
default = True
validator = validate_bool
desc = """\
Suppress ragged EOFs (see stdlib ssl module's)
"""
class DoHandshakeOnConnect(Setting):
name = "do_handshake_on_connect"
section = "SSL"
cli = ["--do-handshake-on-connect"]
validator = validate_bool
action = "store_true"
default = False
desc = """\
Whether to perform SSL handshake on socket connect (see stdlib ssl module's)
"""
if sys.version_info >= (2, 7):
class Ciphers(Setting):
name = "ciphers"
section = "SSL"
cli = ["--ciphers"]
validator = validate_string
default = 'TLSv1'
desc = """\
Ciphers to use (see stdlib ssl module's)
"""
class PasteGlobalConf(Setting):
name = "raw_paste_global_conf"
action = "append"
section = "Server Mechanics"
cli = ["--paste-global"]
meta = "CONF"
validator = validate_list_string
default = []
desc = """\
Set a PasteDeploy global config variable in ``key=value`` form.
The option can be specified multiple times.
The variables are passed to the the PasteDeploy entrypoint. Example::
$ gunicorn -b 127.0.0.1:8000 --paste development.ini --paste-global FOO=1 --paste-global BAR=2
.. versionadded:: 19.7
"""
| 55,586
|
Python
|
.py
| 1,590
| 28.065409
| 106
| 0.625735
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,163
|
glogging.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/glogging.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import base64
import binascii
import time
import logging
logging.Logger.manager.emittedNoHandlerWarning = 1
from logging.config import fileConfig
try:
from logging.config import dictConfig
except ImportError:
# python 2.6
dictConfig = None
import os
import socket
import sys
import threading
import traceback
from zato.server.ext.zunicorn import util
from zato.server.ext.zunicorn.six import PY3, string_types
# syslog facility codes
SYSLOG_FACILITIES = {
"auth": 4,
"authpriv": 10,
"cron": 9,
"daemon": 3,
"ftp": 11,
"kern": 0,
"lpr": 6,
"mail": 2,
"news": 7,
"security": 4, # DEPRECATED
"syslog": 5,
"user": 1,
"uucp": 8,
"local0": 16,
"local1": 17,
"local2": 18,
"local3": 19,
"local4": 20,
"local5": 21,
"local6": 22,
"local7": 23
}
CONFIG_DEFAULTS = dict(
version=1,
disable_existing_loggers=False,
loggers={
"root": {"level": "INFO", "handlers": ["console"]},
"zato": {
"level": "INFO",
"handlers": ["error_console"],
"propagate": True,
"qualname": "zato"
},
"gunicorn.access": {
"level": "INFO",
"handlers": ["console"],
"propagate": True,
"qualname": "gunicorn.access"
}
},
handlers={
"console": {
"class": "logging.StreamHandler",
"formatter": "generic",
"stream": "ext://sys.stdout"
},
"error_console": {
"class": "logging.StreamHandler",
"formatter": "generic",
"stream": "ext://sys.stderr"
},
},
formatters={
"generic": {
"format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s",
"datefmt": "[%Y-%m-%d %H:%M:%S %z]",
"class": "logging.Formatter"
}
}
)
def loggers():
    """Return every logger currently registered with the root manager."""
    registered_names = logging.root.manager.loggerDict.keys()
    return [logging.getLogger(logger_name) for logger_name in registered_names]
class SafeAtoms(dict):
    """A dict of access-log atoms that escapes double quotes in string
    values and falls back to ``'-'`` for any missing key. ``{header}``-style
    keys are looked up case-insensitively (lower-cased).
    """

    def __init__(self, atoms):
        dict.__init__(self)
        for atom_key, atom_value in atoms.items():
            if isinstance(atom_value, string_types):
                self[atom_key] = atom_value.replace('"', '\\"')
            else:
                self[atom_key] = atom_value

    def __getitem__(self, k):
        if k.startswith("{"):
            lowered = k.lower()
            if lowered in self:
                return super(SafeAtoms, self).__getitem__(lowered)
            return "-"
        if k in self:
            return super(SafeAtoms, self).__getitem__(k)
        return '-'
def parse_syslog_address(addr):
    """Parse a syslog address string into ``(socktype, address)``.

    * ``unix://PATH[#TYPE]`` -> ``(None or SOCK_DGRAM, PATH)`` — the socket
      type stays ``None`` unless ``dgram`` is requested explicitly, letting
      SysLogHandler try both drivers.
    * ``udp://HOST:PORT`` -> ``(SOCK_DGRAM, (host, port))``
    * ``tcp://HOST:PORT`` -> ``(SOCK_STREAM, (host, port))``

    Raises RuntimeError for an unknown scheme or a non-numeric port.
    """
    if addr.startswith("unix://"):
        sock_type = None
        # Honour an explicit "#dgram" suffix; anything else keeps the
        # backend-dependent default.
        path_and_type = addr.split("#", 1)
        if len(path_and_type) == 2:
            addr = path_and_type[0]
            if path_and_type[1] == "dgram":
                sock_type = socket.SOCK_DGRAM
        return (sock_type, addr.split("unix://")[1])

    if addr.startswith("udp://"):
        socktype, addr = socket.SOCK_DGRAM, addr.split("udp://")[1]
    elif addr.startswith("tcp://"):
        socktype, addr = socket.SOCK_STREAM, addr.split("tcp://")[1]
    else:
        raise RuntimeError("invalid syslog address")

    # Extract the host: bracketed IPv6 literal, host:port, bare host,
    # or an empty string meaning localhost.
    if '[' in addr and ']' in addr:
        host = addr.split(']')[0][1:].lower()
    elif ':' in addr:
        host = addr.split(':')[0].lower()
    elif addr == "":
        host = "localhost"
    else:
        host = addr.lower()

    # Whatever follows the closing bracket (or the whole string when there
    # is none) may still carry a ":port" suffix.
    addr = addr.split(']')[-1]
    if ":" in addr:
        port = addr.split(':', 1)[1]
        if not port.isdigit():
            raise RuntimeError("%r is not a valid port number." % port)
        port = int(port)
    else:
        port = 514

    return (socktype, (host, port))
class Logger:
    """Gunicorn-derived logger for Zato: maintains a separate error logger
    ("zato") and access logger ("gunicorn.access"), with optional file,
    stream and syslog handlers configured from the server config object.
    """

    # Map of config-file level names to stdlib logging levels.
    LOG_LEVELS = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG
    }
    loglevel = logging.INFO

    # Format strings used for the error, access and syslog outputs.
    error_fmt = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s"
    datefmt = r"[%Y-%m-%d %H:%M:%S %z]"

    access_fmt = "%(message)s"
    syslog_fmt = "[%(process)d] %(message)s"

    # Wrapper applied to the atoms dict in access(); see SafeAtoms.
    atoms_wrapper_class = SafeAtoms

    def __init__(self, cfg):
        # Both loggers have propagation disabled so records never bubble up
        # to the root logger's handlers.
        self.error_log = logging.getLogger("zato")
        self.error_log.propagate = False
        self.access_log = logging.getLogger("gunicorn.access")
        self.access_log.propagate = False
        self.error_handlers = []
        self.access_handlers = []
        self.logfile = None
        # Protects self.logfile during reopen_files().
        self.lock = threading.Lock()
        self.cfg = cfg

    def setup(self, cfg):
        """Attach handlers (file/stream, access, syslog) per *cfg* and apply
        any logconfig/logconfig_dict the user supplied.
        """
        self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO)
        self.error_log.setLevel(self.loglevel)
        self.access_log.setLevel(logging.INFO)

        # set gunicorn.main handler
        if self.cfg.capture_output and cfg.errorlog != "-":
            for stream in sys.stdout, sys.stderr:
                stream.flush()

            # Redirect the process-level stdout/stderr file descriptors
            # into the error log file.
            self.logfile = open(cfg.errorlog, 'a+')
            os.dup2(self.logfile.fileno(), sys.stdout.fileno())
            os.dup2(self.logfile.fileno(), sys.stderr.fileno())

        self._set_handler(self.error_log, cfg.errorlog,
                          logging.Formatter(self.error_fmt, self.datefmt))

        # set gunicorn.access handler
        if cfg.accesslog is not None:
            self._set_handler(self.access_log, cfg.accesslog,
                              fmt=logging.Formatter(self.access_fmt), stream=sys.stdout)

        # set syslog handler
        if cfg.syslog:
            self._set_syslog_handler(
                self.error_log, cfg, self.syslog_fmt, "error"
            )
            if not cfg.disable_redirect_access_to_syslog:
                self._set_syslog_handler(
                    self.access_log, cfg, self.syslog_fmt, "access"
                )

        if dictConfig is None and cfg.logconfig_dict:
            util.warn("Dictionary-based log configuration requires "
                      "Python 2.7 or above.")

        # logconfig_dict takes precedence over a logconfig file.
        if dictConfig and cfg.logconfig_dict:
            config = CONFIG_DEFAULTS.copy()
            config.update(cfg.logconfig_dict)
            try:
                dictConfig(config)
            except (
                    AttributeError,
                    ImportError,
                    ValueError,
                    TypeError
            ) as exc:
                raise RuntimeError(str(exc))
        elif cfg.logconfig:
            if os.path.exists(cfg.logconfig):
                defaults = CONFIG_DEFAULTS.copy()
                defaults['__file__'] = cfg.logconfig
                defaults['here'] = os.path.dirname(cfg.logconfig)
                fileConfig(cfg.logconfig, defaults=defaults,
                           disable_existing_loggers=False)
            else:
                msg = "Error: log config '%s' not found"
                raise RuntimeError(msg % cfg.logconfig)

    # Convenience delegators to the error logger.
    def critical(self, msg, *args, **kwargs):
        self.error_log.critical(msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        self.error_log.error(msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        self.error_log.warning(msg, *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        self.error_log.info(msg, *args, **kwargs)

    def debug(self, msg, *args, **kwargs):
        self.error_log.debug(msg, *args, **kwargs)

    def exception(self, msg, *args, **kwargs):
        self.error_log.exception(msg, *args, **kwargs)

    def log(self, lvl, msg, *args, **kwargs):
        # Accept either a numeric level or a level name string.
        if isinstance(lvl, string_types):
            lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO)
        self.error_log.log(lvl, msg, *args, **kwargs)

    def atoms(self, resp, req, environ, request_time):
        """Build the dict of access-log atoms for one request/response pair."""
        status = resp.status
        if isinstance(status, str):
            # "200 OK" -> "200"
            status = status.split(None, 1)[0]
        atoms = {
            'h': environ.get('REMOTE_ADDR', '-'),
            'l': '-',
            'u': self._get_user(environ) or '-',
            't': self.now(),
            'r': "%s %s %s" % (environ['REQUEST_METHOD'],
                               environ['RAW_URI'], environ["SERVER_PROTOCOL"]),
            's': status,
            'm': environ.get('REQUEST_METHOD'),
            'U': environ.get('PATH_INFO'),
            'q': environ.get('QUERY_STRING'),
            'H': environ.get('SERVER_PROTOCOL'),
            'b': getattr(resp, 'sent', None) is not None and str(resp.sent) or '-',
            'B': getattr(resp, 'sent', None),
            'f': environ.get('HTTP_REFERER', '-'),
            'a': environ.get('HTTP_USER_AGENT', '-'),
            'T': request_time.seconds,
            'D': (request_time.seconds*1000000) + request_time.microseconds,
            'L': "%d.%06d" % (request_time.seconds, request_time.microseconds),
            'p': "<%s>" % os.getpid()
        }

        # add request headers
        if hasattr(req, 'headers'):
            req_headers = req.headers
        else:
            req_headers = req

        if hasattr(req_headers, "items"):
            req_headers = req_headers.items()

        atoms.update(dict([("{%s}i" % k.lower(), v) for k, v in req_headers]))

        resp_headers = resp.headers
        if hasattr(resp_headers, "items"):
            resp_headers = resp_headers.items()

        # add response headers
        atoms.update(dict([("{%s}o" % k.lower(), v) for k, v in resp_headers]))

        # add environ variables
        environ_variables = environ.items()
        atoms.update(dict([("{%s}e" % k.lower(), v) for k, v in environ_variables]))

        return atoms

    def access(self, resp, req, environ, request_time):
        """ See http://httpd.apache.org/docs/2.0/logs.html#combined
        for format details
        """
        # Skip entirely unless some access-log destination is configured.
        if not (self.cfg.accesslog or self.cfg.logconfig or
                self.cfg.logconfig_dict or
                (self.cfg.syslog and not self.cfg.disable_redirect_access_to_syslog)):
            return

        # wrap atoms:
        # - make sure atoms will be test case insensitively
        # - if atom doesn't exist replace it by '-'
        safe_atoms = self.atoms_wrapper_class(self.atoms(resp, req, environ,
                                                         request_time))

        try:
            self.access_log.info(self.cfg.access_log_format, safe_atoms)
        # NOTE(review): bare except — deliberately best-effort so a broken
        # access_log_format never takes the request down; the error is logged.
        except:
            self.error(traceback.format_exc())

    def now(self):
        """ return date in Apache Common Log Format """
        return time.strftime('[%d/%b/%Y:%H:%M:%S %z]')

    def reopen_files(self):
        """Re-open the error log file and every FileHandler stream, e.g.
        after external log rotation.
        """
        if self.cfg.capture_output and self.cfg.errorlog != "-":
            for stream in sys.stdout, sys.stderr:
                stream.flush()

            with self.lock:
                if self.logfile is not None:
                    self.logfile.close()
                self.logfile = open(self.cfg.errorlog, 'a+')
                os.dup2(self.logfile.fileno(), sys.stdout.fileno())
                os.dup2(self.logfile.fileno(), sys.stderr.fileno())

        for log in loggers():
            for handler in log.handlers:
                if isinstance(handler, logging.FileHandler):
                    handler.acquire()
                    try:
                        if handler.stream:
                            try:
                                handler.close()
                            except AttributeError:
                                # Ignore errors from cloghandler.py
                                """
                                cloghandler.py", line 230, in close
                                if not self.stream_lock.closed:
                                AttributeError: 'NoneType' object has no attribute 'closed'
                                """
                                pass
                            handler.stream = handler._open()
                    finally:
                        handler.release()

    def close_on_exec(self):
        """Mark every FileHandler stream close-on-exec so child processes
        do not inherit the log file descriptors.
        """
        for log in loggers():
            for handler in log.handlers:
                if isinstance(handler, logging.FileHandler):
                    handler.acquire()
                    try:
                        if handler.stream:
                            util.close_on_exec(handler.stream.fileno())
                    finally:
                        handler.release()

    def _get_gunicorn_handler(self, log):
        # Return the handler this class previously attached (tagged with
        # the _gunicorn attribute), or None.
        for h in log.handlers:
            if getattr(h, "_gunicorn", False):
                return h

    def _set_handler(self, log, output, fmt, stream=None):
        # remove previous gunicorn log handler
        h = self._get_gunicorn_handler(log)
        if h:
            log.handlers.remove(h)

        if output is not None:
            if output == "-":
                h = logging.StreamHandler(stream)
            else:
                util.check_is_writeable(output)
                h = logging.FileHandler(output)
                # make sure the user can reopen the file
                try:
                    os.chown(h.baseFilename, self.cfg.user, self.cfg.group)
                except OSError:
                    # it's probably OK there, we assume the user has given
                    # /dev/null as a parameter.
                    pass

            h.setFormatter(fmt)
            h._gunicorn = True
            log.addHandler(h)

    def _set_syslog_handler(self, log, cfg, fmt, name):
        # setup format
        if not cfg.syslog_prefix:
            prefix = cfg.proc_name.replace(":", ".")
        else:
            prefix = cfg.syslog_prefix

        prefix = "gunicorn.%s.%s" % (prefix, name)

        # set format
        fmt = logging.Formatter(r"%s: %s" % (prefix, fmt))

        # syslog facility
        try:
            facility = SYSLOG_FACILITIES[cfg.syslog_facility.lower()]
        except KeyError:
            raise RuntimeError("unknown facility name")

        # parse syslog address
        socktype, addr = parse_syslog_address(cfg.syslog_addr)

        # finally setup the syslog handler
        # NOTE(review): relies on logging.handlers being importable via the
        # top-level "logging" name — presumably pulled in as a side effect of
        # the logging.config import at module top; confirm.
        if sys.version_info >= (2, 7):
            h = logging.handlers.SysLogHandler(address=addr,
                                               facility=facility, socktype=socktype)
        else:
            # socktype is only supported in 2.7 and sup
            # fix issue #541
            h = logging.handlers.SysLogHandler(address=addr,
                                               facility=facility)

        h.setFormatter(fmt)
        h._gunicorn = True
        log.addHandler(h)

    def _get_user(self, environ):
        """Extract the user name from an HTTP Basic Authorization header,
        or return None when absent or undecodable.
        """
        user = None
        http_auth = environ.get("HTTP_AUTHORIZATION")
        if http_auth and http_auth.startswith('Basic'):
            auth = http_auth.split(" ", 1)
            if len(auth) == 2:
                try:
                    # b64decode doesn't accept unicode in Python < 3.3
                    # so we need to convert it to a byte string
                    auth = base64.b64decode(auth[1].strip().encode('utf-8'))
                    if PY3:  # b64decode returns a byte string in Python 3
                        auth = auth.decode('utf-8')
                    auth = auth.split(":", 1)
                except (TypeError, binascii.Error, UnicodeDecodeError) as exc:
                    self.debug("Couldn't get username: %s", exc)
                    return user
                if len(auth) == 2:
                    user = auth[0]
        return user
| 17,295
|
Python
|
.py
| 436
| 28.456422
| 91
| 0.543038
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,164
|
argparse_compat.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/argparse_compat.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# Author: Steven J. Bethard <steven.bethard@gmail.com>.
"""Command-line parsing library
This module is an optparse-inspired command-line parsing library that:
- handles both optional and positional arguments
- produces highly informative usage messages
- supports parsers that dispatch to sub-parsers
The following is a simple usage example that sums integers from the
command-line and writes the result to a file::
parser = argparse.ArgumentParser(
description='sum the integers at the command line')
parser.add_argument(
'integers', metavar='int', nargs='+', type=int,
help='an integer to be summed')
parser.add_argument(
'--log', default=sys.stdout, type=argparse.FileType('w'),
help='the file where the sum should be written')
args = parser.parse_args()
args.log.write('%s' % sum(args.integers))
args.log.close()
The module contains the following public classes:
- ArgumentParser -- The main entry point for command-line parsing. As the
example above shows, the add_argument() method is used to populate
the parser with actions for optional and positional arguments. Then
the parse_args() method is invoked to convert the args at the
command-line into an object with attributes.
- ArgumentError -- The exception raised by ArgumentParser objects when
there are errors with the parser's actions. Errors raised while
parsing the command-line are caught by ArgumentParser and emitted
as command-line messages.
- FileType -- A factory for defining types of files to be created. As the
example above shows, instances of FileType are typically passed as
the type= argument of add_argument() calls.
- Action -- The base class for parser actions. Typically actions are
selected by passing strings like 'store_true' or 'append_const' to
the action= argument of add_argument(). However, for greater
customization of ArgumentParser actions, subclasses of Action may
be defined and passed as the action= argument.
- HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
ArgumentDefaultsHelpFormatter -- Formatter classes which
may be passed as the formatter_class= argument to the
ArgumentParser constructor. HelpFormatter is the default,
RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
not to change the formatting for help text, and
ArgumentDefaultsHelpFormatter adds information about argument defaults
to the help.
All other classes in this module are considered implementation details.
(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
considered public as object names -- the API of the formatter objects is
still considered an implementation detail.)
"""
# flake8: noqa
__version__ = '1.2.1'
__all__ = [
'ArgumentParser',
'ArgumentError',
'ArgumentTypeError',
'FileType',
'HelpFormatter',
'ArgumentDefaultsHelpFormatter',
'RawDescriptionHelpFormatter',
'RawTextHelpFormatter',
'Namespace',
'Action',
'ONE_OR_MORE',
'OPTIONAL',
'PARSER',
'REMAINDER',
'SUPPRESS',
'ZERO_OR_MORE',
]
import copy as _copy
import os as _os
import re as _re
import sys as _sys
import textwrap as _textwrap
from gettext import gettext as _
# Compatibility shims: provide ``set``, ``basestring`` and ``sorted`` on
# interpreters that lack them (very old Python 2.x, or Python 3 in the case
# of ``basestring``).
try:
    set
except NameError:
    # for python < 2.4 compatibility (sets module is there since 2.3):
    from sets import Set as set
try:
    basestring
except NameError:
    # Python 3 removed basestring; all strings are str there.
    basestring = str
try:
    sorted
except NameError:
    # for python < 2.4 compatibility:
def sorted(iterable, reverse=False):
result = list(iterable)
result.sort()
if reverse:
result.reverse()
return result
def _callable(obj):
    # An object counts as callable if it defines __call__, or if it is a
    # class (old-style classes expose __bases__ rather than __call__).
    for attr in ('__call__', '__bases__'):
        if hasattr(obj, attr):
            return True
    return False
# Sentinel and nargs marker values used throughout the parser.
SUPPRESS = '==SUPPRESS=='    # hide an action from help / from the namespace
OPTIONAL = '?'               # nargs: zero or one argument
ZERO_OR_MORE = '*'           # nargs: any number of arguments
ONE_OR_MORE = '+'            # nargs: at least one argument
PARSER = 'A...'              # nargs: first arg selects a sub-parser, rest go to it
REMAINDER = '...'            # nargs: capture everything that is left
_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'   # namespace attr for leftovers
# =============================
# Utility functions and classes
# =============================
class _AttributeHolder:
    """Abstract base class that provides __repr__.

    The representation has the form::

        ClassName(attr=name, attr=name, ...)

    Positional pieces come from the _get_args() hook and keyword pieces
    from the _get_kwargs() hook; subclasses may override either one.
    """
    def __repr__(self):
        pieces = [repr(value) for value in self._get_args()]
        pieces.extend('%s=%r' % item for item in self._get_kwargs())
        return '%s(%s)' % (type(self).__name__, ', '.join(pieces))
    def _get_kwargs(self):
        # default: every instance attribute, in sorted order
        return sorted(self.__dict__.items())
    def _get_args(self):
        # default: no positional pieces
        return []
def _ensure_value(namespace, name, value):
    """Return ``namespace.<name>``, first setting it to *value* when the
    attribute is missing or None."""
    current = getattr(namespace, name, None)
    if current is None:
        setattr(namespace, name, value)
        current = value
    return current
# ===============
# Formatting Help
# ===============
class HelpFormatter:
"""Formatter for generating usage messages and argument help strings.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def __init__(self,
prog,
indent_increment=2,
max_help_position=24,
width=None):
# default setting for width
if width is None:
try:
width = int(_os.environ['COLUMNS'])
except (KeyError, ValueError):
width = 80
width -= 2
self._prog = prog
self._indent_increment = indent_increment
self._max_help_position = max_help_position
self._width = width
self._current_indent = 0
self._level = 0
self._action_max_length = 0
self._root_section = self._Section(self, None)
self._current_section = self._root_section
self._whitespace_matcher = _re.compile(r'\s+')
self._long_break_matcher = _re.compile(r'\n\n\n+')
# ===============================
# Section and indentation methods
# ===============================
def _indent(self):
self._current_indent += self._indent_increment
self._level += 1
def _dedent(self):
self._current_indent -= self._indent_increment
assert self._current_indent >= 0, 'Indent decreased below 0.'
self._level -= 1
class _Section:
def __init__(self, formatter, parent, heading=None):
self.formatter = formatter
self.parent = parent
self.heading = heading
self.items = []
def format_help(self):
# format the indented section
if self.parent is not None:
self.formatter._indent()
join = self.formatter._join_parts
for func, args in self.items:
func(*args)
item_help = join([func(*args) for func, args in self.items])
if self.parent is not None:
self.formatter._dedent()
# return nothing if the section was empty
if not item_help:
return ''
# add the heading if the section was non-empty
if self.heading is not SUPPRESS and self.heading is not None:
current_indent = self.formatter._current_indent
heading = '%*s%s:\n' % (current_indent, '', self.heading)
else:
heading = ''
# join the section-initial newline, the heading and the help
return join(['\n', heading, item_help, '\n'])
def _add_item(self, func, args):
self._current_section.items.append((func, args))
# ========================
# Message building methods
# ========================
def start_section(self, heading):
self._indent()
section = self._Section(self, self._current_section, heading)
self._add_item(section.format_help, [])
self._current_section = section
def end_section(self):
self._current_section = self._current_section.parent
self._dedent()
def add_text(self, text):
if text is not SUPPRESS and text is not None:
self._add_item(self._format_text, [text])
def add_usage(self, usage, actions, groups, prefix=None):
if usage is not SUPPRESS:
args = usage, actions, groups, prefix
self._add_item(self._format_usage, args)
def add_argument(self, action):
if action.help is not SUPPRESS:
# find all invocations
get_invocation = self._format_action_invocation
invocations = [get_invocation(action)]
for subaction in self._iter_indented_subactions(action):
invocations.append(get_invocation(subaction))
# update the maximum item length
invocation_length = max([len(s) for s in invocations])
action_length = invocation_length + self._current_indent
self._action_max_length = max(self._action_max_length,
action_length)
# add the item to the list
self._add_item(self._format_action, [action])
def add_arguments(self, actions):
for action in actions:
self.add_argument(action)
# =======================
# Help-formatting methods
# =======================
def format_help(self):
help = self._root_section.format_help()
if help:
help = self._long_break_matcher.sub('\n\n', help)
help = help.strip('\n') + '\n'
return help
def _join_parts(self, part_strings):
return ''.join([part
for part in part_strings
if part and part is not SUPPRESS])
def _format_usage(self, usage, actions, groups, prefix):
if prefix is None:
prefix = _('usage: ')
# if usage is specified, use that
if usage is not None:
usage = usage % dict(prog=self._prog)
# if no optionals or positionals are available, usage is just prog
elif usage is None and not actions:
usage = '%(prog)s' % dict(prog=self._prog)
# if optionals and positionals are available, calculate usage
elif usage is None:
prog = '%(prog)s' % dict(prog=self._prog)
# split optionals from positionals
optionals = []
positionals = []
for action in actions:
if action.option_strings:
optionals.append(action)
else:
positionals.append(action)
# build full usage string
format = self._format_actions_usage
action_usage = format(optionals + positionals, groups)
usage = ' '.join([s for s in [prog, action_usage] if s])
# wrap the usage parts if it's too long
text_width = self._width - self._current_indent
if len(prefix) + len(usage) > text_width:
# break usage into wrappable parts
part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
opt_usage = format(optionals, groups)
pos_usage = format(positionals, groups)
opt_parts = _re.findall(part_regexp, opt_usage)
pos_parts = _re.findall(part_regexp, pos_usage)
assert ' '.join(opt_parts) == opt_usage
assert ' '.join(pos_parts) == pos_usage
# helper for wrapping lines
def get_lines(parts, indent, prefix=None):
lines = []
line = []
if prefix is not None:
line_len = len(prefix) - 1
else:
line_len = len(indent) - 1
for part in parts:
if line_len + 1 + len(part) > text_width:
lines.append(indent + ' '.join(line))
line = []
line_len = len(indent) - 1
line.append(part)
line_len += len(part) + 1
if line:
lines.append(indent + ' '.join(line))
if prefix is not None:
lines[0] = lines[0][len(indent):]
return lines
# if prog is short, follow it with optionals or positionals
if len(prefix) + len(prog) <= 0.75 * text_width:
indent = ' ' * (len(prefix) + len(prog) + 1)
if opt_parts:
lines = get_lines([prog] + opt_parts, indent, prefix)
lines.extend(get_lines(pos_parts, indent))
elif pos_parts:
lines = get_lines([prog] + pos_parts, indent, prefix)
else:
lines = [prog]
# if prog is long, put it on its own line
else:
indent = ' ' * len(prefix)
parts = opt_parts + pos_parts
lines = get_lines(parts, indent)
if len(lines) > 1:
lines = []
lines.extend(get_lines(opt_parts, indent))
lines.extend(get_lines(pos_parts, indent))
lines = [prog] + lines
# join lines into usage
usage = '\n'.join(lines)
# prefix with 'usage:'
return '%s%s\n\n' % (prefix, usage)
def _format_actions_usage(self, actions, groups):
# find group indices and identify actions in groups
group_actions = set()
inserts = {}
for group in groups:
try:
start = actions.index(group._group_actions[0])
except ValueError:
continue
else:
end = start + len(group._group_actions)
if actions[start:end] == group._group_actions:
for action in group._group_actions:
group_actions.add(action)
if not group.required:
if start in inserts:
inserts[start] += ' ['
else:
inserts[start] = '['
inserts[end] = ']'
else:
if start in inserts:
inserts[start] += ' ('
else:
inserts[start] = '('
inserts[end] = ')'
for i in range(start + 1, end):
inserts[i] = '|'
# collect all actions format strings
parts = []
for i, action in enumerate(actions):
# suppressed arguments are marked with None
# remove | separators for suppressed arguments
if action.help is SUPPRESS:
parts.append(None)
if inserts.get(i) == '|':
inserts.pop(i)
elif inserts.get(i + 1) == '|':
inserts.pop(i + 1)
# produce all arg strings
elif not action.option_strings:
part = self._format_args(action, action.dest)
# if it's in a group, strip the outer []
if action in group_actions:
if part[0] == '[' and part[-1] == ']':
part = part[1:-1]
# add the action string to the list
parts.append(part)
# produce the first way to invoke the option in brackets
else:
option_string = action.option_strings[0]
# if the Optional doesn't take a value, format is:
# -s or --long
if action.nargs == 0:
part = '%s' % option_string
# if the Optional takes a value, format is:
# -s ARGS or --long ARGS
else:
default = action.dest.upper()
args_string = self._format_args(action, default)
part = '%s %s' % (option_string, args_string)
# make it look optional if it's not required or in a group
if not action.required and action not in group_actions:
part = '[%s]' % part
# add the action string to the list
parts.append(part)
# insert things at the necessary indices
for i in sorted(inserts, reverse=True):
parts[i:i] = [inserts[i]]
# join all the action items with spaces
text = ' '.join([item for item in parts if item is not None])
# clean up separators for mutually exclusive groups
open = r'[\[(]'
close = r'[\])]'
text = _re.sub(r'(%s) ' % open, r'\1', text)
text = _re.sub(r' (%s)' % close, r'\1', text)
text = _re.sub(r'%s *%s' % (open, close), r'', text)
text = _re.sub(r'\(([^|]*)\)', r'\1', text)
text = text.strip()
# return the text
return text
def _format_text(self, text):
if '%(prog)' in text:
text = text % dict(prog=self._prog)
text_width = self._width - self._current_indent
indent = ' ' * self._current_indent
return self._fill_text(text, text_width, indent) + '\n\n'
def _format_action(self, action):
# determine the required width and the entry label
help_position = min(self._action_max_length + 2,
self._max_help_position)
help_width = self._width - help_position
action_width = help_position - self._current_indent - 2
action_header = self._format_action_invocation(action)
# ho nelp; start on same line and add a final newline
if not action.help:
tup = self._current_indent, '', action_header
action_header = '%*s%s\n' % tup
# short action name; start on the same line and pad two spaces
elif len(action_header) <= action_width:
tup = self._current_indent, '', action_width, action_header
action_header = '%*s%-*s ' % tup
indent_first = 0
# long action name; start on the next line
else:
tup = self._current_indent, '', action_header
action_header = '%*s%s\n' % tup
indent_first = help_position
# collect the pieces of the action help
parts = [action_header]
# if there was help for the action, add lines of help text
if action.help:
help_text = self._expand_help(action)
help_lines = self._split_lines(help_text, help_width)
parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
for line in help_lines[1:]:
parts.append('%*s%s\n' % (help_position, '', line))
# or add a newline if the description doesn't end with one
elif not action_header.endswith('\n'):
parts.append('\n')
# if there are any sub-actions, add their help as well
for subaction in self._iter_indented_subactions(action):
parts.append(self._format_action(subaction))
# return a single string
return self._join_parts(parts)
def _format_action_invocation(self, action):
if not action.option_strings:
metavar, = self._metavar_formatter(action, action.dest)(1)
return metavar
else:
parts = []
# if the Optional doesn't take a value, format is:
# -s, --long
if action.nargs == 0:
parts.extend(action.option_strings)
# if the Optional takes a value, format is:
# -s ARGS, --long ARGS
else:
default = action.dest.upper()
args_string = self._format_args(action, default)
for option_string in action.option_strings:
parts.append('%s %s' % (option_string, args_string))
return ', '.join(parts)
def _metavar_formatter(self, action, default_metavar):
if action.metavar is not None:
result = action.metavar
elif action.choices is not None:
choice_strs = [str(choice) for choice in action.choices]
result = '{%s}' % ','.join(choice_strs)
else:
result = default_metavar
def format(tuple_size):
if isinstance(result, tuple):
return result
else:
return (result, ) * tuple_size
return format
def _format_args(self, action, default_metavar):
get_metavar = self._metavar_formatter(action, default_metavar)
if action.nargs is None:
result = '%s' % get_metavar(1)
elif action.nargs == OPTIONAL:
result = '[%s]' % get_metavar(1)
elif action.nargs == ZERO_OR_MORE:
result = '[%s [%s ...]]' % get_metavar(2)
elif action.nargs == ONE_OR_MORE:
result = '%s [%s ...]' % get_metavar(2)
elif action.nargs == REMAINDER:
result = '...'
elif action.nargs == PARSER:
result = '%s ...' % get_metavar(1)
else:
formats = ['%s' for _ in range(action.nargs)]
result = ' '.join(formats) % get_metavar(action.nargs)
return result
def _expand_help(self, action):
params = dict(vars(action), prog=self._prog)
for name in list(params):
if params[name] is SUPPRESS:
del params[name]
for name in list(params):
if hasattr(params[name], '__name__'):
params[name] = params[name].__name__
if params.get('choices') is not None:
choices_str = ', '.join([str(c) for c in params['choices']])
params['choices'] = choices_str
return self._get_help_string(action) % params
def _iter_indented_subactions(self, action):
try:
get_subactions = action._get_subactions
except AttributeError:
pass
else:
self._indent()
for subaction in get_subactions():
yield subaction
self._dedent()
def _split_lines(self, text, width):
text = self._whitespace_matcher.sub(' ', text).strip()
return _textwrap.wrap(text, width)
def _fill_text(self, text, width, indent):
text = self._whitespace_matcher.sub(' ', text).strip()
return _textwrap.fill(text, width, initial_indent=indent,
subsequent_indent=indent)
def _get_help_string(self, action):
return action.help
class RawDescriptionHelpFormatter(HelpFormatter):
    """Help message formatter which retains any formatting in descriptions.
    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """
    def _fill_text(self, text, width, indent):
        # Keep the author's own line breaks; only prefix each line with
        # the current indentation instead of re-wrapping the paragraph.
        indented = [indent + line for line in text.splitlines(True)]
        return ''.join(indented)
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
    """Help message formatter which retains formatting of all help text.
    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """
    def _split_lines(self, text, width):
        # Honour embedded newlines instead of re-wrapping to *width*.
        return text.splitlines()
class ArgumentDefaultsHelpFormatter(HelpFormatter):
    """Help message formatter which adds default values to argument help.
    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """
    def _get_help_string(self, action):
        # Append "(default: ...)" unless the help text already mentions the
        # default, the default is suppressed, or the action is a positional
        # that always consumes at least one argument.
        text = action.help
        if '%(default)' in text:
            return text
        if action.default is SUPPRESS:
            return text
        if action.option_strings or action.nargs in (OPTIONAL, ZERO_OR_MORE):
            text += ' (default: %(default)s)'
        return text
# =====================
# Options and Arguments
# =====================
def _get_action_name(argument):
    """Best human-readable identifier for *argument*, used in error
    messages; None when nothing suitable is available."""
    if argument is None:
        return None
    if argument.option_strings:
        return '/'.join(argument.option_strings)
    if argument.metavar not in (None, SUPPRESS):
        return argument.metavar
    if argument.dest not in (None, SUPPRESS):
        return argument.dest
    return None
class ArgumentError(Exception):
    """An error from creating or using an argument (optional or positional).
    The string value of this exception is the message, augmented with
    information about the argument that caused it.
    """
    def __init__(self, argument, message):
        # resolve the display name up front so __str__ stays cheap
        self.argument_name = _get_action_name(argument)
        self.message = message
    def __str__(self):
        if self.argument_name is None:
            template = '%(message)s'
        else:
            template = 'argument %(argument_name)s: %(message)s'
        return template % dict(message=self.message,
                               argument_name=self.argument_name)
class ArgumentTypeError(Exception):
    """An error from trying to convert a command line string to a type."""
    # NOTE(review): presumably raised by type= callables and turned into a
    # clean "invalid value" message by the parser -- the handler is outside
    # this excerpt, so confirm before relying on that behaviour.
    pass
# ==============
# Action classes
# ==============
class Action(_AttributeHolder):
    """Information about how to convert command line strings to Python objects.
    Action objects are used by an ArgumentParser to represent the information
    needed to parse a single argument from one or more strings from the
    command line. The keyword arguments to the Action constructor are also
    all attributes of Action instances.
    Keyword Arguments:
        - option_strings -- A list of command-line option strings which
            should be associated with this action.
        - dest -- The name of the attribute to hold the created object(s)
        - nargs -- The number of command-line arguments that should be
            consumed. By default, one argument will be consumed and a single
            value will be produced.  Other values include:
                - N (an integer) consumes N arguments (and produces a list)
                - '?' consumes zero or one arguments
                - '*' consumes zero or more arguments (and produces a list)
                - '+' consumes one or more arguments (and produces a list)
            Note that the difference between the default and nargs=1 is that
            with the default, a single value will be produced, while with
            nargs=1, a list containing a single value will be produced.
        - const -- The value to be produced if the option is specified and the
            option uses an action that takes no values.
        - default -- The value to be produced if the option is not specified.
        - type -- The type which the command-line arguments should be converted
            to, should be one of 'string', 'int', 'float', 'complex' or a
            callable object that accepts a single string argument. If None,
            'string' is assumed.
        - choices -- A container of values that should be allowed. If not None,
            after a command-line argument has been converted to the appropriate
            type, an exception will be raised if it is not a member of this
            collection.
        - required -- True if the action must always be specified at the
            command line. This is only meaningful for optional command-line
            arguments.
        - help -- The help string describing the argument.
        - metavar -- The name to be used for the option's argument with the
            help string. If None, the 'dest' value will be used as the name.
    """
    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # plain attribute assignments; validation happens in subclasses
        self.option_strings = option_strings
        self.dest = dest
        self.nargs = nargs
        self.const = const
        self.default = default
        self.type = type
        self.choices = choices
        self.required = required
        self.help = help
        self.metavar = metavar
    def _get_kwargs(self):
        # controls what _AttributeHolder.__repr__ shows
        # NOTE(review): 'required' is absent from this list, so it never
        # appears in the repr -- confirm this is intentional.
        names = [
            'option_strings',
            'dest',
            'nargs',
            'const',
            'default',
            'type',
            'choices',
            'help',
            'metavar',
        ]
        return [(name, getattr(self, name)) for name in names]
    def __call__(self, parser, namespace, values, option_string=None):
        # Subclasses must override this to apply the parsed values to the
        # namespace; the base class deliberately refuses to be used directly.
        raise NotImplementedError(_('.__call__() not defined'))
class _StoreAction(Action):
    """Store the argument's (converted) value(s) on the namespace --
    the default action."""
    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # storing nothing makes no sense; point users at the right action
        if nargs == 0:
            raise ValueError('nargs for store actions must be > 0; if you '
                             'have nothing to store, actions such as store '
                             'true or store const may be more appropriate')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_StoreAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=nargs,
            const=const,
            default=default,
            type=type,
            choices=choices,
            required=required,
            help=help,
            metavar=metavar)
    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
class _StoreConstAction(Action):
    """Store a fixed constant on the namespace; consumes no arguments."""
    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        # NOTE(review): 'metavar' is accepted but not forwarded to the base
        # class below -- harmless since nargs=0 options show no metavar,
        # but confirm before relying on it.
        super(_StoreConstAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            const=const,
            default=default,
            required=required,
            help=help)
    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, self.const)
class _StoreTrueAction(_StoreConstAction):
    """Store True when the option is seen (default False when absent)."""
    def __init__(self,
                 option_strings,
                 dest,
                 default=False,
                 required=False,
                 help=None):
        super(_StoreTrueAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            const=True,
            default=default,
            required=required,
            help=help)
class _StoreFalseAction(_StoreConstAction):
    """Store False when the option is seen (default True when absent)."""
    def __init__(self,
                 option_strings,
                 dest,
                 default=True,
                 required=False,
                 help=None):
        super(_StoreFalseAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            const=False,
            default=default,
            required=required,
            help=help)
class _AppendAction(Action):
    """Append each occurrence's value to a list on the namespace."""
    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        if nargs == 0:
            raise ValueError('nargs for append actions must be > 0; if arg '
                             'strings are not supplying the value to append, '
                             'the append const action may be more appropriate')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_AppendAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=nargs,
            const=const,
            default=default,
            type=type,
            choices=choices,
            required=required,
            help=help,
            metavar=metavar)
    def __call__(self, parser, namespace, values, option_string=None):
        # append to a copy so the previous list object (often the shared
        # default) is left unmodified
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(values)
        setattr(namespace, self.dest, items)
class _AppendConstAction(Action):
    """Append a fixed constant to a list on the namespace; consumes no
    arguments."""
    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        super(_AppendConstAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            const=const,
            default=default,
            required=required,
            help=help,
            metavar=metavar)
    def __call__(self, parser, namespace, values, option_string=None):
        # append to a copy so the previous list object (often the shared
        # default) is left unmodified
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(self.const)
        setattr(namespace, self.dest, items)
class _CountAction(Action):
    """Count how many times the option occurs (incremented by one per
    occurrence)."""
    def __init__(self,
                 option_strings,
                 dest,
                 default=None,
                 required=False,
                 help=None):
        super(_CountAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            default=default,
            required=required,
            help=help)
    def __call__(self, parser, namespace, values, option_string=None):
        # missing/None attribute starts the count at zero
        new_count = _ensure_value(namespace, self.dest, 0) + 1
        setattr(namespace, self.dest, new_count)
class _HelpAction(Action):
    """Print the parser's help message and exit the process."""
    def __init__(self,
                 option_strings,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help=None):
        super(_HelpAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            default=default,
            nargs=0,
            help=help)
    def __call__(self, parser, namespace, values, option_string=None):
        parser.print_help()
        parser.exit()
class _VersionAction(Action):
    """Print version information and exit the process."""
    def __init__(self,
                 option_strings,
                 version=None,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help="show program's version number and exit"):
        super(_VersionAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            default=default,
            nargs=0,
            help=help)
        self.version = version
    def __call__(self, parser, namespace, values, option_string=None):
        version = self.version
        # fall back to the parser-level version attribute when none was
        # supplied to the action itself
        if version is None:
            version = parser.version
        formatter = parser._get_formatter()
        formatter.add_text(version)
        parser.exit(message=formatter.format_help())
class _SubParsersAction(Action):
    """Dispatch the remaining command-line arguments to a named sub-parser.

    The first consumed value selects the sub-parser; everything after it is
    parsed by that sub-parser.
    """
    class _ChoicesPseudoAction(Action):
        # Help-only stand-in so each sub-command appears in the help output.
        def __init__(self, name, help):
            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
            sup.__init__(option_strings=[], dest=name, help=help)
    def __init__(self,
                 option_strings,
                 prog,
                 parser_class,
                 dest=SUPPRESS,
                 help=None,
                 metavar=None):
        self._prog_prefix = prog
        self._parser_class = parser_class
        self._name_parser_map = {}
        self._choices_actions = []
        super(_SubParsersAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=PARSER,
            choices=self._name_parser_map,
            help=help,
            metavar=metavar)
    def add_parser(self, name, **kwargs):
        """Create (and register) the sub-parser for command *name*."""
        # set prog from the existing prefix
        if kwargs.get('prog') is None:
            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            choice_action = self._ChoicesPseudoAction(name, help)
            self._choices_actions.append(choice_action)
        # create the parser and add it to the map
        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser
        return parser
    def _get_subactions(self):
        return self._choices_actions
    def __call__(self, parser, namespace, values, option_string=None):
        parser_name = values[0]
        arg_strings = values[1:]
        # set the parser name if requested
        if self.dest is not SUPPRESS:
            setattr(namespace, self.dest, parser_name)
        # select the parser
        try:
            parser = self._name_parser_map[parser_name]
        except KeyError:
            tup = parser_name, ', '.join(self._name_parser_map)
            # fix: interpolate *after* translation so the gettext msgid is a
            # stable template rather than a per-call string
            msg = _('unknown parser %r (choices: %s)') % tup
            raise ArgumentError(self, msg)
        # parse all the remaining options into the namespace
        # store any unrecognized options on the object, so that the top
        # level parser can decide what to do with them
        namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
        if arg_strings:
            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
# ==============
# Type classes
# ==============
class FileType:
    """Factory for creating file object types
    Instances of FileType are typically passed as type= arguments to the
    ArgumentParser add_argument() method.
    Keyword Arguments:
        - mode -- A string indicating how the file is to be opened. Accepts the
            same values as the builtin open() function.
        - bufsize -- The file's desired buffer size. Accepts the same values as
            the builtin open() function.
    """
    def __init__(self, mode='r', bufsize=None):
        self._mode = mode
        self._bufsize = bufsize
    def __call__(self, string):
        # "-" is a convention for standard input/output, chosen by mode
        if string == '-':
            if 'r' in self._mode:
                return _sys.stdin
            if 'w' in self._mode:
                return _sys.stdout
            raise ValueError(_('argument "-" with mode %r' % self._mode))
        # anything else is treated as a file system path
        if self._bufsize:
            return open(string, self._mode, self._bufsize)
        return open(string, self._mode)
    def __repr__(self):
        shown = [arg for arg in (self._mode, self._bufsize)
                 if arg is not None]
        return '%s(%s)' % (type(self).__name__,
                           ', '.join(repr(arg) for arg in shown))
# ===========================
# Optional and Positional Parsing
# ===========================
class Namespace(_AttributeHolder):
    """Simple object for storing attributes.
    Implements equality by attribute names and values, and provides a simple
    string representation.
    """
    def __init__(self, **kwargs):
        for name in kwargs:
            setattr(self, name, kwargs[name])
    # explicitly unhashable: equality is value-based and mutable
    __hash__ = None
    def __eq__(self, other):
        # fix: comparing against a foreign type used to raise TypeError via
        # vars(other); defer to the other operand instead (upstream behaviour)
        if not isinstance(other, Namespace):
            return NotImplemented
        return vars(self) == vars(other)
    def __ne__(self, other):
        if not isinstance(other, Namespace):
            return NotImplemented
        return not (self == other)
    def __contains__(self, key):
        # ``'name' in namespace`` checks for a set attribute
        return key in self.__dict__
class _ActionsContainer:
    def __init__(self,
                 description,
                 prefix_chars,
                 argument_default,
                 conflict_handler):
        """Shared state for objects that hold argument actions
        (parsers and argument groups)."""
        super(_ActionsContainer, self).__init__()
        self.description = description
        self.argument_default = argument_default
        self.prefix_chars = prefix_chars
        self.conflict_handler = conflict_handler
        # set up registries (named lookup tables, see register())
        self._registries = {}
        # register actions under their action= string names; None is the
        # default used when no action= is supplied to add_argument()
        self.register('action', None, _StoreAction)
        self.register('action', 'store', _StoreAction)
        self.register('action', 'store_const', _StoreConstAction)
        self.register('action', 'store_true', _StoreTrueAction)
        self.register('action', 'store_false', _StoreFalseAction)
        self.register('action', 'append', _AppendAction)
        self.register('action', 'append_const', _AppendConstAction)
        self.register('action', 'count', _CountAction)
        self.register('action', 'help', _HelpAction)
        self.register('action', 'version', _VersionAction)
        self.register('action', 'parsers', _SubParsersAction)
        # raise an exception if the conflict handler is invalid
        self._get_handler()
        # action storage
        self._actions = []
        self._option_string_actions = {}
        # groups
        self._action_groups = []
        self._mutually_exclusive_groups = []
        # defaults storage
        self._defaults = {}
        # determines whether an "option" looks like a negative number
        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
        # whether or not there are any optionals that look like negative
        # numbers -- uses a list so it can be shared and edited
        self._has_negative_number_optionals = []
# ====================
# Registration methods
# ====================
def register(self, registry_name, value, object):
registry = self._registries.setdefault(registry_name, {})
registry[value] = object
def _registry_get(self, registry_name, value, default=None):
return self._registries[registry_name].get(value, default)
# ==================================
# Namespace default accessor methods
# ==================================
def set_defaults(self, **kwargs):
self._defaults.update(kwargs)
# if these defaults match any existing arguments, replace
# the previous default on the object with the new one
for action in self._actions:
if action.dest in kwargs:
action.default = kwargs[action.dest]
def get_default(self, dest):
for action in self._actions:
if action.dest == dest and action.default is not None:
return action.default
return self._defaults.get(dest, None)
# =======================
# Adding argument actions
# =======================
    def add_argument(self, *args, **kwargs):
        """
        add_argument(dest, ..., name=value, ...)
        add_argument(option_string, option_string, ..., name=value, ...)
        """
        # if no positional args are supplied or only one is supplied and
        # it doesn't look like an option string, parse a positional
        # argument
        chars = self.prefix_chars
        if not args or len(args) == 1 and args[0][0] not in chars:
            if args and 'dest' in kwargs:
                raise ValueError('dest supplied twice for positional argument')
            kwargs = self._get_positional_kwargs(*args, **kwargs)
        # otherwise, we're adding an optional argument
        else:
            kwargs = self._get_optional_kwargs(*args, **kwargs)
        # if no default was supplied, use the parser-level default
        # NOTE(review): the check is "'default' not in kwargs", so an
        # explicit default=None is respected and not overridden here
        if 'default' not in kwargs:
            dest = kwargs['dest']
            if dest in self._defaults:
                kwargs['default'] = self._defaults[dest]
            elif self.argument_default is not None:
                kwargs['default'] = self.argument_default
        # create the action object, and add it to the parser
        action_class = self._pop_action_class(kwargs)
        if not _callable(action_class):
            raise ValueError('unknown action "%s"' % action_class)
        action = action_class(**kwargs)
        # raise an error if the action type is not callable
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            raise ValueError('%r is not callable' % type_func)
        return self._add_action(action)
def add_argument_group(self, *args, **kwargs):
group = _ArgumentGroup(self, *args, **kwargs)
self._action_groups.append(group)
return group
def add_mutually_exclusive_group(self, **kwargs):
group = _MutuallyExclusiveGroup(self, **kwargs)
self._mutually_exclusive_groups.append(group)
return group
def _add_action(self, action):
# resolve any conflicts
self._check_conflict(action)
# add to actions list
self._actions.append(action)
action.container = self
# index the action by any option strings it has
for option_string in action.option_strings:
self._option_string_actions[option_string] = action
# set the flag if any option strings look like negative numbers
for option_string in action.option_strings:
if self._negative_number_matcher.match(option_string):
if not self._has_negative_number_optionals:
self._has_negative_number_optionals.append(True)
# return the created action
return action
    def _remove_action(self, action):
        # Drop the action from the flat list; subclasses extend this to also
        # update their own per-group bookkeeping.
        self._actions.remove(action)
    def _add_container_actions(self, container):
        """Copy all actions from *container* into self, preserving their
        argument-group and mutually-exclusive-group structure.

        Groups are matched by title; a ValueError is raised if two groups in
        self share a title, since actions could not be merged unambiguously.
        """
        # collect groups by titles
        title_group_map = {}
        for group in self._action_groups:
            if group.title in title_group_map:
                msg = _('cannot merge actions - two groups are named %r')
                raise ValueError(msg % (group.title))
            title_group_map[group.title] = group
        # map each action to its group
        group_map = {}
        for group in container._action_groups:
            # if a group with the title exists, use that, otherwise
            # create a new group matching the container's group
            if group.title not in title_group_map:
                title_group_map[group.title] = self.add_argument_group(
                    title=group.title,
                    description=group.description,
                    conflict_handler=group.conflict_handler)
            # map the actions to their new group
            for action in group._group_actions:
                group_map[action] = title_group_map[group.title]
        # add container's mutually exclusive groups
        # NOTE: if add_mutually_exclusive_group ever gains title= and
        # description= then this code will need to be expanded as above
        for group in container._mutually_exclusive_groups:
            mutex_group = self.add_mutually_exclusive_group(
                required=group.required)
            # map the actions to their new mutex group
            for action in group._group_actions:
                group_map[action] = mutex_group
        # add all actions to this container or their group
        for action in container._actions:
            group_map.get(action, self)._add_action(action)
def _get_positional_kwargs(self, dest, **kwargs):
# make sure required is not specified
if 'required' in kwargs:
msg = _("'required' is an invalid argument for positionals")
raise TypeError(msg)
# mark positional arguments as required if at least one is
# always required
if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
kwargs['required'] = True
if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
kwargs['required'] = True
# return the keyword arguments with no option strings
return dict(kwargs, dest=dest, option_strings=[])
def _get_optional_kwargs(self, *args, **kwargs):
# determine short and long option strings
option_strings = []
long_option_strings = []
for option_string in args:
# error on strings that don't start with an appropriate prefix
if not option_string[0] in self.prefix_chars:
msg = _('invalid option string %r: '
'must start with a character %r')
tup = option_string, self.prefix_chars
raise ValueError(msg % tup)
# strings starting with two prefix characters are long options
option_strings.append(option_string)
if option_string[0] in self.prefix_chars:
if len(option_string) > 1:
if option_string[1] in self.prefix_chars:
long_option_strings.append(option_string)
# infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
dest = kwargs.pop('dest', None)
if dest is None:
if long_option_strings:
dest_option_string = long_option_strings[0]
else:
dest_option_string = option_strings[0]
dest = dest_option_string.lstrip(self.prefix_chars)
if not dest:
msg = _('dest= is required for options like %r')
raise ValueError(msg % option_string)
dest = dest.replace('-', '_')
# return the updated keyword arguments
return dict(kwargs, dest=dest, option_strings=option_strings)
def _pop_action_class(self, kwargs, default=None):
action = kwargs.pop('action', default)
return self._registry_get('action', action, action)
def _get_handler(self):
# determine function from conflict handler string
handler_func_name = '_handle_conflict_%s' % self.conflict_handler
try:
return getattr(self, handler_func_name)
except AttributeError:
msg = _('invalid conflict_resolution value: %r')
raise ValueError(msg % self.conflict_handler)
def _check_conflict(self, action):
# find all options that conflict with this option
confl_optionals = []
for option_string in action.option_strings:
if option_string in self._option_string_actions:
confl_optional = self._option_string_actions[option_string]
confl_optionals.append((option_string, confl_optional))
# resolve any conflicts
if confl_optionals:
conflict_handler = self._get_handler()
conflict_handler(action, confl_optionals)
def _handle_conflict_error(self, action, conflicting_actions):
message = _('conflicting option string(s): %s')
conflict_string = ', '.join([option_string
for option_string, _unused
in conflicting_actions])
raise ArgumentError(action, message % conflict_string)
def _handle_conflict_resolve(self, action, conflicting_actions):
# remove all conflicting options
for option_string, action in conflicting_actions:
# remove the conflicting option
action.option_strings.remove(option_string)
self._option_string_actions.pop(option_string, None)
# if the option now has no option string, remove it from the
# container holding it
if not action.option_strings:
action.container._remove_action(action)
class _ArgumentGroup(_ActionsContainer):
    """A titled group of actions that shares its registries, action list and
    defaults with the parent container it belongs to."""
    def __init__(self, container, title=None, description=None, **kwargs):
        # inherit any unspecified settings from the parent container
        kwargs.setdefault('conflict_handler', container.conflict_handler)
        kwargs.setdefault('prefix_chars', container.prefix_chars)
        kwargs.setdefault('argument_default', container.argument_default)
        super(_ArgumentGroup, self).__init__(description=description, **kwargs)
        # group-specific attributes
        self.title = title
        self._group_actions = []
        # share most state with the parent container so that actions added
        # here are visible to the parser as a whole
        self._registries = container._registries
        self._actions = container._actions
        self._option_string_actions = container._option_string_actions
        self._defaults = container._defaults
        self._has_negative_number_optionals = container._has_negative_number_optionals
    def _add_action(self, action):
        # register with the shared container state, then track locally
        registered = super(_ArgumentGroup, self)._add_action(action)
        self._group_actions.append(registered)
        return registered
    def _remove_action(self, action):
        super(_ArgumentGroup, self)._remove_action(action)
        self._group_actions.remove(action)
class _MutuallyExclusiveGroup(_ArgumentGroup):
    """An argument group whose member options may not be used together."""
    def __init__(self, container, required=False):
        super(_MutuallyExclusiveGroup, self).__init__(container)
        self.required = required
        self._container = container
    def _add_action(self, action):
        # a required member would make the exclusivity constraint unsatisfiable
        if action.required:
            raise ValueError(_('mutually exclusive arguments must be optional'))
        registered = self._container._add_action(action)
        self._group_actions.append(registered)
        return registered
    def _remove_action(self, action):
        self._container._remove_action(action)
        self._group_actions.remove(action)
class ArgumentParser(_AttributeHolder, _ActionsContainer):
    """Object for parsing command line strings into Python objects.
    Keyword Arguments:
        - prog -- The name of the program (default: sys.argv[0])
        - usage -- A usage message (default: auto-generated from arguments)
        - description -- A description of what the program does
        - epilog -- Text following the argument descriptions
        - parents -- Parsers whose arguments should be copied into this one
        - formatter_class -- HelpFormatter class for printing help messages
        - prefix_chars -- Characters that prefix optional arguments
        - fromfile_prefix_chars -- Characters that prefix files containing
            additional arguments
        - argument_default -- The default value for all arguments
        - conflict_handler -- String indicating how to handle conflicts
        - add_help -- Add a -h/-help option
    """
    def __init__(self,
                 prog=None,
                 usage=None,
                 description=None,
                 epilog=None,
                 version=None,
                 parents=[],
                 formatter_class=HelpFormatter,
                 prefix_chars='-',
                 fromfile_prefix_chars=None,
                 argument_default=None,
                 conflict_handler='error',
                 add_help=True):
        # the version= constructor argument is legacy; warn callers towards
        # add_argument(..., action='version', ...)
        if version is not None:
            import warnings
            warnings.warn(
                """The "version" argument to ArgumentParser is deprecated. """
                """Please use """
                """"add_argument(..., action='version', version="N", ...)" """
                """instead""", DeprecationWarning)
        superinit = super(ArgumentParser, self).__init__
        superinit(description=description,
                  prefix_chars=prefix_chars,
                  argument_default=argument_default,
                  conflict_handler=conflict_handler)
        # default setting for prog
        if prog is None:
            prog = _os.path.basename(_sys.argv[0])
        self.prog = prog
        self.usage = usage
        self.epilog = epilog
        self.version = version
        self.formatter_class = formatter_class
        self.fromfile_prefix_chars = fromfile_prefix_chars
        self.add_help = add_help
        add_group = self.add_argument_group
        self._positionals = add_group(_('positional arguments'))
        self._optionals = add_group(_('optional arguments'))
        self._subparsers = None
        # register types
        def identity(string):
            return string
        self.register('type', None, identity)
        # add help and version arguments if necessary
        # (using explicit default to override global argument_default)
        if '-' in prefix_chars:
            default_prefix = '-'
        else:
            default_prefix = prefix_chars[0]
        if self.add_help:
            self.add_argument(
                default_prefix+'h', default_prefix*2+'help',
                action='help', default=SUPPRESS,
                help=_('show this help message and exit'))
        if self.version:
            self.add_argument(
                default_prefix+'v', default_prefix*2+'version',
                action='version', default=SUPPRESS,
                version=self.version,
                help=_("show program's version number and exit"))
        # add parent arguments and defaults
        for parent in parents:
            self._add_container_actions(parent)
            try:
                defaults = parent._defaults
            except AttributeError:
                pass
            else:
                self._defaults.update(defaults)
    # =======================
    # Pretty __repr__ methods
    # =======================
    def _get_kwargs(self):
        """Return (name, value) pairs consumed by _AttributeHolder.__repr__."""
        names = [
            'prog',
            'usage',
            'description',
            'version',
            'formatter_class',
            'conflict_handler',
            'add_help',
        ]
        return [(name, getattr(self, name)) for name in names]
    # ==================================
    # Optional/Positional adding methods
    # ==================================
    def add_subparsers(self, **kwargs):
        """Create and register this parser's (single) sub-command action."""
        if self._subparsers is not None:
            self.error(_('cannot have multiple subparser arguments'))
        # add the parser class to the arguments if it's not present
        kwargs.setdefault('parser_class', type(self))
        if 'title' in kwargs or 'description' in kwargs:
            title = _(kwargs.pop('title', 'subcommands'))
            description = _(kwargs.pop('description', None))
            self._subparsers = self.add_argument_group(title, description)
        else:
            self._subparsers = self._positionals
        # prog defaults to the usage message of this parser, skipping
        # optional arguments and with no "usage:" prefix
        if kwargs.get('prog') is None:
            formatter = self._get_formatter()
            positionals = self._get_positional_actions()
            groups = self._mutually_exclusive_groups
            formatter.add_usage(self.usage, positionals, groups, '')
            kwargs['prog'] = formatter.format_help().strip()
        # create the parsers action and add it to the positionals list
        parsers_class = self._pop_action_class(kwargs, 'parsers')
        action = parsers_class(option_strings=[], **kwargs)
        self._subparsers._add_action(action)
        # return the created parsers action
        return action
    def _add_action(self, action):
        """Route the action into the optionals or positionals group."""
        if action.option_strings:
            self._optionals._add_action(action)
        else:
            self._positionals._add_action(action)
        return action
    def _get_optional_actions(self):
        """Return all actions that have option strings."""
        return [action
                for action in self._actions
                if action.option_strings]
    def _get_positional_actions(self):
        """Return all actions that have no option strings."""
        return [action
                for action in self._actions
                if not action.option_strings]
    # =====================================
    # Command line argument parsing methods
    # =====================================
    def parse_args(self, args=None, namespace=None):
        """Parse *args*, erroring out if any arguments are left unrecognized."""
        args, argv = self.parse_known_args(args, namespace)
        if argv:
            msg = _('unrecognized arguments: %s')
            self.error(msg % ' '.join(argv))
        return args
    def parse_known_args(self, args=None, namespace=None):
        """Parse *args*, returning (namespace, list-of-unrecognized-strings)."""
        # args default to the system args
        if args is None:
            args = _sys.argv[1:]
        # default Namespace built from parser defaults
        if namespace is None:
            namespace = Namespace()
        # add any action defaults that aren't present
        for action in self._actions:
            if action.dest is not SUPPRESS:
                if not hasattr(namespace, action.dest):
                    if action.default is not SUPPRESS:
                        default = action.default
                        if isinstance(action.default, basestring):
                            default = self._get_value(action, default)
                        setattr(namespace, action.dest, default)
        # add any parser defaults that aren't present
        for dest in self._defaults:
            if not hasattr(namespace, dest):
                setattr(namespace, dest, self._defaults[dest])
        # parse the arguments and exit if there are any errors
        try:
            namespace, args = self._parse_known_args(args, namespace)
            if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
                args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
                delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
            return namespace, args
        except ArgumentError:
            err = _sys.exc_info()[1]
            self.error(str(err))
    def _parse_known_args(self, arg_strings, namespace):
        """Core parsing loop: consume optionals and positionals alternately."""
        # replace arg strings that are file references
        if self.fromfile_prefix_chars is not None:
            arg_strings = self._read_args_from_files(arg_strings)
        # map all mutually exclusive arguments to the other arguments
        # they can't occur with
        action_conflicts = {}
        for mutex_group in self._mutually_exclusive_groups:
            group_actions = mutex_group._group_actions
            for i, mutex_action in enumerate(mutex_group._group_actions):
                conflicts = action_conflicts.setdefault(mutex_action, [])
                conflicts.extend(group_actions[:i])
                conflicts.extend(group_actions[i + 1:])
        # find all option indices, and determine the arg_string_pattern
        # which has an 'O' if there is an option at an index,
        # an 'A' if there is an argument, or a '-' if there is a '--'
        option_string_indices = {}
        arg_string_pattern_parts = []
        arg_strings_iter = iter(arg_strings)
        for i, arg_string in enumerate(arg_strings_iter):
            # all args after -- are non-options
            if arg_string == '--':
                arg_string_pattern_parts.append('-')
                for arg_string in arg_strings_iter:
                    arg_string_pattern_parts.append('A')
            # otherwise, add the arg to the arg strings
            # and note the index if it was an option
            else:
                option_tuple = self._parse_optional(arg_string)
                if option_tuple is None:
                    pattern = 'A'
                else:
                    option_string_indices[i] = option_tuple
                    pattern = 'O'
                arg_string_pattern_parts.append(pattern)
        # join the pieces together to form the pattern
        arg_strings_pattern = ''.join(arg_string_pattern_parts)
        # converts arg strings to the appropriate type and then takes the action
        seen_actions = set()
        seen_non_default_actions = set()
        def take_action(action, argument_strings, option_string=None):
            seen_actions.add(action)
            argument_values = self._get_values(action, argument_strings)
            # error if this argument is not allowed with other previously
            # seen arguments, assuming that actions that use the default
            # value don't really count as "present"
            if argument_values is not action.default:
                seen_non_default_actions.add(action)
                for conflict_action in action_conflicts.get(action, []):
                    if conflict_action in seen_non_default_actions:
                        msg = _('not allowed with argument %s')
                        action_name = _get_action_name(conflict_action)
                        raise ArgumentError(action, msg % action_name)
            # take the action if we didn't receive a SUPPRESS value
            # (e.g. from a default)
            if argument_values is not SUPPRESS:
                action(self, namespace, argument_values, option_string)
        # function to convert arg_strings into an optional action
        def consume_optional(start_index):
            # get the optional identified at this index
            option_tuple = option_string_indices[start_index]
            action, option_string, explicit_arg = option_tuple
            # identify additional optionals in the same arg string
            # (e.g. -xyz is the same as -x -y -z if no args are required)
            match_argument = self._match_argument
            action_tuples = []
            while True:
                # if we found no optional action, skip it
                if action is None:
                    extras.append(arg_strings[start_index])
                    return start_index + 1
                # if there is an explicit argument, try to match the
                # optional's string arguments to only this
                if explicit_arg is not None:
                    arg_count = match_argument(action, 'A')
                    # if the action is a single-dash option and takes no
                    # arguments, try to parse more single-dash options out
                    # of the tail of the option string
                    chars = self.prefix_chars
                    if arg_count == 0 and option_string[1] not in chars:
                        action_tuples.append((action, [], option_string))
                        char = option_string[0]
                        option_string = char + explicit_arg[0]
                        new_explicit_arg = explicit_arg[1:] or None
                        optionals_map = self._option_string_actions
                        if option_string in optionals_map:
                            action = optionals_map[option_string]
                            explicit_arg = new_explicit_arg
                        else:
                            msg = _('ignored explicit argument %r')
                            raise ArgumentError(action, msg % explicit_arg)
                    # if the action expect exactly one argument, we've
                    # successfully matched the option; exit the loop
                    elif arg_count == 1:
                        stop = start_index + 1
                        args = [explicit_arg]
                        action_tuples.append((action, args, option_string))
                        break
                    # error if a double-dash option did not use the
                    # explicit argument
                    else:
                        msg = _('ignored explicit argument %r')
                        raise ArgumentError(action, msg % explicit_arg)
                # if there is no explicit argument, try to match the
                # optional's string arguments with the following strings
                # if successful, exit the loop
                else:
                    start = start_index + 1
                    selected_patterns = arg_strings_pattern[start:]
                    arg_count = match_argument(action, selected_patterns)
                    stop = start + arg_count
                    args = arg_strings[start:stop]
                    action_tuples.append((action, args, option_string))
                    break
            # add the Optional to the list and return the index at which
            # the Optional's string args stopped
            assert action_tuples
            for action, args, option_string in action_tuples:
                take_action(action, args, option_string)
            return stop
        # the list of Positionals left to be parsed; this is modified
        # by consume_positionals()
        positionals = self._get_positional_actions()
        # function to convert arg_strings into positional actions
        def consume_positionals(start_index):
            # match as many Positionals as possible
            match_partial = self._match_arguments_partial
            selected_pattern = arg_strings_pattern[start_index:]
            arg_counts = match_partial(positionals, selected_pattern)
            # slice off the appropriate arg strings for each Positional
            # and add the Positional and its args to the list
            for action, arg_count in zip(positionals, arg_counts):
                args = arg_strings[start_index: start_index + arg_count]
                start_index += arg_count
                take_action(action, args)
            # slice off the Positionals that we just parsed and return the
            # index at which the Positionals' string args stopped
            positionals[:] = positionals[len(arg_counts):]
            return start_index
        # consume Positionals and Optionals alternately, until we have
        # passed the last option string
        extras = []
        start_index = 0
        if option_string_indices:
            max_option_string_index = max(option_string_indices)
        else:
            max_option_string_index = -1
        while start_index <= max_option_string_index:
            # consume any Positionals preceding the next option
            next_option_string_index = min([
                index
                for index in option_string_indices
                if index >= start_index])
            if start_index != next_option_string_index:
                positionals_end_index = consume_positionals(start_index)
                # only try to parse the next optional if we didn't consume
                # the option string during the positionals parsing
                if positionals_end_index > start_index:
                    start_index = positionals_end_index
                    continue
                else:
                    start_index = positionals_end_index
            # if we consumed all the positionals we could and we're not
            # at the index of an option string, there were extra arguments
            if start_index not in option_string_indices:
                strings = arg_strings[start_index:next_option_string_index]
                extras.extend(strings)
                start_index = next_option_string_index
            # consume the next optional and any arguments for it
            start_index = consume_optional(start_index)
        # consume any positionals following the last Optional
        stop_index = consume_positionals(start_index)
        # if we didn't consume all the argument strings, there were extras
        extras.extend(arg_strings[stop_index:])
        # if we didn't use all the Positional objects, there were too few
        # arg strings supplied.
        if positionals:
            self.error(_('too few arguments'))
        # make sure all required actions were present
        for action in self._actions:
            if action.required:
                if action not in seen_actions:
                    name = _get_action_name(action)
                    self.error(_('argument %s is required') % name)
        # make sure all required groups had one option present
        for group in self._mutually_exclusive_groups:
            if group.required:
                for action in group._group_actions:
                    if action in seen_non_default_actions:
                        break
                # if no actions were used, report the error
                else:
                    names = [_get_action_name(action)
                             for action in group._group_actions
                             if action.help is not SUPPRESS]
                    msg = _('one of the arguments %s is required')
                    self.error(msg % ' '.join(names))
        # return the updated namespace and the extra arguments
        return namespace, extras
    def _read_args_from_files(self, arg_strings):
        """Recursively expand @file-style arguments into their contents."""
        # expand arguments referencing files
        new_arg_strings = []
        for arg_string in arg_strings:
            # for regular arguments, just add them back into the list
            if arg_string[0] not in self.fromfile_prefix_chars:
                new_arg_strings.append(arg_string)
            # replace arguments referencing files with the file content
            else:
                try:
                    args_file = open(arg_string[1:])
                    try:
                        arg_strings = []
                        for arg_line in args_file.read().splitlines():
                            for arg in self.convert_arg_line_to_args(arg_line):
                                arg_strings.append(arg)
                        arg_strings = self._read_args_from_files(arg_strings)
                        new_arg_strings.extend(arg_strings)
                    finally:
                        args_file.close()
                except IOError:
                    err = _sys.exc_info()[1]
                    self.error(str(err))
        # return the modified argument list
        return new_arg_strings
    def convert_arg_line_to_args(self, arg_line):
        """Hook: split one line of an arguments file; default is one arg per line."""
        return [arg_line]
    def _match_argument(self, action, arg_strings_pattern):
        """Return how many leading arg strings the action's nargs consumes."""
        # match the pattern for this action to the arg strings
        nargs_pattern = self._get_nargs_pattern(action)
        match = _re.match(nargs_pattern, arg_strings_pattern)
        # raise an exception if we weren't able to find a match
        if match is None:
            nargs_errors = {
                None: _('expected one argument'),
                OPTIONAL: _('expected at most one argument'),
                ONE_OR_MORE: _('expected at least one argument'),
            }
            default = _('expected %s argument(s)') % action.nargs
            msg = nargs_errors.get(action.nargs, default)
            raise ArgumentError(action, msg)
        # return the number of arguments matched
        return len(match.group(1))
    def _match_arguments_partial(self, actions, arg_strings_pattern):
        """Match as many of *actions* as possible against the pattern prefix."""
        # progressively shorten the actions list by slicing off the
        # final actions until we find a match
        result = []
        for i in range(len(actions), 0, -1):
            actions_slice = actions[:i]
            pattern = ''.join([self._get_nargs_pattern(action)
                               for action in actions_slice])
            match = _re.match(pattern, arg_strings_pattern)
            if match is not None:
                result.extend([len(string) for string in match.groups()])
                break
        # return the list of arg string counts
        return result
    def _parse_optional(self, arg_string):
        """Classify *arg_string*: None for positionals, else (action, string, explicit_arg)."""
        # if it's an empty string, it was meant to be a positional
        if not arg_string:
            return None
        # if it doesn't start with a prefix, it was meant to be positional
        if not arg_string[0] in self.prefix_chars:
            return None
        # if the option string is present in the parser, return the action
        if arg_string in self._option_string_actions:
            action = self._option_string_actions[arg_string]
            return action, arg_string, None
        # if it's just a single character, it was meant to be positional
        if len(arg_string) == 1:
            return None
        # if the option string before the "=" is present, return the action
        if '=' in arg_string:
            option_string, explicit_arg = arg_string.split('=', 1)
            if option_string in self._option_string_actions:
                action = self._option_string_actions[option_string]
                return action, option_string, explicit_arg
        # search through all possible prefixes of the option string
        # and all actions in the parser for possible interpretations
        option_tuples = self._get_option_tuples(arg_string)
        # if multiple actions match, the option string was ambiguous
        if len(option_tuples) > 1:
            options = ', '.join([option_string_elem
                                 for action_elem, option_string_elem, explicit_arg_elem in option_tuples])
            tup = arg_string, options
            self.error(_('ambiguous option: %s could match %s') % tup)
        # if exactly one action matched, this segmentation is good,
        # so return the parsed action
        elif len(option_tuples) == 1:
            option_tuple, = option_tuples
            return option_tuple
        # if it was not found as an option, but it looks like a negative
        # number, it was meant to be positional
        # unless there are negative-number-like options
        if self._negative_number_matcher.match(arg_string):
            if not self._has_negative_number_optionals:
                return None
        # if it contains a space, it was meant to be a positional
        if ' ' in arg_string:
            return None
        # it was meant to be an optional but there is no such option
        # in this parser (though it might be a valid option in a subparser)
        return None, arg_string, None
    def _get_option_tuples(self, option_string):
        """Collect every registered option that *option_string* could abbreviate."""
        result = []
        # option strings starting with two prefix characters are only
        # split at the '='
        chars = self.prefix_chars
        if option_string[0] in chars and option_string[1] in chars:
            if '=' in option_string:
                option_prefix, explicit_arg = option_string.split('=', 1)
            else:
                option_prefix = option_string
                explicit_arg = None
            for option_string in self._option_string_actions:
                if option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)
        # single character options can be concatenated with their arguments
        # but multiple character options always have to have their argument
        # separate
        elif option_string[0] in chars and option_string[1] not in chars:
            option_prefix = option_string
            explicit_arg = None
            short_option_prefix = option_string[:2]
            short_explicit_arg = option_string[2:]
            for option_string in self._option_string_actions:
                if option_string == short_option_prefix:
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, short_explicit_arg
                    result.append(tup)
                elif option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)
        # shouldn't ever get here
        else:
            self.error(_('unexpected option string: %s') % option_string)
        # return the collected option tuples
        return result
    def _get_nargs_pattern(self, action):
        """Build the regex fragment that matches this action's nargs against the 'A'/'O'/'-' pattern."""
        # in all examples below, we have to allow for '--' args
        # which are represented as '-' in the pattern
        nargs = action.nargs
        # the default (None) is assumed to be a single argument
        if nargs is None:
            nargs_pattern = '(-*A-*)'
        # allow zero or one arguments
        elif nargs == OPTIONAL:
            nargs_pattern = '(-*A?-*)'
        # allow zero or more arguments
        elif nargs == ZERO_OR_MORE:
            nargs_pattern = '(-*[A-]*)'
        # allow one or more arguments
        elif nargs == ONE_OR_MORE:
            nargs_pattern = '(-*A[A-]*)'
        # allow any number of options or arguments
        elif nargs == REMAINDER:
            nargs_pattern = '([-AO]*)'
        # allow one argument followed by any number of options or arguments
        elif nargs == PARSER:
            nargs_pattern = '(-*A[-AO]*)'
        # all others should be integers
        else:
            nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
        # if this is an optional action, -- is not allowed
        if action.option_strings:
            nargs_pattern = nargs_pattern.replace('-*', '')
            nargs_pattern = nargs_pattern.replace('-', '')
        # return the pattern
        return nargs_pattern
    # ========================
    # Value conversion methods
    # ========================
    def _get_values(self, action, arg_strings):
        """Convert matched arg strings into the value(s) the action receives."""
        # for everything but PARSER args, strip out '--'
        if action.nargs not in [PARSER, REMAINDER]:
            arg_strings = [s for s in arg_strings if s != '--']
        # optional argument produces a default when not present
        if not arg_strings and action.nargs == OPTIONAL:
            if action.option_strings:
                value = action.const
            else:
                value = action.default
            if isinstance(value, basestring):
                value = self._get_value(action, value)
                self._check_value(action, value)
        # when nargs='*' on a positional, if there were no command-line
        # args, use the default if it is anything other than None
        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
              not action.option_strings):
            if action.default is not None:
                value = action.default
            else:
                value = arg_strings
            self._check_value(action, value)
        # single argument or optional argument produces a single value
        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
            arg_string, = arg_strings
            value = self._get_value(action, arg_string)
            self._check_value(action, value)
        # REMAINDER arguments convert all values, checking none
        elif action.nargs == REMAINDER:
            value = [self._get_value(action, v) for v in arg_strings]
        # PARSER arguments convert all values, but check only the first
        elif action.nargs == PARSER:
            value = [self._get_value(action, v) for v in arg_strings]
            self._check_value(action, value[0])
        # all other types of nargs produce a list
        else:
            value = [self._get_value(action, v) for v in arg_strings]
            for v in value:
                self._check_value(action, v)
        # return the converted value
        return value
    def _get_value(self, action, arg_string):
        """Apply the action's type function to one arg string."""
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            msg = _('%r is not callable')
            raise ArgumentError(action, msg % type_func)
        # convert the value to the appropriate type
        try:
            result = type_func(arg_string)
        # ArgumentTypeErrors indicate errors
        except ArgumentTypeError:
            name = getattr(action.type, '__name__', repr(action.type))
            msg = str(_sys.exc_info()[1])
            raise ArgumentError(action, msg)
        # TypeErrors or ValueErrors also indicate errors
        except (TypeError, ValueError):
            name = getattr(action.type, '__name__', repr(action.type))
            msg = _('invalid %s value: %r')
            raise ArgumentError(action, msg % (name, arg_string))
        # return the converted value
        return result
    def _check_value(self, action, value):
        """Raise ArgumentError if the action declares choices and value is not one of them."""
        # converted value must be one of the choices (if specified)
        if action.choices is not None and value not in action.choices:
            tup = value, ', '.join(map(repr, action.choices))
            msg = _('invalid choice: %r (choose from %s)') % tup
            raise ArgumentError(action, msg)
    # =======================
    # Help-formatting methods
    # =======================
    def format_usage(self):
        """Return just the usage string for this parser."""
        formatter = self._get_formatter()
        formatter.add_usage(self.usage, self._actions,
                            self._mutually_exclusive_groups)
        return formatter.format_help()
    def format_help(self):
        """Return the full help message: usage, description, groups and epilog."""
        formatter = self._get_formatter()
        # usage
        formatter.add_usage(self.usage, self._actions,
                            self._mutually_exclusive_groups)
        # description
        formatter.add_text(self.description)
        # positionals, optionals and user-defined groups
        for action_group in self._action_groups:
            formatter.start_section(action_group.title)
            formatter.add_text(action_group.description)
            formatter.add_arguments(action_group._group_actions)
            formatter.end_section()
        # epilog
        formatter.add_text(self.epilog)
        # determine help from format above
        return formatter.format_help()
    def format_version(self):
        import warnings
        warnings.warn(
            'The format_version method is deprecated -- the "version" '
            'argument to ArgumentParser is no longer supported.',
            DeprecationWarning)
        formatter = self._get_formatter()
        formatter.add_text(self.version)
        return formatter.format_help()
    def _get_formatter(self):
        """Instantiate this parser's formatter_class for its prog."""
        return self.formatter_class(prog=self.prog)
    # =====================
    # Help-printing methods
    # =====================
    def print_usage(self, file=None):
        """Write the usage string to *file* (default: stdout)."""
        if file is None:
            file = _sys.stdout
        self._print_message(self.format_usage(), file)
    def print_help(self, file=None):
        """Write the full help message to *file* (default: stdout)."""
        if file is None:
            file = _sys.stdout
        self._print_message(self.format_help(), file)
    def print_version(self, file=None):
        import warnings
        warnings.warn(
            'The print_version method is deprecated -- the "version" '
            'argument to ArgumentParser is no longer supported.',
            DeprecationWarning)
        self._print_message(self.format_version(), file)
    def _print_message(self, message, file=None):
        """Write *message* to *file* (default: stderr) if it is non-empty."""
        if message:
            if file is None:
                file = _sys.stderr
            file.write(message)
    # ===============
    # Exiting methods
    # ===============
    def exit(self, status=0, message=None):
        """Optionally print *message* to stderr, then terminate with *status*."""
        if message:
            self._print_message(message, _sys.stderr)
        _sys.exit(status)
    def error(self, message):
        """error(message: string)
        Prints a usage message incorporating the message to stderr and
        exits.
        If you override this in a subclass, it should not return -- it
        should either exit or raise an exception.
        """
        self.print_usage(_sys.stderr)
        self.exit(2, _('%s: error: %s\n') % (self.prog, message))
| 89,152
|
Python
|
.py
| 1,971
| 33.526636
| 89
| 0.577854
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,165
|
selectors.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/selectors.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
"""Selectors module.
This module allows high-level and efficient I/O multiplexing, built upon the
`select` module primitives.
The following code is adapted from trollius.selectors.
"""
from abc import ABCMeta, abstractmethod
from collections import namedtuple, Mapping
import math
import select
import sys
from zato.server.ext.zunicorn._compat import wrap_error, InterruptedError
from zato.server.ext.zunicorn import six
# generic events, that must be mapped to implementation-specific ones
EVENT_READ = (1 << 0)
EVENT_WRITE = (1 << 1)
def _fileobj_to_fd(fileobj):
    """Map *fileobj* (a file object or a raw descriptor) to its descriptor.

    Parameters:
    fileobj -- file object or file descriptor

    Returns:
    corresponding file descriptor

    Raises:
    ValueError if the object is invalid
    """
    if not isinstance(fileobj, six.integer_types):
        try:
            fd = int(fileobj.fileno())
        except (AttributeError, TypeError, ValueError):
            raise ValueError("Invalid file object: "
                             "{0!r}".format(fileobj))
    else:
        fd = fileobj
    if fd < 0:
        raise ValueError("Invalid file descriptor: {0}".format(fd))
    return fd
SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])
"""Object used to associate a file object to its backing file descriptor,
selected event mask and attached data."""
class _SelectorMapping(Mapping):
"""Mapping of file objects to selector keys."""
def __init__(self, selector):
self._selector = selector
def __len__(self):
return len(self._selector._fd_to_key)
def __getitem__(self, fileobj):
try:
fd = self._selector._fileobj_lookup(fileobj)
return self._selector._fd_to_key[fd]
except KeyError:
raise KeyError("{0!r} is not registered".format(fileobj))
def __iter__(self):
return iter(self._selector._fd_to_key)
class BaseSelector(six.with_metaclass(ABCMeta)):
"""Selector abstract base class.
A selector supports registering file objects to be monitored for specific
I/O events.
A file object is a file descriptor or any object with a `fileno()` method.
An arbitrary object can be attached to the file object, which can be used
for example to store context information, a callback, etc.
A selector can use various implementations (select(), poll(), epoll()...)
depending on the platform. The default `Selector` class uses the most
efficient implementation on the current platform.
"""
@abstractmethod
def register(self, fileobj, events, data=None):
"""Register a file object.
Parameters:
fileobj -- file object or file descriptor
events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE)
data -- attached data
Returns:
SelectorKey instance
Raises:
ValueError if events is invalid
KeyError if fileobj is already registered
OSError if fileobj is closed or otherwise is unacceptable to
the underlying system call (if a system call is made)
Note:
OSError may or may not be raised
"""
raise NotImplementedError
@abstractmethod
def unregister(self, fileobj):
"""Unregister a file object.
Parameters:
fileobj -- file object or file descriptor
Returns:
SelectorKey instance
Raises:
KeyError if fileobj is not registered
Note:
If fileobj is registered but has since been closed this does
*not* raise OSError (even if the wrapped syscall does)
"""
raise NotImplementedError
def modify(self, fileobj, events, data=None):
"""Change a registered file object monitored events or attached data.
Parameters:
fileobj -- file object or file descriptor
events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE)
data -- attached data
Returns:
SelectorKey instance
Raises:
Anything that unregister() or register() raises
"""
self.unregister(fileobj)
return self.register(fileobj, events, data)
@abstractmethod
def select(self, timeout=None):
"""Perform the actual selection, until some monitored file objects are
ready or a timeout expires.
Parameters:
timeout -- if timeout > 0, this specifies the maximum wait time, in
seconds
if timeout <= 0, the select() call won't block, and will
report the currently ready file objects
if timeout is None, select() will block until a monitored
file object becomes ready
Returns:
list of (key, events) for ready file objects
`events` is a bitwise mask of EVENT_READ|EVENT_WRITE
"""
raise NotImplementedError
def close(self):
"""Close the selector.
This must be called to make sure that any underlying resource is freed.
"""
pass
def get_key(self, fileobj):
"""Return the key associated to a registered file object.
Returns:
SelectorKey for this file object
"""
mapping = self.get_map()
try:
return mapping[fileobj]
except KeyError:
raise KeyError("{0!r} is not registered".format(fileobj))
@abstractmethod
def get_map(self):
"""Return a mapping of file objects to selector keys."""
raise NotImplementedError
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
class _BaseSelectorImpl(BaseSelector):
    """Base selector implementation."""
    def __init__(self):
        # this maps file descriptors to keys
        self._fd_to_key = {}
        # read-only mapping returned by get_map()
        self._map = _SelectorMapping(self)
    def _fileobj_lookup(self, fileobj):
        """Return a file descriptor from a file object.
        This wraps _fileobj_to_fd() to do an exhaustive search in case
        the object is invalid but we still have it in our map. This
        is used by unregister() so we can unregister an object that
        was previously registered even if it is closed. It is also
        used by _SelectorMapping.
        """
        try:
            return _fileobj_to_fd(fileobj)
        except ValueError:
            # Do an exhaustive search.
            for key in self._fd_to_key.values():
                if key.fileobj is fileobj:
                    return key.fd
            # Raise ValueError after all.
            raise
    def register(self, fileobj, events, data=None):
        # Reject an empty mask or any bits outside EVENT_READ|EVENT_WRITE.
        if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
            raise ValueError("Invalid events: {0!r}".format(events))
        key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)
        if key.fd in self._fd_to_key:
            raise KeyError("{0!r} (FD {1}) is already registered"
                           .format(fileobj, key.fd))
        self._fd_to_key[key.fd] = key
        return key
    def unregister(self, fileobj):
        try:
            key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
        except KeyError:
            raise KeyError("{0!r} is not registered".format(fileobj))
        return key
    def modify(self, fileobj, events, data=None):
        # TODO: Subclasses can probably optimize this even further.
        try:
            key = self._fd_to_key[self._fileobj_lookup(fileobj)]
        except KeyError:
            raise KeyError("{0!r} is not registered".format(fileobj))
        if events != key.events:
            # Event mask changed: fully re-register the file object.
            self.unregister(fileobj)
            key = self.register(fileobj, events, data)
        elif data != key.data:
            # Use a shortcut to update the data.
            key = key._replace(data=data)
            self._fd_to_key[key.fd] = key
        return key
    def close(self):
        # Drops only the bookkeeping; the underlying file descriptors are
        # not closed here and remain owned by the caller.
        self._fd_to_key.clear()
    def get_map(self):
        return self._map
    def _key_from_fd(self, fd):
        """Return the key associated to a given file descriptor.
        Parameters:
        fd -- file descriptor
        Returns:
        corresponding key, or None if not found
        """
        try:
            return self._fd_to_key[fd]
        except KeyError:
            return None
class SelectSelector(_BaseSelectorImpl):
    """Select-based selector."""
    def __init__(self):
        super(SelectSelector, self).__init__()
        # Descriptor sets handed straight to select.select().
        self._readers = set()
        self._writers = set()
    def register(self, fileobj, events, data=None):
        key = super(SelectSelector, self).register(fileobj, events, data)
        if events & EVENT_READ:
            self._readers.add(key.fd)
        if events & EVENT_WRITE:
            self._writers.add(key.fd)
        return key
    def unregister(self, fileobj):
        key = super(SelectSelector, self).unregister(fileobj)
        self._readers.discard(key.fd)
        self._writers.discard(key.fd)
        return key
    if sys.platform == 'win32':
        def _select(self, r, w, _, timeout=None):
            # On Windows, failed connects are reported via the exceptional
            # set, so fold it into the writable result.
            r, w, x = select.select(r, w, w, timeout)
            return r, w + x, []
    else:
        _select = select.select
    def select(self, timeout=None):
        # Negative timeouts behave like zero (non-blocking poll).
        timeout = None if timeout is None else max(timeout, 0)
        ready = []
        try:
            r, w, _ = wrap_error(self._select,
                                 self._readers, self._writers, [], timeout)
        except InterruptedError:
            # Interrupted by a signal: report nothing ready.
            return ready
        r = set(r)
        w = set(w)
        for fd in r | w:
            events = 0
            if fd in r:
                events |= EVENT_READ
            if fd in w:
                events |= EVENT_WRITE
            key = self._key_from_fd(fd)
            if key:
                # Only report the events the caller registered for.
                ready.append((key, events & key.events))
        return ready
if hasattr(select, 'poll'):
class PollSelector(_BaseSelectorImpl):
"""Poll-based selector."""
def __init__(self):
super(PollSelector, self).__init__()
self._poll = select.poll()
def register(self, fileobj, events, data=None):
key = super(PollSelector, self).register(fileobj, events, data)
poll_events = 0
if events & EVENT_READ:
poll_events |= select.POLLIN
if events & EVENT_WRITE:
poll_events |= select.POLLOUT
self._poll.register(key.fd, poll_events)
return key
def unregister(self, fileobj):
key = super(PollSelector, self).unregister(fileobj)
self._poll.unregister(key.fd)
return key
def select(self, timeout=None):
if timeout is None:
timeout = None
elif timeout <= 0:
timeout = 0
else:
# poll() has a resolution of 1 millisecond, round away from
# zero to wait *at least* timeout seconds.
timeout = int(math.ceil(timeout * 1e3))
ready = []
try:
fd_event_list = wrap_error(self._poll.poll, timeout)
except InterruptedError:
return ready
for fd, event in fd_event_list:
events = 0
if event & ~select.POLLIN:
events |= EVENT_WRITE
if event & ~select.POLLOUT:
events |= EVENT_READ
key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready
if hasattr(select, 'epoll'):
class EpollSelector(_BaseSelectorImpl):
"""Epoll-based selector."""
def __init__(self):
super(EpollSelector, self).__init__()
self._epoll = select.epoll()
def fileno(self):
return self._epoll.fileno()
def register(self, fileobj, events, data=None):
key = super(EpollSelector, self).register(fileobj, events, data)
epoll_events = 0
if events & EVENT_READ:
epoll_events |= select.EPOLLIN
if events & EVENT_WRITE:
epoll_events |= select.EPOLLOUT
self._epoll.register(key.fd, epoll_events)
return key
def unregister(self, fileobj):
key = super(EpollSelector, self).unregister(fileobj)
try:
self._epoll.unregister(key.fd)
except OSError:
# This can happen if the FD was closed since it
# was registered.
pass
return key
def select(self, timeout=None):
if timeout is None:
timeout = -1
elif timeout <= 0:
timeout = 0
else:
# epoll_wait() has a resolution of 1 millisecond, round away
# from zero to wait *at least* timeout seconds.
timeout = math.ceil(timeout * 1e3) * 1e-3
max_ev = len(self._fd_to_key)
ready = []
try:
fd_event_list = wrap_error(self._epoll.poll, timeout, max_ev)
except InterruptedError:
return ready
for fd, event in fd_event_list:
events = 0
if event & ~select.EPOLLIN:
events |= EVENT_WRITE
if event & ~select.EPOLLOUT:
events |= EVENT_READ
key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready
def close(self):
self._epoll.close()
super(EpollSelector, self).close()
if hasattr(select, 'devpoll'):
class DevpollSelector(_BaseSelectorImpl):
"""Solaris /dev/poll selector."""
def __init__(self):
super(DevpollSelector, self).__init__()
self._devpoll = select.devpoll()
def fileno(self):
return self._devpoll.fileno()
def register(self, fileobj, events, data=None):
key = super(DevpollSelector, self).register(fileobj, events, data)
poll_events = 0
if events & EVENT_READ:
poll_events |= select.POLLIN
if events & EVENT_WRITE:
poll_events |= select.POLLOUT
self._devpoll.register(key.fd, poll_events)
return key
def unregister(self, fileobj):
key = super(DevpollSelector, self).unregister(fileobj)
self._devpoll.unregister(key.fd)
return key
def select(self, timeout=None):
if timeout is None:
timeout = None
elif timeout <= 0:
timeout = 0
else:
# devpoll() has a resolution of 1 millisecond, round away from
# zero to wait *at least* timeout seconds.
timeout = math.ceil(timeout * 1e3)
ready = []
try:
fd_event_list = self._devpoll.poll(timeout)
except InterruptedError:
return ready
for fd, event in fd_event_list:
events = 0
if event & ~select.POLLIN:
events |= EVENT_WRITE
if event & ~select.POLLOUT:
events |= EVENT_READ
key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready
def close(self):
self._devpoll.close()
super(DevpollSelector, self).close()
if hasattr(select, 'kqueue'):
class KqueueSelector(_BaseSelectorImpl):
"""Kqueue-based selector."""
def __init__(self):
super(KqueueSelector, self).__init__()
self._kqueue = select.kqueue()
def fileno(self):
return self._kqueue.fileno()
def register(self, fileobj, events, data=None):
key = super(KqueueSelector, self).register(fileobj, events, data)
if events & EVENT_READ:
kev = select.kevent(key.fd, select.KQ_FILTER_READ,
select.KQ_EV_ADD)
self._kqueue.control([kev], 0, 0)
if events & EVENT_WRITE:
kev = select.kevent(key.fd, select.KQ_FILTER_WRITE,
select.KQ_EV_ADD)
self._kqueue.control([kev], 0, 0)
return key
def unregister(self, fileobj):
key = super(KqueueSelector, self).unregister(fileobj)
if key.events & EVENT_READ:
kev = select.kevent(key.fd, select.KQ_FILTER_READ,
select.KQ_EV_DELETE)
try:
self._kqueue.control([kev], 0, 0)
except OSError:
# This can happen if the FD was closed since it
# was registered.
pass
if key.events & EVENT_WRITE:
kev = select.kevent(key.fd, select.KQ_FILTER_WRITE,
select.KQ_EV_DELETE)
try:
self._kqueue.control([kev], 0, 0)
except OSError:
# See comment above.
pass
return key
def select(self, timeout=None):
timeout = None if timeout is None else max(timeout, 0)
max_ev = len(self._fd_to_key)
ready = []
try:
kev_list = wrap_error(self._kqueue.control,
None, max_ev, timeout)
except InterruptedError:
return ready
for kev in kev_list:
fd = kev.ident
flag = kev.filter
events = 0
if flag == select.KQ_FILTER_READ:
events |= EVENT_READ
if flag == select.KQ_FILTER_WRITE:
events |= EVENT_WRITE
key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready
def close(self):
self._kqueue.close()
super(KqueueSelector, self).close()
# Choose the best implementation: roughly, epoll|kqueue|devpoll > poll > select.
# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
if 'KqueueSelector' in globals():
DefaultSelector = KqueueSelector
elif 'EpollSelector' in globals():
DefaultSelector = EpollSelector
elif 'DevpollSelector' in globals():
DefaultSelector = DevpollSelector
elif 'PollSelector' in globals():
DefaultSelector = PollSelector
else:
DefaultSelector = SelectSelector
| 20,393
|
Python
|
.py
| 507
| 29.426036
| 80
| 0.583
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,166
|
reloader.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/reloader.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import os
import os.path
import re
import sys
import time
import threading
COMPILED_EXT_RE = re.compile(r'py[co]$')
class Reloader(threading.Thread):
    """Daemon thread that polls loaded module files and reports changes.

    Parameters:
    extra_files -- iterable of additional file names to watch
    interval    -- polling interval in seconds
    callback    -- callable invoked with the name of each changed file
    """
    # Compiled-extension pattern kept on the class so the thread is
    # self-contained (mirrors the module-level COMPILED_EXT_RE).
    _compiled_ext_re = re.compile(r'py[co]$')
    def __init__(self, extra_files=None, interval=1, callback=None):
        super(Reloader, self).__init__()
        # Thread.setDaemon() is deprecated and removed in Python 3.13;
        # assigning the attribute directly is the supported equivalent.
        self.daemon = True
        self._extra_files = set(extra_files or ())
        self._extra_files_lock = threading.RLock()
        self._interval = interval
        self._callback = callback
    def add_extra_file(self, filename):
        """Add one more file to the watch list (thread-safe)."""
        with self._extra_files_lock:
            self._extra_files.add(filename)
    def get_files(self):
        """Return the source files of all loaded modules plus the extras."""
        # Map .pyc/.pyo paths back to their .py sources.
        fnames = [
            self._compiled_ext_re.sub('py', module.__file__)
            for module in tuple(sys.modules.values())
            if getattr(module, '__file__', None)
        ]
        with self._extra_files_lock:
            fnames.extend(self._extra_files)
        return fnames
    def run(self):
        """Poll file mtimes forever, invoking the callback on each change."""
        mtimes = {}
        while True:
            for filename in self.get_files():
                try:
                    mtime = os.stat(filename).st_mtime
                except OSError:
                    # File vanished or is unreadable -- skip it this round.
                    continue
                old_time = mtimes.get(filename)
                if old_time is None:
                    mtimes[filename] = mtime
                    continue
                elif mtime > old_time:
                    # NOTE: mtimes is deliberately not updated here, so the
                    # callback keeps firing until the process restarts.
                    if self._callback:
                        self._callback(filename)
            time.sleep(self._interval)
has_inotify = False
if sys.platform.startswith('linux'):
try:
from inotify.adapters import Inotify
import inotify.constants
has_inotify = True
except ImportError:
pass
if has_inotify:
class InotifyReloader(threading.Thread):
event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE
| inotify.constants.IN_DELETE_SELF | inotify.constants.IN_MODIFY
| inotify.constants.IN_MOVE_SELF | inotify.constants.IN_MOVED_FROM
| inotify.constants.IN_MOVED_TO)
def __init__(self, extra_files=None, callback=None):
super(InotifyReloader, self).__init__()
self.setDaemon(True)
self._callback = callback
self._dirs = set()
self._watcher = Inotify()
for extra_file in extra_files:
self.add_extra_file(extra_file)
def add_extra_file(self, filename):
dirname = os.path.dirname(filename)
if dirname in self._dirs:
return
self._watcher.add_watch(dirname, mask=self.event_mask)
self._dirs.add(dirname)
def get_dirs(self):
fnames = [
os.path.dirname(COMPILED_EXT_RE.sub('py', module.__file__))
for module in tuple(sys.modules.values())
if hasattr(module, '__file__')
]
return set(fnames)
def run(self):
self._dirs = self.get_dirs()
for dirname in self._dirs:
self._watcher.add_watch(dirname, mask=self.event_mask)
for event in self._watcher.event_gen():
if event is None:
continue
filename = event[3]
self._callback(filename)
else:
class InotifyReloader:
def __init__(self, callback=None):
raise ImportError('You must have the inotify module installed to '
'use the inotify reloader')
preferred_reloader = InotifyReloader if has_inotify else Reloader
reloader_engines = {
'auto': preferred_reloader,
'poll': Reloader,
'inotify': InotifyReloader,
}
| 5,083
|
Python
|
.py
| 127
| 31.007874
| 88
| 0.627898
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,167
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
SERVER_SOFTWARE = 'Zato'
| 1,388
|
Python
|
.py
| 29
| 46.62069
| 65
| 0.797337
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,168
|
debug.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/debug.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
"""The debug module contains utilities and functions for better
debugging Gunicorn."""
# flake8: noqa
import sys
import linecache
import re
import inspect
__all__ = ['spew', 'unspew']
_token_spliter = re.compile(r'\W+')
class Spew:
    """Trace-hook callable that prints executed lines' variable values.

    Parameters:
    trace_names -- iterable of module names to trace, or None for all
    show_values -- when True, print the values of names on each traced line
    """
    def __init__(self, trace_names=None, show_values=True):
        self.trace_names = trace_names
        self.show_values = show_values
    def __call__(self, frame, event, arg):
        if event == 'line':
            lineno = frame.f_lineno
            if '__file__' in frame.f_globals:
                filename = frame.f_globals['__file__']
                # Map compiled files back to their .py source for linecache.
                if (filename.endswith('.pyc') or
                    filename.endswith('.pyo')):
                    filename = filename[:-1]
                name = frame.f_globals['__name__']
                line = linecache.getline(filename, lineno)
            else:
                name = '[unknown]'
                try:
                    src = inspect.getsourcelines(frame)
                    # NOTE(review): inspect.getsourcelines() returns a
                    # (lines, start_lineno) 2-tuple, so src[lineno] indexes
                    # that tuple rather than the source lines; this looks
                    # like it should be src[0][...] -- confirm intent.
                    line = src[lineno]
                except IOError:
                    line = 'Unknown code named [%s]. VM instruction #%d' % (
                        frame.f_code.co_name, frame.f_lasti)
            if self.trace_names is None or name in self.trace_names:
                if not self.show_values:
                    return self
                details = []
                # Report the current value of every global/local name that
                # appears on the executed line.
                tokens = _token_spliter.split(line)
                for tok in tokens:
                    if tok in frame.f_globals:
                        details.append('%s=%r' % (tok, frame.f_globals[tok]))
                    if tok in frame.f_locals:
                        details.append('%s=%r' % (tok, frame.f_locals[tok]))
                if details:
                    print("\t%s" % ' '.join(details))
        return self
def spew(trace_names=None, show_values=False):
    """Install a trace hook which writes incredibly detailed logs
    about what code is being executed to stdout.
    """
    hook = Spew(trace_names, show_values)
    sys.settrace(hook)
def unspew():
    """Uninstall the trace hook previously installed by spew()."""
    sys.settrace(None)
| 3,466
|
Python
|
.py
| 81
| 34.62963
| 77
| 0.636553
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,169
|
sock.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/sock.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import errno
import os
import socket
import stat
import sys
import time
from zato.common.util.platform_ import is_posix
from zato.server.ext.zunicorn import util
from zato.server.ext.zunicorn.six import string_types
class BaseSocket:
    """Wrap a listening socket: create or adopt, configure, bind, listen.

    Parameters:
    address -- configured bind address (family-specific shape)
    conf    -- configuration object (reuse_port, backlog, ...)
    log     -- logger
    fd      -- already-bound descriptor to adopt, or None to create one
    """
    def __init__(self, address, conf, log, fd=None):
        self.log = log
        self.conf = conf
        self.cfg_addr = address
        if fd is None:
            sock = socket.socket(self.FAMILY, socket.SOCK_STREAM)
            bound = False
        else:
            # Adopt an inherited descriptor (already bound by the parent);
            # fromfd() duplicates it, so close the original.
            sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM)
            os.close(fd)
            bound = True
        self.sock = self.set_options(sock, bound=bound)
    def __str__(self):
        return "<socket %d>" % self.sock.fileno()
    def __getattr__(self, name):
        # Delegate unknown attributes to the wrapped socket object.
        return getattr(self.sock, name)
    def set_options(self, sock, bound=False):
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if (self.conf.reuse_port
                and hasattr(socket, 'SO_REUSEPORT')): # pragma: no cover
            try:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            except socket.error as err:
                # Some platforms define SO_REUSEPORT but reject it.
                if err.errno not in (errno.ENOPROTOOPT, errno.EINVAL):
                    raise
        if not bound:
            self.bind(sock)
        # Non-blocking accept loop; workers poll the listener.
        sock.setblocking(0)
        # make sure that the socket can be inherited
        if hasattr(sock, "set_inheritable"):
            sock.set_inheritable(True)
        sock.listen(self.conf.backlog)
        return sock
    def bind(self, sock):
        sock.bind(self.cfg_addr)
    def close(self):
        # Idempotent: safe to call more than once.
        if self.sock is None:
            return
        try:
            self.sock.close()
        except socket.error as e:
            self.log.info("Error while closing socket %s", str(e))
        self.sock = None
class TCPSocket(BaseSocket):
    """IPv4 TCP listener; renders as an http(s) URL and sets TCP_NODELAY."""
    FAMILY = socket.AF_INET
    def __str__(self):
        scheme = "https" if self.conf.is_ssl else "http"
        host, port = self.sock.getsockname()[:2]
        return "%s://%s:%d" % (scheme, host, port)
    def set_options(self, sock, bound=False):
        # Disable Nagle's algorithm, then apply the base options.
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        return super(TCPSocket, self).set_options(sock, bound=bound)
class TCP6Socket(TCPSocket):
    """IPv6 TCP listener; renders with the host in URL bracket notation."""
    FAMILY = socket.AF_INET6
    def __str__(self):
        host, port = self.sock.getsockname()[:2]
        return "http://[%s]:%d" % (host, port)
class UnixSocket(BaseSocket):
    """Unix-domain listener bound to a filesystem path."""
    # This is POSIX-only
    if is_posix:
        FAMILY = socket.AF_UNIX
    else:
        FAMILY = None
    def __init__(self, addr, conf, log, fd=None):
        if fd is None:
            try:
                st = os.stat(addr)
            except OSError as e:
                # ENOENT (no such file) is the normal case; anything else
                # is a real error.
                if e.args[0] != errno.ENOENT:
                    raise
            else:
                # Path exists: reclaim a stale socket file, refuse others.
                if stat.S_ISSOCK(st.st_mode):
                    os.remove(addr)
                else:
                    raise ValueError("%r is not a socket" % addr)
        super(UnixSocket, self).__init__(addr, conf, log, fd=fd)
    def __str__(self):
        return "unix:%s" % self.cfg_addr
    def bind(self, sock):
        # Create the socket file under the configured umask, hand ownership
        # to the configured uid/gid, then restore the previous umask.
        old_umask = os.umask(self.conf.umask)
        sock.bind(self.cfg_addr)
        util.chown(self.cfg_addr, self.conf.uid, self.conf.gid)
        os.umask(old_umask)
def _sock_type(addr):
    """Pick the listener class matching an address specification.

    Tuples map to TCP (v6 when the host is an IPv6 literal), strings to
    Unix sockets; anything else raises TypeError.
    """
    if isinstance(addr, tuple):
        return TCP6Socket if util.is_ipv6(addr[0]) else TCPSocket
    if isinstance(addr, string_types):
        return UnixSocket
    raise TypeError("Unable to create socket from: %r" % addr)
def create_sockets(conf, log, fds=None):
    """
    Create a new socket for the configured addresses or file descriptors.
    If a configured address is a tuple then a TCP socket is created.
    If it is a string, a Unix socket is created. Otherwise, a TypeError is
    raised.
    """
    listeners = []
    # get it only once
    laddr = conf.address
    # check ssl config early to raise the error on startup
    # only the certfile is needed since it can contains the keyfile
    if conf.certfile and not os.path.exists(conf.certfile):
        raise ValueError('certfile "%s" does not exist' % conf.certfile)
    if conf.keyfile and not os.path.exists(conf.keyfile):
        raise ValueError('keyfile "%s" does not exist' % conf.keyfile)
    # sockets are already bound
    if fds is not None:
        # Adopt the inherited descriptors instead of binding anew.
        for fd in fds:
            sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
            sock_name = sock.getsockname()
            sock_type = _sock_type(sock_name)
            listener = sock_type(sock_name, conf, log, fd=fd)
            listeners.append(listener)
        return listeners
    # no sockets is bound, first initialization of gunicorn in this env.
    for addr in laddr:
        sock_type = _sock_type(addr)
        sock = None
        # Up to 5 bind attempts, sleeping 1 second between them.
        for i in range(5):
            try:
                sock = sock_type(addr, conf, log)
            except socket.error as e:
                if e.args[0] == errno.EADDRINUSE:
                    log.error("Connection in use: %s", str(addr))
                if e.args[0] == errno.EADDRNOTAVAIL:
                    log.error("Invalid address: %s", str(addr))
                # NOTE(review): i < 5 is always true inside range(5), so the
                # "Retrying" message and the sleep also run after the final
                # failed attempt -- this looks like it was meant to be i < 4.
                if i < 5:
                    msg = "connection to {addr} failed: {error}"
                    log.debug(msg.format(addr=str(addr), error=str(e)))
                    log.error("Retrying in 1 second.")
                    time.sleep(1)
            else:
                break
        if sock is None:
            log.error("Can't connect to %s", str(addr))
            sys.exit(1)
        listeners.append(sock)
    return listeners
def close_sockets(listeners, unlink=True):
    """Close every listener; optionally unlink Unix socket paths."""
    for sock in listeners:
        name = sock.getsockname()
        sock.close()
        if unlink and _sock_type(name) is UnixSocket:
            os.unlink(name)
| 7,421
|
Python
|
.py
| 191
| 30.544503
| 74
| 0.623502
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,170
|
six.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/six.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2014 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import functools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.8.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# Re-exported Py2 builtins; the self-assignment below keeps pyflakes from
# flagging the import as unused.
from zato.common.py23_.past.builtins import basestring, execfile, long, unicode
execfile = execfile
# Aliases that paper over the Py2/Py3 renaming of the core scalar types.
if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes
    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str
    # Py2 has no sys.maxsize - probe the platform for the Py_ssize_t limit.
    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        class X:
            def __len__(self):
                return 1 << 31
        try:
            # len() must fit in Py_ssize_t; OverflowError means 32-bit.
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr:
    """Descriptor that resolves its value on first access, then replaces
    itself on the instance so subsequent lookups are plain attribute reads."""
    def __init__(self, name):
        self.name = name
    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result) # Invokes __set__.
        # This is a bit ugly, but it avoids running this again.
        delattr(obj.__class__, self.name)
        return result
class MovedModule(_LazyDescr):
    """A module that was renamed between Python 2 and Python 3; imports the
    version-appropriate name lazily via _import_module()."""
    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            # Default the Py3 name to the move's own name when not given.
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old
    def _resolve(self):
        return _import_module(self.mod)
    def __getattr__(self, attr):
        # Resolve the real module and cache the requested attribute on self.
        _module = self._resolve()
        value = getattr(_module, attr)
        setattr(self, attr, value)
        return value
class _LazyModule(types.ModuleType):
    """Module subclass whose attributes are _LazyDescr instances declared
    in the class-level _moved_attributes list."""
    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__
    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs
    # Subclasses should override this
    _moved_attributes = []
class MovedAttribute(_LazyDescr):
    """An attribute (function/class) that lives in different modules and/or
    under different names on Python 2 and Python 3."""
    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # New attribute name defaults to the old one, then to the move name.
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr
    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _SixMetaPathImporter:
    """
    A meta path importer to import six.moves and its submodules.
    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python 3.
    """
    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps fully-qualified submodule names to module (or MovedModule) objects.
        self.known_modules = {}
    def _add_module(self, mod, *fullnames):
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod
    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]
    def find_module(self, fullname, path=None):
        # PEP 302 finder protocol: claim only modules we registered.
        if fullname in self.known_modules:
            return self
        return None
    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)
    def load_module(self, fullname):
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            # Replace the placeholder with the real, version-specific module.
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod
    def is_package(self, fullname):
        """
        Return true, if the named module is a package.
        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")
    def get_code(self, fullname):
        """Return None
        Required, if is_package is implemented"""
        self.__get_module(fullname) # eventually raises ImportError
        return None
    get_source = get_code # same as get_code
# Single importer instance that serves every "six.moves" submodule import.
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
    """Lazy loading of moved objects"""
    __path__ = [] # mark as package
# Registry of every renamed attribute/module: each entry maps one Python 2
# location to its Python 3 counterpart and is resolved lazily on first access.
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
    MovedModule("winreg", "_winreg"),
]
# Install each move as a lazy class attribute and register moved modules
# with the meta path importer so "import <six>.moves.<name>" works.
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        _importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
# The public "moves" pseudo-module backed by the lazy container above.
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
# The five sub-namespaces below rebuild the Python 3 "urllib" package layout
# (parse/error/request/response/robotparser) on top of the lazy-move machinery.
class Module_six_moves_urllib_parse(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
# Registered under both the flat and the nested spelling.
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = [] # mark as package
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")
    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")
def add_move(move):
    """Register *move* (a MovedAttribute or MovedModule) on the lazy
    ``six.moves`` container under its own name."""
    attr_name = move.name
    setattr(_MovedItems, attr_name, move)
def remove_move(name):
    """Remove the move registered under *name* from ``six.moves``.

    Raises AttributeError when no such move exists either on the lazy
    ``_MovedItems`` container or directly in the ``moves`` module dict.
    """
    try:
        delattr(_MovedItems, name)
        return
    except AttributeError:
        pass
    try:
        del moves.__dict__[name]
    except KeyError:
        raise AttributeError("no such move, %r" % (name,))
# Attribute names for introspecting functions/methods, which were renamed
# between Python 2 and Python 3.
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"
    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"
    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"
# next() exists as a builtin from Python 2.6 on; fall back to .next() otherwise.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator
# callable() was removed in Python 3.0/3.1; emulate via __call__ lookup.
try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
    def get_unbound_function(unbound):
        return unbound
    create_bound_method = types.MethodType
    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func
    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)
    # Mixin giving Py2 classes a .next() that delegates to __next__.
    class Iterator:
        def next(self):
            return type(self).__next__(self)
    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
# Version-neutral accessors built on the attribute names chosen above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
# Iterator views over dicts: Py3 views vs Py2 iter* methods.
if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))
    def itervalues(d, **kw):
        return iter(d.values(**kw))
    def iteritems(d, **kw):
        return iter(d.items(**kw))
    def iterlists(d, **kw):
        return iter(d.lists(**kw))
else:
    def iterkeys(d, **kw):
        return iter(d.iterkeys(**kw))
    def itervalues(d, **kw):
        return iter(d.itervalues(**kw))
    def iteritems(d, **kw):
        return iter(d.iteritems(**kw))
    def iterlists(d, **kw):
        return iter(d.iterlists(**kw))
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")
# Byte/text literal helpers and byte-level accessors.
if PY3:
    def b(s):
        return s.encode("latin-1")
    def u(s):
        return s
    unichr = chr
    if sys.version_info[1] <= 1:
        def int2byte(i):
            return bytes((i,))
    else:
        # This is about 2x faster than the implementation above on 3.2+
        int2byte = operator.methodcaller("to_bytes", 1, "big")
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
else:
    def b(s):
        return s
    # Workaround for standalone backslash
    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr
    def byte2int(bs):
        return ord(bs[0])
    def indexbytes(buf, i):
        return ord(buf[i])
    def iterbytes(buf):
        return (ord(byte) for byte in buf)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
# exec_/reraise: the Py2 variants use Py2-only statement syntax, so they are
# compiled from strings to keep this file importable on Python 3.
if PY3:
    exec_ = getattr(moves.builtins, "exec")
    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")
# print function fallback for ancient Python 2 (pre-2.6).
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        # Promote everything to unicode if any argument/separator is unicode.
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
_add_doc(reraise, """Reraise an exception.""")
# functools.wraps only copies __wrapped__ from Python 3.4 on; backfill it.
if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Returns a throw-away class whose only job is to intercept the first
    level of class creation: when a class statement inherits from the
    returned object, the temporary metaclass below replaces itself by
    invoking the real *meta* with the real *bases*.
    """
    class metaclass(meta):
        # Fires when the user's class statement executes; build the final
        # class with the real metaclass and base classes instead.
        def __new__(mcs, clsname, _this_bases, namespace):
            return meta(clsname, bases, namespace)
    return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
    """Class decorator that re-creates the decorated class under *metaclass*.

    The class body is captured from ``cls.__dict__`` and replayed through
    the metaclass; slot descriptors and the implicit ``__dict__`` /
    ``__weakref__`` entries are dropped so the new class re-creates them
    cleanly.
    """
    def wrapper(cls):
        namespace = dict(cls.__dict__)
        slots = namespace.get('__slots__')
        if slots is not None:
            slot_names = [slots] if isinstance(slots, str) else slots
            for slot_name in slot_names:
                namespace.pop(slot_name)
        for implicit in ('__dict__', '__weakref__'):
            namespace.pop(implicit, None)
        return metaclass(cls.__name__, cls.__bases__, namespace)
    return wrapper
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
# Fetch the spec the import system created for this module (None when the
# file is executed directly rather than imported). The previous code set
# __spec__ = None unconditionally, which clobbered the real ModuleSpec and
# made the PEP 451 fixup below unreachable dead code.
__spec__ = globals().get("__spec__")
if __spec__ is not None:
    __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| 28,446
|
Python
|
.py
| 654
| 37.137615
| 98
| 0.660458
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,171
|
systemd.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/systemd.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import os
# File descriptor number of the first socket passed in by systemd
# (fds 0-2 are stdin/stdout/stderr).
SD_LISTEN_FDS_START = 3


def listen_fds(unset_environment=True):
    """Return the number of sockets inherited via systemd socket activation.

    :param unset_environment: when True (the default), remove the
        ``LISTEN_PID`` / ``LISTEN_FDS`` variables from the environment so
        child processes do not mistakenly consume them.
    :type unset_environment: bool
    :return: the value of ``$LISTEN_FDS`` if ``$LISTEN_PID`` names the
        current process, otherwise 0.
    :rtype: int

    .. note::
        Unlike the sd_listen_fds C function, this implementation does not
        set the FD_CLOEXEC flag because the gunicorn arbiter never needs
        to do this.

    .. seealso::
        `<https://www.freedesktop.org/software/systemd/man/sd_listen_fds.html>`_
    """
    environ = os.environ
    inherited = int(environ.get('LISTEN_FDS', 0))
    target_pid = int(environ.get('LISTEN_PID', 0))
    if target_pid != os.getpid():
        # The variables were meant for some other process - ignore them.
        return 0
    if unset_environment:
        environ.pop('LISTEN_PID', None)
        environ.pop('LISTEN_FDS', None)
    return inherited
| 2,598
|
Python
|
.py
| 56
| 42.821429
| 80
| 0.754163
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,172
|
message.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/http/message.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import regex as re
import socket
from errno import ENOTCONN
from zato.server.ext.zunicorn._compat import bytes_to_str
from zato.server.ext.zunicorn.http.unreader import SocketUnreader
from zato.server.ext.zunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body
from zato.server.ext.zunicorn.http.errors import (InvalidHeader, InvalidHeaderName, NoMoreData,
InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion,
LimitRequestLine, LimitRequestHeaders)
from zato.server.ext.zunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
from zato.server.ext.zunicorn.http.errors import InvalidSchemeHeaders
from zato.server.ext.zunicorn.six import BytesIO, string_types
from zato.server.ext.zunicorn.util import split_request_uri
# Hard caps on request-line and header sizes; the per-config limits are
# clamped against these in Request.__init__ / Message.__init__.
MAX_REQUEST_LINE = 8190
MAX_HEADERS = 32768
DEFAULT_MAX_HEADERFIELD_SIZE = 8190
# Characters forbidden in a header field name (controls and separators).
HEADER_RE = re.compile(r"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\"]")
# NOTE(review): "$-_" inside this class is a character *range* (0x24-0x5F),
# which also matches characters such as ";", "<", "=" and "@" - presumably
# "$", "-", "_" were meant as individual characters; confirm against the
# request-line parser before tightening.
METHOD_RE = re.compile(r"[A-Z0-9$-_.]{3,20}")
VERSION_RE = re.compile(r"HTTP/(\d+)\.(\d+)")
class Message:
    """Base class for an HTTP message: parses headers, enforces size limits
    and selects the appropriate body reader. Subclasses implement parse()."""
    def __init__(self, cfg, unreader, MAX_HEADERS=MAX_HEADERS,
            DEFAULT_MAX_HEADERFIELD_SIZE=DEFAULT_MAX_HEADERFIELD_SIZE):
        # MAX_HEADERS / DEFAULT_MAX_HEADERFIELD_SIZE are bound as default
        # arguments so lookups inside __init__ are fast locals.
        self.cfg = cfg
        self.unreader = unreader
        self.version = None
        self.headers = []
        self.trailers = []
        self.body = None
        self.scheme = "https" if cfg.is_ssl else "http"
        # set headers limits
        self.limit_request_fields = MAX_HEADERS
        self.limit_request_field_size = DEFAULT_MAX_HEADERFIELD_SIZE
        # set max header buffer size: worst case of every header field at its
        # size limit plus CRLF, plus the final CRLFCRLF terminator.
        max_header_field_size = self.limit_request_field_size or DEFAULT_MAX_HEADERFIELD_SIZE
        self.max_buffer_headers = self.limit_request_fields * \
            (max_header_field_size + 2) + 4
        # Parse immediately; bytes read past the headers are pushed back so
        # the body reader can consume them.
        unused = self.parse(self.unreader)
        self.unreader.unread(unused)
        self.set_body_reader()
    def parse(self, unreader):
        # Subclass responsibility (see Request.parse).
        raise NotImplementedError()
    def parse_headers(self, data, bytes_to_str=bytes_to_str):
        """Parse the raw header block *data* (bytes, without the trailing
        blank line) into a list of (NAME, value) tuples, validating names,
        enforcing limits and handling X-Forwarded-Proto-style scheme headers
        from trusted peers."""
        cfg = self.cfg
        headers = []
        # Split lines on \r\n keeping the \r\n on each line
        lines = [bytes_to_str(line) + "\r\n" for line in data.split(b"\r\n")]
        # handle scheme headers: only honour them from peers listed in
        # forwarded_allow_ips (or everyone when "*" is configured).
        scheme_header = False
        secure_scheme_headers = {}
        if '*' in cfg.forwarded_allow_ips:
            secure_scheme_headers = cfg.secure_scheme_headers
        elif isinstance(self.unreader, SocketUnreader):
            remote_addr = self.unreader.sock.getpeername()
            if isinstance(remote_addr, tuple):
                remote_host = remote_addr[0]
                if remote_host in cfg.forwarded_allow_ips:
                    secure_scheme_headers = cfg.secure_scheme_headers
            elif isinstance(remote_addr, string_types):
                # A string peer name means a UNIX socket - always trusted.
                secure_scheme_headers = cfg.secure_scheme_headers
        # Parse headers into key/value pairs paying attention
        # to continuation lines.
        self_limit_request_fields = self.limit_request_fields
        self_limit_request_field_size = self.limit_request_field_size
        lines_pop = lines.pop
        while lines:
            if len(headers) >= self_limit_request_fields:
                raise LimitRequestHeaders("limit request headers fields")
            # Parse initial header name : value pair.
            curr = lines_pop(0)
            header_length = len(curr)
            if curr.find(":") < 0:
                raise InvalidHeader(curr.strip())
            name, value = curr.split(":", 1)
            name = name.rstrip(" \t").upper()
            if HEADER_RE.search(name):
                raise InvalidHeaderName(name)
            name, value = name.strip(), [value.lstrip()]
            # Consume value continuation lines (obsolete line folding).
            while lines and lines[0].startswith((" ", "\t")):
                curr = lines_pop(0)
                header_length += len(curr)
                if header_length > self_limit_request_field_size > 0:
                    raise LimitRequestHeaders("limit request headers "
                                              + "fields size")
                value.append(curr)
            value = ''.join(value).rstrip()
            if header_length > self_limit_request_field_size > 0:
                raise LimitRequestHeaders("limit request headers fields size")
            if name in secure_scheme_headers:
                secure = value == secure_scheme_headers[name]
                scheme = "https" if secure else "http"
                if scheme_header:
                    # Conflicting scheme headers are rejected outright.
                    if scheme != self.scheme:
                        raise InvalidSchemeHeaders()
                else:
                    scheme_header = True
                    self.scheme = scheme
            headers.append((name, value))
        return headers
    def set_body_reader(self):
        """Choose the body reader based on Transfer-Encoding/Content-Length
        (chunked wins; no length at all means read-to-EOF)."""
        chunked = False
        content_length = None
        for (name, value) in self.headers:
            if name == "CONTENT-LENGTH":
                content_length = value
            elif name == "TRANSFER-ENCODING":
                chunked = value.lower() == "chunked"
            elif name == "SEC-WEBSOCKET-KEY1":
                # Legacy WebSocket (hixie-76) handshake carries an 8-byte body.
                content_length = 8
        if chunked:
            self.body = Body(ChunkedReader(self, self.unreader))
        elif content_length is not None:
            try:
                content_length = int(content_length)
            except ValueError:
                raise InvalidHeader("CONTENT-LENGTH", req=self)
            if content_length < 0:
                raise InvalidHeader("CONTENT-LENGTH", req=self)
            self.body = Body(LengthReader(self.unreader, content_length))
        else:
            self.body = Body(EOFReader(self.unreader))
    def should_close(self):
        """Return True when the connection must be closed after this message:
        an explicit Connection: close, or HTTP/1.0 without keep-alive."""
        for (h, v) in self.headers:
            if h == "CONNECTION":
                v = v.lower().strip()
                if v == "close":
                    return True
                elif v == "keep-alive":
                    return False
                break
        return self.version <= (1, 0)
class Request(Message):
    def __init__(self, cfg, unreader, req_number=1):
        # Components of the request line; filled in by parse_request_line().
        self.method = None
        self.uri = None
        self.path = None
        self.query = None
        self.fragment = None
        # get max request line size, clamped to the hard MAX_REQUEST_LINE cap
        self.limit_request_line = cfg.limit_request_line
        if (self.limit_request_line < 0
                or self.limit_request_line >= MAX_REQUEST_LINE):
            self.limit_request_line = MAX_REQUEST_LINE
        # Ordinal of this request on its keep-alive connection (1-based);
        # a PROXY protocol line is only accepted on the first request.
        self.req_number = req_number
        self.proxy_protocol_info = None
        # Message.__init__ drives the actual parse.
        super(Request, self).__init__(cfg, unreader)
    def get_data(self, unreader, buf, stop=False, NoMoreData=NoMoreData):
        # NoMoreData is bound as a default argument so the lookup is a fast
        # local access on this hot path.
        data = unreader.read()
        if not data:
            if stop:
                # EOF before anything was read - signalled as StopIteration so
                # the caller can treat it as "no further requests".
                raise StopIteration()
            # EOF mid-message; attach whatever was buffered to the exception.
            raise NoMoreData(buf.getvalue())
        buf.write(data)
def parse(self, unreader, BytesIO=BytesIO):
buf = BytesIO()
self.get_data(unreader, buf, stop=True)
# get request line
line, rbuf = self.read_line(unreader, buf, self.limit_request_line)
# proxy protocol
if self.cfg.proxy_protocol:
if self.proxy_protocol(bytes_to_str(line)):
# get next request line
buf = BytesIO()
buf.write(rbuf)
line, rbuf = self.read_line(unreader, buf, self.limit_request_line)
self.parse_request_line(line)
buf = BytesIO()
buf.write(rbuf)
# Headers
data = buf.getvalue()
idx = data.find(b"\r\n\r\n")
done = data[:2] == b"\r\n"
data_find = data.find
self_get_data = self.get_data
buf_getvalue = buf.getvalue
self_max_buffer_headers = self.max_buffer_headers
while True:
idx = data_find(b"\r\n\r\n")
done = data[:2] == b"\r\n"
if idx < 0 and not done:
self_get_data(unreader, buf)
data = buf_getvalue()
if len(data) > self_max_buffer_headers:
raise LimitRequestHeaders("max buffer headers")
else:
break
if done:
self.unreader.unread(data[2:])
return b""
self.headers = self.parse_headers(data[:idx])
ret = data[idx + 4:]
buf = None
return ret
def read_line(self, unreader, buf, limit=0):
data = buf.getvalue()
data_find = data.find
self_get_data = self.get_data
buf_getvalue = buf.getvalue
while True:
idx = data_find(b"\r\n")
if idx >= 0:
# check if the request line is too large
if idx > limit > 0:
raise LimitRequestLine(idx, limit)
break
elif len(data) - 2 > limit > 0:
raise LimitRequestLine(len(data), limit)
self_get_data(unreader, buf)
data = buf_getvalue()
return (data[:idx], # request line,
data[idx + 2:]) # residue in the buffer, skip \r\n
def proxy_protocol(self, line):
"""\
Detect, check and parse proxy protocol.
:raises: ForbiddenProxyRequest, InvalidProxyLine.
:return: True for proxy protocol line else False
"""
if not self.cfg.proxy_protocol:
return False
if self.req_number != 1:
return False
if not line.startswith("PROXY"):
return False
self.proxy_protocol_access_check()
self.parse_proxy_protocol(line)
return True
def proxy_protocol_access_check(self):
# check in allow list
if isinstance(self.unreader, SocketUnreader):
try:
remote_host = self.unreader.sock.getpeername()[0]
except socket.error as e:
if e.args[0] == ENOTCONN:
raise ForbiddenProxyRequest("UNKNOW")
raise
if ("*" not in self.cfg.proxy_allow_ips and
remote_host not in self.cfg.proxy_allow_ips):
raise ForbiddenProxyRequest(remote_host)
def parse_proxy_protocol(self, line):
bits = line.split()
if len(bits) != 6:
raise InvalidProxyLine(line)
# Extract data
proto = bits[1]
s_addr = bits[2]
d_addr = bits[3]
# Validation
if proto not in ["TCP4", "TCP6"]:
raise InvalidProxyLine("protocol '%s' not supported" % proto)
if proto == "TCP4":
try:
socket.inet_pton(socket.AF_INET, s_addr)
socket.inet_pton(socket.AF_INET, d_addr)
except socket.error:
raise InvalidProxyLine(line)
elif proto == "TCP6":
try:
socket.inet_pton(socket.AF_INET6, s_addr)
socket.inet_pton(socket.AF_INET6, d_addr)
except socket.error:
raise InvalidProxyLine(line)
try:
s_port = int(bits[4])
d_port = int(bits[5])
except ValueError:
raise InvalidProxyLine("invalid port %s" % line)
if not ((0 <= s_port <= 65535) and (0 <= d_port <= 65535)):
raise InvalidProxyLine("invalid port %s" % line)
# Set data
self.proxy_protocol_info = {
"proxy_protocol": proto,
"client_addr": s_addr,
"client_port": s_port,
"proxy_addr": d_addr,
"proxy_port": d_port
}
def parse_request_line(self, line_bytes, METHOD_RE=METHOD_RE, VERSION_RE=VERSION_RE,
split_request_uri=split_request_uri, bytes_to_str=bytes_to_str):
bits = [bytes_to_str(bit) for bit in line_bytes.split(None, 2)]
if len(bits) != 3:
raise InvalidRequestLine(bytes_to_str(line_bytes))
# Method
if not METHOD_RE.match(bits[0]):
raise InvalidRequestMethod(bits[0])
self.method = bits[0].upper()
# URI
self.uri = bits[1]
try:
parts = split_request_uri(self.uri)
except ValueError:
raise InvalidRequestLine(bytes_to_str(line_bytes))
self.path = parts.path or ""
self.query = parts.query or ""
self.fragment = parts.fragment or ""
# Version
match = VERSION_RE.match(bits[2])
if match is None:
raise InvalidHTTPVersion(bits[2])
self.version = (int(match.group(1)), int(match.group(2)))
def set_body_reader(self, EOFReader=EOFReader, Body=Body, LengthReader=LengthReader):
super(Request, self).set_body_reader()
if isinstance(self.body.reader, EOFReader):
self.body = Body(LengthReader(self.unreader, 0))
| 14,190
|
Python
|
.py
| 332
| 32.26506
| 95
| 0.597968
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,173
|
errors.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/http/errors.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# We don't need to call super() in __init__ methods of our
# BaseException and Exception classes because we also define
# our own __str__ methods so there is no need to pass 'message'
# to the base class to get a meaningful output from 'str(exc)'.
# pylint: disable=super-init-not-called
class ParseException(Exception):
    """Base class for all HTTP parsing errors raised by this package."""
class NoMoreData(IOError):
    """Raised when the peer stopped sending before a full message arrived."""

    def __init__(self, buf=None):
        self.buf = buf  # whatever partial bytes had been collected

    def __str__(self):
        return "No more data after: %r" % (self.buf,)
class InvalidRequestLine(ParseException):
    """The HTTP request line could not be parsed (answered with a 400)."""

    def __init__(self, req):
        self.req = req
        self.code = 400

    def __str__(self):
        return "Invalid HTTP request line: %r" % (self.req,)
class InvalidRequestMethod(ParseException):
    """The method token in the request line is not a valid HTTP method."""

    def __init__(self, method):
        self.method = method

    def __str__(self):
        return "Invalid HTTP method: %r" % (self.method,)
class InvalidHTTPVersion(ParseException):
    """The HTTP-version token in the request line could not be parsed."""

    def __init__(self, version):
        self.version = version

    def __str__(self):
        return "Invalid HTTP Version: %r" % (self.version,)
class InvalidHeader(ParseException):
    """A header has an invalid value (e.g. a bad Content-Length)."""

    def __init__(self, hdr, req=None):
        self.hdr = hdr
        self.req = req  # the request being parsed, when available

    def __str__(self):
        return "Invalid HTTP Header: %r" % (self.hdr,)
class InvalidHeaderName(ParseException):
    """A header field name contains characters forbidden by the grammar."""

    def __init__(self, hdr):
        self.hdr = hdr

    def __str__(self):
        return "Invalid HTTP header name: %r" % (self.hdr,)
class InvalidChunkSize(IOError):
    """A chunk-size line in a chunked body is not valid hexadecimal."""

    def __init__(self, data):
        self.data = data

    def __str__(self):
        return "Invalid chunk size: %r" % (self.data,)
class ChunkMissingTerminator(IOError):
    """A chunk in a chunked body was not followed by the CRLF terminator."""

    def __init__(self, term):
        self.term = term  # the two bytes found where CRLF was expected

    def __str__(self):
        return "Invalid chunk terminator is not '\\r\\n': %r" % (self.term,)
class LimitRequestLine(ParseException):
    """The request line exceeded the configured size limit."""

    def __init__(self, size, max_size):
        self.size = size
        self.max_size = max_size

    def __str__(self):
        return "Request Line is too large (%s > %s)" % (self.size, self.max_size)
class LimitRequestHeaders(ParseException):
    """The header block exceeded a configured count or size limit."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
class InvalidProxyLine(ParseException):
    """A PROXY protocol line was malformed (answered with a 400)."""

    def __init__(self, line):
        self.line = line
        self.code = 400

    def __str__(self):
        return "Invalid PROXY line: %r" % (self.line,)
class ForbiddenProxyRequest(ParseException):
    """A PROXY line arrived from a peer outside the allow list (403)."""

    def __init__(self, host):
        self.host = host
        self.code = 403

    def __str__(self):
        return "Proxy request from %r not allowed" % (self.host,)
class InvalidSchemeHeaders(ParseException):
    """Two secure-scheme headers disagreed about http vs https."""

    def __str__(self):
        return "Contradictory scheme headers"
| 4,086
|
Python
|
.py
| 103
| 34.864078
| 81
| 0.69352
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,174
|
_sendfile.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/http/_sendfile.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import errno
import os
import sys
try:
    import ctypes
    import ctypes.util
except MemoryError:
    # selinux execmem denial
    # https://bugzilla.redhat.com/show_bug.cgi?id=488396
    raise ImportError

# Platforms whose libc sendfile() this module knows how to call.
# NOTE(review): 'linux2' is the Python 2 value of sys.platform; on
# Python 3 it is 'linux', so this ctypes fallback appears effectively
# disabled there (os.sendfile exists on Python 3.3+ anyway) - confirm.
SUPPORTED_PLATFORMS = (
    'darwin',
    'freebsd',
    'dragonfly',
    'linux2')

if sys.platform not in SUPPORTED_PLATFORMS:
    raise ImportError("sendfile isn't supported on this platform")

# Resolve sendfile(2) out of the C library once at import time;
# use_errno=True lets us read errno via ctypes.get_errno() later.
_libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
_sendfile = _libc.sendfile
def sendfile(fdout, fdin, offset, nbytes):
    """Zero-copy transfer of *nbytes* from *fdin* to *fdout* via libc sendfile.

    Dispatches on sys.platform because the C sendfile() signature differs
    between Darwin, the BSDs and Linux.  Returns the number of bytes sent;
    raises OSError on failure.
    """
    if sys.platform == 'darwin':
        _sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64,
                              ctypes.POINTER(ctypes.c_uint64), ctypes.c_voidp,
                              ctypes.c_int]
        _nbytes = ctypes.c_uint64(nbytes)
        # Darwin's signature is sendfile(fd, s, offset, &len, hdtr, flags):
        # the file descriptor comes first, hence the fdin/fdout order here.
        result = _sendfile(fdin, fdout, offset, _nbytes, None, 0)

        if result == -1:
            e = ctypes.get_errno()
            # On EAGAIN the kernel reports the bytes actually sent in _nbytes.
            # NOTE(review): a c_uint64's .value is never None, so the
            # 'is not None' guard looks vestigial - kept as-is.
            if e == errno.EAGAIN and _nbytes.value is not None:
                return _nbytes.value
            raise OSError(e, os.strerror(e))
        return _nbytes.value

    elif sys.platform in ('freebsd', 'dragonfly',):
        _sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64,
                              ctypes.c_uint64, ctypes.c_voidp,
                              ctypes.POINTER(ctypes.c_uint64), ctypes.c_int]
        _sbytes = ctypes.c_uint64()
        # BSD signature: sendfile(fd, s, offset, nbytes, hdtr, &sbytes, flags).
        result = _sendfile(fdin, fdout, offset, nbytes, None, _sbytes, 0)
        if result == -1:
            e = ctypes.get_errno()
            if e == errno.EAGAIN and _sbytes.value is not None:
                return _sbytes.value
            raise OSError(e, os.strerror(e))
        return _sbytes.value

    else:
        # Linux signature: sendfile(out_fd, in_fd, &offset, count);
        # the kernel updates *offset, the caller's int is left untouched.
        _sendfile.argtypes = [ctypes.c_int, ctypes.c_int,
                              ctypes.POINTER(ctypes.c_uint64), ctypes.c_size_t]

        _offset = ctypes.c_uint64(offset)
        sent = _sendfile(fdout, fdin, _offset, nbytes)
        if sent == -1:
            e = ctypes.get_errno()
            raise OSError(e, os.strerror(e))
        return sent
| 3,453
|
Python
|
.py
| 81
| 36.037037
| 78
| 0.674814
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,175
|
body.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/http/body.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from io import BytesIO
from zato.server.ext.zunicorn.http.errors import (NoMoreData, ChunkMissingTerminator,
InvalidChunkSize)
from zato.server.ext.zunicorn import six
class ChunkedReader:
    """Body reader for Transfer-Encoding: chunked payloads.

    De-chunking is done lazily by the parse_chunked() generator;
    read() pulls de-chunked fragments from it into an internal buffer.
    """

    def __init__(self, req, unreader, BytesIO=BytesIO):
        self.req = req
        # Generator yielding de-chunked payload fragments.
        self.parser = self.parse_chunked(unreader)
        self.buf = BytesIO()

    def read(self, size, integer_types=six.integer_types, BytesIO=BytesIO):
        """Return up to *size* de-chunked bytes."""
        if not isinstance(size, integer_types):
            raise TypeError("size must be an integral type")
        if size < 0:
            raise ValueError("Size must be positive.")
        if size == 0:
            return b""

        # Hoisted bound methods for the fill loop.
        buf_tell = self.buf.tell
        buf_write = self.buf.write

        if self.parser:
            while buf_tell() < size:
                try:
                    buf_write(next(self.parser))
                except StopIteration:
                    # Final zero-size chunk was seen: no more payload.
                    self.parser = None
                    break

        # Hand out the first *size* bytes, keep any overshoot buffered.
        data = self.buf.getvalue()
        ret, rest = data[:size], data[size:]
        self.buf = BytesIO()
        self.buf.write(rest)
        return ret

    def parse_trailers(self, unreader, data, BytesIO=BytesIO):
        """Consume optional trailer headers after the terminating chunk.

        Parsed trailers are stored on the request; any excess bytes are
        pushed back onto *unreader*.
        """
        buf = BytesIO()
        buf.write(data)

        buf_getvalue = buf.getvalue

        value = buf_getvalue()
        idx = value.find(b"\r\n\r\n")
        done = value[:2] == b"\r\n"
        # Accumulate until either a blank line (no trailers) or a full
        # trailer block terminated by CRLFCRLF has arrived.
        while idx < 0 and not done:
            self.get_data(unreader, buf.write)
            idx = buf_getvalue().find(b"\r\n\r\n")
            done = buf_getvalue()[:2] == b"\r\n"
        if done:
            # No trailers: just the bare CRLF ending the chunked body.
            unreader.unread(buf_getvalue()[2:])
            return b""
        self.req.trailers = self.req.parse_headers(buf_getvalue()[:idx])
        unreader.unread(buf_getvalue()[idx + 4:])

    def parse_chunked(self, unreader):
        """Generator yielding raw payload fragments, chunk by chunk."""
        (size, rest) = self.parse_chunk_size(unreader)
        while size > 0:
            # Yield data until the current chunk is fully consumed.
            while size > len(rest):
                size -= len(rest)
                yield rest
                rest = unreader.read()
                if not rest:
                    raise NoMoreData()
            yield rest[:size]
            # Remove \r\n after chunk
            rest = rest[size:]
            # NOTE(review): if the peer closes before sending the CRLF this
            # loop keeps appending b"" forever - matches upstream; confirm.
            while len(rest) < 2:
                rest += unreader.read()
            if rest[:2] != b'\r\n':
                raise ChunkMissingTerminator(rest[:2])
            (size, rest) = self.parse_chunk_size(unreader, data=rest[2:])

    def parse_chunk_size(self, unreader, data=None, BytesIO=BytesIO):
        """Read one chunk-size line; return (size, residue after the line).

        A size of zero triggers trailer parsing and returns (0, None).
        """
        buf = BytesIO()
        buf_write = buf.write
        if data is not None:
            buf_write(data)

        # Accumulate until the size line's CRLF arrives.
        idx = buf.getvalue().find(b"\r\n")
        while idx < 0:
            self.get_data(unreader, buf_write)
            idx = buf.getvalue().find(b"\r\n")

        data = buf.getvalue()
        line, rest_chunk = data[:idx], data[idx + 2:]

        # Chunk extensions after ';' are ignored; the size is hexadecimal.
        chunk_size = line.split(b";", 1)[0].strip()
        try:
            chunk_size = int(chunk_size, 16)
        except ValueError:
            raise InvalidChunkSize(chunk_size)

        if chunk_size == 0:
            try:
                self.parse_trailers(unreader, rest_chunk)
            except NoMoreData:
                pass
            return (0, None)
        return (chunk_size, rest_chunk)

    def get_data(self, unreader, buf_write, NoMoreData=NoMoreData):
        """Read one chunk from *unreader* into the supplied writer."""
        data = unreader.read()
        if not data:
            raise NoMoreData()
        buf_write(data)
class LengthReader:
    """Body reader for payloads framed by a Content-Length header."""

    def __init__(self, unreader, length):
        self.unreader = unreader
        self.length = length  # bytes still owed to the caller

    def read(self, size, integer_types=six.integer_types, BytesIO=BytesIO):
        """Return up to *size* bytes, never exceeding the declared length."""
        if not isinstance(size, integer_types):
            raise TypeError("size must be an integral type")

        # Never hand out more than the remaining content length.
        size = min(self.length, size)
        if size < 0:
            raise ValueError("Size must be positive.")
        if size == 0:
            return b""

        chunks = BytesIO()
        append = chunks.write
        filled = chunks.tell
        pull = self.unreader.read

        piece = pull()
        while piece:
            append(piece)
            if filled() >= size:
                break
            piece = pull()

        collected = chunks.getvalue()
        wanted, leftover = collected[:size], collected[size:]
        # Overshoot goes back to the unreader for the next read.
        self.unreader.unread(leftover)
        self.length -= size
        return wanted
class EOFReader:
    """Body reader for payloads framed only by EOF on the connection."""

    def __init__(self, unreader):
        self.unreader = unreader
        self.buf = six.BytesIO()
        self.finished = False  # set once the unreader reports EOF

    def _pop(self, size):
        # Slice *size* bytes off the front of the internal buffer and
        # keep the remainder buffered for the next call.
        pending = self.buf.getvalue()
        head, tail = pending[:size], pending[size:]
        self.buf = six.BytesIO()
        self.buf.write(tail)
        return head

    def read(self, size):
        """Return up to *size* bytes; short reads signal end of body."""
        if not isinstance(size, six.integer_types):
            raise TypeError("size must be an integral type")
        if size < 0:
            raise ValueError("Size must be positive.")
        if size == 0:
            return b""

        if not self.finished:
            # Fill the buffer until it holds more than *size* bytes or EOF.
            chunk = self.unreader.read()
            while chunk:
                self.buf.write(chunk)
                if self.buf.tell() > size:
                    break
                chunk = self.unreader.read()
            if not chunk:
                self.finished = True

        return self._pop(size)
class Body:
    """File-like, read-only view over a request body reader."""

    def __init__(self, reader, BytesIO=BytesIO):
        self.reader = reader
        self.buf = BytesIO()

    def __iter__(self):
        return self

    def __next__(self):
        ret = self.readline()
        if not ret:
            raise StopIteration()
        return ret
    next = __next__

    def getsize(self, size):
        """Normalize *size*: None or a negative value means 'everything'."""
        if size is None:
            return six.MAXSIZE
        elif not isinstance(size, six.integer_types):
            raise TypeError("size must be an integral type")
        elif size < 0:
            return six.MAXSIZE
        return size

    def read(self, size=None, BytesIO=BytesIO):
        """Read and return at most *size* bytes (all remaining if None)."""
        size = self.getsize(size)
        if size == 0:
            return b""
        if size < self.buf.tell():
            # Enough is already buffered: slice it off, keep the rest.
            data = self.buf.getvalue()
            ret, rest = data[:size], data[size:]
            self.buf = six.BytesIO()
            self.buf.write(rest)
            return ret

        # Hoisted lookups for the fill loop (hot path).
        self_buf_tell = self.buf.tell
        self_buf_write = self.buf.write
        self_reader_read = self.reader.read

        while size > self_buf_tell():
            data = self_reader_read(1024)
            if not data:
                break
            self_buf_write(data)

        data = self.buf.getvalue()
        ret, rest = data[:size], data[size:]
        self.buf = BytesIO()
        self.buf.write(rest)
        return ret

    def readline(self, size=None, BytesIO=BytesIO):
        """Read one line (up to *size* bytes), buffering any overshoot."""
        size = self.getsize(size)
        if size == 0:
            return b""

        data = self.buf.getvalue()
        self.buf = BytesIO()

        ret = []
        ret_append = ret.append
        self_reader_read = self.reader.read
        self_buf_write = self.buf.write

        while 1:
            # BUGFIX: 'find' must be resolved on the chunk read in THIS
            # iteration.  It was previously bound once before the loop,
            # so for lines spanning more than one read the stale method
            # kept searching the first chunk and ran past later newlines.
            idx = data.find(b"\n", 0, size)
            # idx+1 to include the newline; if no newline, take the whole
            # allowance when enough data is present, else take everything.
            idx = idx + 1 if idx >= 0 else size if len(data) >= size else 0
            if idx:
                ret_append(data[:idx])
                self_buf_write(data[idx:])
                break

            ret_append(data)
            size -= len(data)
            data = self_reader_read(min(1024, size))
            if not data:
                break

        return b"".join(ret)

    def readlines(self, size=None):
        """Return all remaining lines as a list (*size* is accepted and
        ignored, matching the original implementation)."""
        ret = []
        data = self.read()
        while data:
            pos = data.find(b"\n")
            if pos < 0:
                ret.append(data)
                data = b""
            else:
                line, data = data[:pos + 1], data[pos + 1:]
                ret.append(line)
        return ret
| 9,325
|
Python
|
.py
| 256
| 26.671875
| 85
| 0.569716
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,176
|
unreader.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/http/unreader.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import os
from zato.server.ext.zunicorn import six
# Classes that can undo reading data from
# a given type of data source.
class Unreader:
    """Buffered reader that can push bytes back ("unread") for re-reading.

    Subclasses supply chunk() to fetch the next piece of raw data.
    """

    def __init__(self):
        self.buf = six.BytesIO()

    def chunk(self):
        """Fetch the next raw chunk; must be provided by subclasses."""
        raise NotImplementedError()

    def read(self, size=None):
        """Read up to *size* bytes (a single chunk when *size* is None).

        Bytes previously pushed back via unread() are returned before
        any new data is fetched.
        """
        if size is not None and not isinstance(size, six.integer_types):
            raise TypeError("size parameter must be an int or long.")

        if size is not None:
            if size == 0:
                return b""
            if size < 0:
                # Negative means "no limit", same as passing None.
                size = None

        self.buf.seek(0, os.SEEK_END)

        if size is None and self.buf.tell():
            # Unlimited read with pushed-back data: drain the buffer.
            ret = self.buf.getvalue()
            self.buf = six.BytesIO()
            return ret

        if size is None:
            # Unlimited read, empty buffer: pass one chunk straight through.
            d = self.chunk()
            return d

        # Sized read: fill the buffer until it holds enough, or EOF.
        while self.buf.tell() < size:
            chunk = self.chunk()
            if not chunk:
                # EOF: return whatever was collected (may be short).
                ret = self.buf.getvalue()
                self.buf = six.BytesIO()
                return ret
            self.buf.write(chunk)

        # Slice off *size* bytes, keep the overshoot buffered.
        data = self.buf.getvalue()
        self.buf = six.BytesIO()
        self.buf.write(data[size:])
        return data[:size]

    def unread(self, data):
        """Append *data* so subsequent read() calls return it again."""
        self.buf.seek(0, os.SEEK_END)
        self.buf.write(data)
class SocketUnreader(Unreader):
    """Unreader whose chunks come from a connected socket."""

    def __init__(self, sock, max_chunk=8192):
        super(SocketUnreader, self).__init__()
        self.sock = sock
        # Upper bound on the number of bytes pulled per recv() call.
        self.mxchunk = max_chunk

    def chunk(self):
        # One blocking read from the socket; b"" signals EOF.
        return self.sock.recv(self.mxchunk)
class IterUnreader(Unreader):
    """Unreader whose chunks come from an arbitrary iterable."""

    def __init__(self, iterable):
        super(IterUnreader, self).__init__()
        self.iter = iter(iterable)

    def chunk(self):
        # Once the iterator is exhausted it is dropped and every further
        # call returns b"" to signal EOF.
        if not self.iter:
            return b""
        try:
            return six.next(self.iter)
        except StopIteration:
            self.iter = None
            return b""
| 3,263
|
Python
|
.py
| 86
| 31.116279
| 72
| 0.658629
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,177
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/http/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from zato.server.ext.zunicorn.http.message import Message, Request
from zato.server.ext.zunicorn.http.parser import RequestParser
__all__ = ['Message', 'Request', 'RequestParser']
| 1,545
|
Python
|
.py
| 31
| 48.548387
| 66
| 0.797342
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,178
|
wsgi.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/http/wsgi.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import io
import logging
import os
import regex as re
from zato.server.ext.zunicorn import SERVER_SOFTWARE, util
from zato.server.ext.zunicorn._compat import unquote_to_wsgi_str
from zato.server.ext.zunicorn.http.message import HEADER_RE
from zato.server.ext.zunicorn.http.errors import InvalidHeader, InvalidHeaderName
from zato.server.ext.zunicorn.six import string_types, binary_type, reraise
try:
# Python 3.3 has os.sendfile().
from os import sendfile
except ImportError:
try:
from ._sendfile import sendfile
except ImportError:
sendfile = None
# Send files in at most 1GB blocks as some operating systems can have problems
# with sending files in blocks over 2GB.
BLKSIZE = 0x3FFFFFFF

# Header values must not contain control characters (C0 range or DEL).
HEADER_VALUE_RE = re.compile(r'[\x00-\x1F\x7F]')

log = logging.getLogger(__name__)
class FileWrapper:
    """Minimal wsgi.file_wrapper: serves a file-like object in fixed-size
    blocks through the legacy __getitem__ iteration protocol."""

    def __init__(self, filelike, blksize=8192):
        self.filelike = filelike
        self.blksize = blksize
        # Expose the underlying close() so the server can dispose of us.
        if hasattr(filelike, 'close'):
            self.close = filelike.close

    def __getitem__(self, key):
        # The index is ignored: iteration simply reads sequential blocks
        # until EOF, which is signalled by raising IndexError.
        block = self.filelike.read(self.blksize)
        if not block:
            raise IndexError
        return block
def base_environ(cfg, FileWrapper=FileWrapper, SERVER_SOFTWARE=SERVER_SOFTWARE):
    """Return the WSGI environ keys shared by every request of this worker."""
    return {
        "wsgi.version": (1, 0),
        "wsgi.multithread": False,
        "wsgi.multiprocess": cfg.workers > 1,
        "wsgi.run_once": False,
        "wsgi.file_wrapper": FileWrapper,
        "SERVER_SOFTWARE": SERVER_SOFTWARE,
    }
def default_environ(req, sock, cfg, base_environ=base_environ):
    """Build the per-request WSGI environ on top of base_environ(cfg)."""
    environ = base_environ(cfg)
    environ["wsgi.input"] = req.body
    environ["gunicorn.socket"] = sock
    environ["REQUEST_METHOD"] = req.method
    environ["QUERY_STRING"] = req.query
    environ["RAW_URI"] = req.uri
    environ["SERVER_PROTOCOL"] = "HTTP/%s" % ".".join(str(v) for v in req.version)
    return environ
def proxy_environ(req):
    """Map req.proxy_protocol_info onto PROXY_* / REMOTE_* environ keys.

    Returns an empty dict when no PROXY protocol line was parsed.
    """
    details = req.proxy_protocol_info
    if not details:
        return {}

    return {
        "PROXY_PROTOCOL": details["proxy_protocol"],
        "REMOTE_ADDR": details["client_addr"],
        "REMOTE_PORT": str(details["client_port"]),
        "PROXY_ADDR": details["proxy_addr"],
        "PROXY_PORT": str(details["proxy_port"]),
    }
class Response:
def __init__(self, req, sock, cfg):
self.req = req
self.sock = sock
self.version = cfg.server_software
self.status = None
self.chunked = False
self.must_close = False
self.headers = []
self.headers_sent = False
self.response_length = None
self.sent = 0
self.upgrade = False
self.cfg = cfg
def force_close(self):
self.must_close = True
def should_close(self):
if self.must_close or self.req.should_close():
return True
if self.response_length is not None or self.chunked:
return False
if self.req.method == 'HEAD':
return False
if self.status_code < 200 or self.status_code in (204, 304):
return False
return True
def start_response(self, status, headers, exc_info=None):
if exc_info:
try:
if self.status and self.headers_sent:
reraise(exc_info[0], exc_info[1], exc_info[2])
finally:
exc_info = None
elif self.status is not None:
raise AssertionError("Response headers already set!")
self.status = status
# get the status code from the response here so we can use it to check
# the need for the connection header later without parsing the string
# each time.
try:
self.status_code = int(self.status.split()[0])
except ValueError:
self.status_code = None
self.process_headers(headers)
self.chunked = self.is_chunked()
return self.write
def process_headers(self, headers, HEADER_RE=HEADER_RE, HEADER_VALUE_RE=HEADER_VALUE_RE,
string_types=string_types, util_is_hoppish=util.is_hoppish):
self_headers_append = self.headers.append
for name, value in headers:
if not isinstance(name, string_types):
name = str(name)
if not isinstance(value, string_types):
value = str(value)
if HEADER_RE.search(name):
raise InvalidHeaderName('%r' % name)
if HEADER_VALUE_RE.search(value):
raise InvalidHeader('%r' % value)
value = str(value).strip()
lname = name.lower().strip()
if lname == "content-length":
self.response_length = int(value)
elif util_is_hoppish(name):
if lname == "connection":
# handle websocket
if value.lower().strip() == "upgrade":
self.upgrade = True
elif lname == "upgrade":
if value.lower().strip() == "websocket":
self_headers_append((name.strip(), value))
# ignore hopbyhop headers
continue
self_headers_append((name.strip(), value))
def is_chunked(self):
# Only use chunked responses when the client is
# speaking HTTP/1.1 or newer and there was
# no Content-Length header set.
if self.response_length is not None:
return False
elif self.req.version <= (1, 0):
return False
elif self.req.method == 'HEAD':
# Responses to a HEAD request MUST NOT contain a response body.
return False
elif self.status_code in (204, 304):
# Do not use chunked responses when the response is guaranteed to
# not have a response body.
return False
return True
def default_headers(self, util_http_date=util.http_date):
# set the connection header
if self.upgrade:
connection = "upgrade"
elif self.should_close():
connection = "close"
else:
connection = "keep-alive"
headers = [
"HTTP/%s.%s %s\r\n" % (self.req.version[0],
self.req.version[1], self.status),
"Server: %s\r\n" % self.version,
"Date: %s\r\n" % util_http_date(),
"Connection: %s\r\n" % connection
]
if self.chunked:
headers.append("Transfer-Encoding: chunked\r\n")
return headers
def send_headers(self, util_write=util.write, util_to_bytestring=util.to_bytestring):
if self.headers_sent:
return
tosend = self.default_headers()
tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers])
header_str = "%s\r\n" % "".join(tosend)
util_write(self.sock, util_to_bytestring(header_str, "ascii"))
self.headers_sent = True
def write(self, arg, binary_type=binary_type, util_write=util.write):
self.send_headers()
if not isinstance(arg, binary_type):
raise TypeError('{!r} is not a byte'.format(arg))
arglen = len(arg)
tosend = arglen
if self.response_length is not None:
if self.sent >= self.response_length:
# Never write more than self.response_length bytes
return
tosend = min(self.response_length - self.sent, tosend)
if tosend < arglen:
arg = arg[:tosend]
# Sending an empty chunk signals the end of the
# response and prematurely closes the response
if self.chunked and tosend == 0:
return
self.sent += tosend
util_write(self.sock, arg, self.chunked)
def can_sendfile(self):
return self.cfg.sendfile is not False and sendfile is not None
def sendfile(self, respiter):
if self.cfg.is_ssl or not self.can_sendfile():
return False
if not util.has_fileno(respiter.filelike):
return False
fileno = respiter.filelike.fileno()
try:
offset = os.lseek(fileno, 0, os.SEEK_CUR)
if self.response_length is None:
filesize = os.fstat(fileno).st_size
# The file may be special and sendfile will fail.
# It may also be zero-length, but that is okay.
if filesize == 0:
return False
nbytes = filesize - offset
else:
nbytes = self.response_length
except (OSError, io.UnsupportedOperation):
return False
self.send_headers()
if self.is_chunked():
chunk_size = "%X\r\n" % nbytes
self.sock.sendall(chunk_size.encode('utf-8'))
sockno = self.sock.fileno()
sent = 0
while sent != nbytes:
count = min(nbytes - sent, BLKSIZE)
sent += sendfile(sockno, fileno, offset + sent, count)
if self.is_chunked():
self.sock.sendall(b"\r\n")
os.lseek(fileno, offset, os.SEEK_SET)
return True
def write_file(self, respiter):
    """Send a file-wrapped response, preferring the zero-copy sendfile path."""
    if self.sendfile(respiter):
        return
    # Fallback: stream the iterator chunk by chunk through write().
    for chunk in respiter:
        self.write(chunk)
def close(self):
    """Finalize the response; for chunked TE emit the zero-length terminator."""
    # send_headers() is a no-op when the headers already went out.
    self.send_headers()
    if self.chunked:
        util.write_chunk(self.sock, b"")
def create(req, sock, client, server, cfg, Response=Response, default_environ=default_environ,
        string_types=string_types, binary_type=binary_type,
        unquote_to_wsgi_str=unquote_to_wsgi_str, proxy_environ=proxy_environ):
    """Build the (Response, WSGI environ) pair for one parsed request.

    Only the first five arguments are meant to be passed by callers; the
    trailing keyword defaults bind module globals locally for speed.
    """
    resp = Response(req, sock, cfg)

    # set initial environ
    environ = default_environ(req, sock, cfg)

    # default variables
    host = None
    script_name = os.environ.get("SCRIPT_NAME", "")

    # add the headers to the environ
    for hdr_name, hdr_value in req.headers:
        if hdr_name == "EXPECT":
            # handle expect
            if hdr_value.lower() == "100-continue":
                sock.send(b"HTTP/1.1 100 Continue\r\n\r\n")
        elif hdr_name == 'HOST':
            host = hdr_value
        elif hdr_name == "SCRIPT_NAME":
            script_name = hdr_value
        elif hdr_name == "CONTENT-TYPE":
            # CONTENT_TYPE/CONTENT_LENGTH are CGI keys without the HTTP_ prefix,
            # hence the `continue` to skip the generic prefixing below.
            environ['CONTENT_TYPE'] = hdr_value
            continue
        elif hdr_name == "CONTENT-LENGTH":
            environ['CONTENT_LENGTH'] = hdr_value
            continue

        # Everything else gets the conventional HTTP_ prefix; repeated
        # headers are folded into one comma-separated value.
        key = 'HTTP_' + hdr_name.replace('-', '_')
        if key in environ:
            hdr_value = "%s,%s" % (environ[key], hdr_value)
        environ[key] = hdr_value

    # set the url scheme
    environ['wsgi.url_scheme'] = req.scheme

    # set the REMOTE_* keys in environ
    # authors should be aware that REMOTE_HOST and REMOTE_ADDR
    # may not qualify the remote addr:
    # http://www.ietf.org/rfc/rfc3875
    if isinstance(client, string_types):
        environ['REMOTE_ADDR'] = client
    elif isinstance(client, binary_type):
        environ['REMOTE_ADDR'] = client.decode()
    else:
        environ['REMOTE_ADDR'] = client[0]
        environ['REMOTE_PORT'] = str(client[1])

    # handle the SERVER_*
    # Normally only the application should use the Host header but since the
    # WSGI spec doesn't support unix sockets, we are using it to create
    # viable SERVER_* if possible.
    if isinstance(server, string_types):
        server = server.split(":")
        if len(server) == 1:
            # unix socket
            if host:
                server = host.split(':')
                if len(server) == 1:
                    if req.scheme == "http":
                        server.append(80)
                    elif req.scheme == "https":
                        server.append(443)
                    else:
                        server.append('')
            else:
                # no host header given which means that we are not behind a
                # proxy, so append an empty port.
                server.append('')
    environ['SERVER_NAME'] = server[0]
    environ['SERVER_PORT'] = str(server[1])

    # set the path and script name
    path_info = req.path
    if script_name:
        # Strip the mount point so PATH_INFO is relative to the application.
        path_info = path_info.split(script_name, 1)[1]
    environ['PATH_INFO'] = unquote_to_wsgi_str(path_info)
    environ['SCRIPT_NAME'] = script_name

    # override the environ with the correct remote and server address if
    # we are behind a proxy using the proxy protocol.
    if req.proxy_protocol_info:
        environ.update(proxy_environ(req))
    return resp, environ
| 13,971
|
Python
|
.py
| 345
| 31.202899
| 94
| 0.607277
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,179
|
parser.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/http/parser.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from zato.server.ext.zunicorn.http.message import Request
from zato.server.ext.zunicorn.http.unreader import SocketUnreader, IterUnreader
class Parser:
    """Iterator that yields parsed HTTP messages read from a byte source."""

    # Subclasses set this to the concrete message class to construct.
    mesg_class = None

    def __init__(self, cfg, source):
        self.cfg = cfg
        # Sockets expose recv(); anything else is treated as an iterable
        # of byte chunks.
        unreader_cls = SocketUnreader if hasattr(source, "recv") else IterUnreader
        self.unreader = unreader_cls(source)
        self.mesg = None
        # Number of requests parsed so far (for keepalive connections).
        self.req_count = 0

    def __iter__(self):
        return self

    def __next__(self):
        prev = self.mesg

        # Stop if HTTP dictates a stop.
        if prev and prev.should_close():
            raise StopIteration()

        # Drain any unread body left over from the previous message.
        if prev:
            while prev.body.read(8192):
                pass

        # Parse the next request.
        self.req_count += 1
        self.mesg = self.mesg_class(self.cfg, self.unreader, self.req_count)
        if not self.mesg:
            raise StopIteration()
        return self.mesg

    # Python 2 iterator protocol alias.
    next = __next__
class RequestParser(Parser):
    """Parser specialization that yields inbound HTTP Request messages."""
    mesg_class = Request
| 2,555
|
Python
|
.py
| 61
| 36.590164
| 79
| 0.714806
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,180
|
statsd.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/instrument/statsd.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
"Bare-bones implementation of statsD's protocol, client-side"
import socket
import logging
from re import sub
from zato.server.ext.zunicorn.glogging import Logger
from zato.server.ext.zunicorn import six
# Instrumentation constants: keys looked up in a log record's "extra" dict
# (see Statsd.log) and the metric-type values that may appear under MTYPE_VAR.
METRIC_VAR = "metric"
VALUE_VAR = "value"
MTYPE_VAR = "mtype"
GAUGE_TYPE = "gauge"
COUNTER_TYPE = "counter"
HISTOGRAM_TYPE = "histogram"
class Statsd(Logger):
    """statsD-based instrumentation, that passes as a logger

    Mirrors log events to a statsD server over UDP while still delegating
    normal logging to the Logger base class.
    """
    def __init__(self, cfg):
        """host, port: statsD server
        """
        Logger.__init__(self, cfg)
        # Normalize the configured prefix so it ends with exactly one dot.
        self.prefix = sub(r"^(.+[^.]+)\.*$", "\\g<1>.", cfg.statsd_prefix)
        try:
            host, port = cfg.statsd_host
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            self.sock.connect((host, int(port)))
        except Exception:
            # Metrics are best-effort: with no socket they are silently dropped.
            self.sock = None

    # Log errors and warnings - each is both logged and counted.
    def critical(self, msg, *args, **kwargs):
        Logger.critical(self, msg, *args, **kwargs)
        self.increment("gunicorn.log.critical", 1)

    def error(self, msg, *args, **kwargs):
        Logger.error(self, msg, *args, **kwargs)
        self.increment("gunicorn.log.error", 1)

    def warning(self, msg, *args, **kwargs):
        Logger.warning(self, msg, *args, **kwargs)
        self.increment("gunicorn.log.warning", 1)

    def exception(self, msg, *args, **kwargs):
        Logger.exception(self, msg, *args, **kwargs)
        self.increment("gunicorn.log.exception", 1)

    # Special treatement for info, the most common log level
    def info(self, msg, *args, **kwargs):
        self.log(logging.INFO, msg, *args, **kwargs)

    # skip the run-of-the-mill logs
    def debug(self, msg, *args, **kwargs):
        self.log(logging.DEBUG, msg, *args, **kwargs)

    def log(self, lvl, msg, *args, **kwargs):
        """Log a given statistic if metric, value and type are present
        """
        try:
            extra = kwargs.get("extra", None)
            if extra is not None:
                metric = extra.get(METRIC_VAR, None)
                value = extra.get(VALUE_VAR, None)
                typ = extra.get(MTYPE_VAR, None)
                if metric and value and typ:
                    if typ == GAUGE_TYPE:
                        self.gauge(metric, value)
                    elif typ == COUNTER_TYPE:
                        self.increment(metric, value)
                    elif typ == HISTOGRAM_TYPE:
                        self.histogram(metric, value)
                    else:
                        # Unknown metric type - deliberately ignored.
                        pass

            # Log to parent logger only if there is something to say
            if msg:
                Logger.log(self, lvl, msg, *args, **kwargs)
        except Exception:
            Logger.warning(self, "Failed to log to statsd", exc_info=True)

    # access logging
    def access(self, resp, req, environ, request_time):
        """Measure request duration
        request_time is a datetime.timedelta
        """
        Logger.access(self, resp, req, environ, request_time)
        duration_in_ms = request_time.seconds * 1000 + float(request_time.microseconds) / 10 ** 3
        status = resp.status
        if isinstance(status, str):
            # e.g. "200 OK" -> 200
            status = int(status.split(None, 1)[0])
        self.histogram("gunicorn.request.duration", duration_in_ms)
        self.increment("gunicorn.requests", 1)
        self.increment("gunicorn.request.status.%d" % status, 1)

    # statsD methods
    # you can use those directly if you want
    def gauge(self, name, value):
        self._sock_send("{0}{1}:{2}|g".format(self.prefix, name, value))

    def increment(self, name, value, sampling_rate=1.0):
        self._sock_send("{0}{1}:{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))

    def decrement(self, name, value, sampling_rate=1.0):
        self._sock_send("{0}{1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))

    def histogram(self, name, value):
        self._sock_send("{0}{1}:{2}|ms".format(self.prefix, name, value))

    def _sock_send(self, msg):
        # UDP fire-and-forget; failures are logged but never raised to callers.
        try:
            if isinstance(msg, six.text_type):
                msg = msg.encode("ascii")
            if self.sock:
                self.sock.send(msg)
        except Exception:
            Logger.warning(self, "Error sending message to statsd", exc_info=True)
| 5,702
|
Python
|
.py
| 128
| 36.9375
| 97
| 0.642934
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,181
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/instrument/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
| 1,363
|
Python
|
.py
| 28
| 47.428571
| 65
| 0.798193
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,182
|
ggevent.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/workers/ggevent.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
from datetime import datetime
from functools import partial
from traceback import format_exc
import errno
import os
import sys
import time
_socket = __import__("socket")
# workaround on osx, disable kqueue
if sys.platform == "darwin":
os.environ['EVENT_NOKQUEUE'] = "1"
try:
import gevent
except ImportError:
raise RuntimeError("You need gevent installed to use this worker.")
from gevent.pool import Pool
from gevent.server import StreamServer
from gevent.socket import wait_write, socket
from gevent import pywsgi
from zato.common.api import OS_Env
from zato.common.util.platform_ import is_windows
from zato.server.ext.zunicorn import SERVER_SOFTWARE
from zato.server.ext.zunicorn.http.wsgi import base_environ
from zato.server.ext.zunicorn.workers.base_async import AsyncWorker
from zato.server.ext.zunicorn.http.wsgi import sendfile as o_sendfile
from zato.server.ext.zunicorn.util import is_forking
def _gevent_sendfile(fdout, fdin, offset, nbytes):
    """sendfile() variant that cooperates with gevent instead of blocking.

    On EAGAIN the greenlet yields until *fdout* is writable, then retries;
    every other OSError propagates to the caller.
    """
    while True:
        try:
            return o_sendfile(fdout, fdin, offset, nbytes)
        except OSError as exc:
            if exc.args[0] != errno.EAGAIN:
                raise
            # Socket buffer is full - wait until writable, then retry.
            wait_write(fdout)
def patch_sendfile():
    """Swap the wsgi module's sendfile for the gevent-aware variant, if any."""
    from zato.server.ext.zunicorn.http import wsgi
    # Only patch when os.sendfile was importable in the first place.
    if o_sendfile is not None:
        wsgi.sendfile = _gevent_sendfile
class GeventWorker(AsyncWorker):
    """Async worker that handles each connection on its own gevent greenlet."""

    # When set (see GeventPyWSGIWorker), requests are served by this
    # gevent.pywsgi server/handler pair instead of a raw StreamServer.
    server_class = None
    wsgi_handler = None

    def patch(self):
        """Gevent-monkey-patch the stdlib and rewrap the listener sockets."""
        from gevent import monkey
        monkey.noisy = False

        os.environ['DJANGO_SETTINGS_MODULE'] = 'zato.admin.settings'

        # if the new version is used make sure to patch subprocess
        if gevent.version_info[0] == 0:
            monkey.patch_all()
        else:
            monkey.patch_all(subprocess=True)

        # monkey patch sendfile to make it none blocking
        patch_sendfile()

        # patch sockets
        sockets = []
        for s in self.sockets:
            if sys.version_info[0] == 3:
                sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM,
                    fileno=s.sock.fileno()))
            else:
                sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM,
                    _sock=s))
        self.sockets = sockets

    def notify(self):
        """Heartbeat to the arbiter; exit when the parent process has changed."""
        super(GeventWorker, self).notify()
        if self.ppid != os.getppid():
            # We have forked only if we can fork on this system and if memory profiling is not enabled.
            needs_fork = is_forking and (not os.environ.get(OS_Env.Zato_Enable_Memory_Profiler))
            if needs_fork:
                self.log.info("Parent changed, shutting down: %s", self)
                sys.exit(0)

    def timeout_ctx(self):
        # Bound each keepalive read by cfg.keepalive seconds; the False flag
        # makes the Timeout silent, so next(parser) simply yields None.
        return gevent.Timeout(self.cfg.keepalive, False)

    def run(self):
        """Main loop: start one server per listener, then heartbeat until shutdown."""
        servers = []
        ssl_args = {}

        if self.cfg.is_ssl:
            ssl_args = dict(server_side=True, **self.cfg.ssl_options)

        for s in self.sockets:
            s.setblocking(1)
            pool = Pool(self.worker_connections)
            if self.server_class is not None:
                environ = base_environ(self.cfg)
                environ.update({
                    "wsgi.multithread": True,
                    "SERVER_SOFTWARE": SERVER_SOFTWARE,
                })
                server = self.server_class(
                    s, application=self.wsgi, spawn=pool, log=self.log,
                    handler_class=self.wsgi_handler, environ=environ,
                    **ssl_args)
            else:
                hfun = partial(self.handle, s)
                server = StreamServer(s, handle=hfun, spawn=pool, **ssl_args)

            server.start()
            servers.append(server)

        try:
            while self.alive:
                self.notify()
                gevent.sleep(1.0)

            # Stop accepting requests
            for server in servers:
                if hasattr(server, 'close'):  # gevent 1.0
                    server.close()
                if hasattr(server, 'kill'):  # gevent < 1.0
                    server.kill()

            # Handle current requests until graceful_timeout
            ts = time.time()
            while time.time() - ts <= self.cfg.graceful_timeout:
                accepting = 0
                for server in servers:
                    if server.pool.free_count() != server.pool.size:
                        accepting += 1

                # if no server is accepting a connection, we can exit
                if not accepting:
                    return

                self.notify()
                gevent.sleep(1.0)

            # Force kill all active the handlers
            self.log.warning("Worker graceful timeout (pid:%s)" % self.pid)
            for server in servers:
                server.stop(timeout=1)
        except KeyboardInterrupt:
            if is_windows:
                sys.exit(0)
            else:
                raise
        except Exception as e:
            self.log.warning('Exception in GeventWorker.run (pid:%s) -> `%s`', self.pid, format_exc())

    def handle(self, listener, client, addr):
        # Connected socket timeout defaults to socket.getdefaulttimeout().
        # This forces to blocking mode.
        client.setblocking(1)
        super(GeventWorker, self).handle(listener, client, addr)

    def handle_request(self, listener_name, req, sock, addr):
        # GreenletExit/SystemExit are swallowed so a killed greenlet does not
        # tear the worker down.
        try:
            super(GeventWorker, self).handle_request(listener_name, req, sock,
                addr)
        except gevent.GreenletExit:
            pass
        except SystemExit:
            pass

    def handle_quit(self, sig, frame):
        # Move this out of the signal handler so we can use
        # blocking calls. See #1126
        gevent.spawn(super(GeventWorker, self).handle_quit, sig, frame)

    def handle_usr1(self, sig, frame):
        # Make the gevent workers handle the usr1 signal
        # by deferring to a new greenlet. See #1645
        gevent.spawn(super(GeventWorker, self).handle_usr1, sig, frame)

    def init_process(self):
        # monkey patch here
        self.patch()

        # reinit the hub
        from gevent import hub
        hub.reinit()

        # then initialize the process
        super(GeventWorker, self).init_process()
class GeventResponse:
    """Minimal response facade passed to access logging by PyWSGIHandler."""

    # Class-level defaults; instances overwrite all three in __init__.
    status = None
    headers = None
    sent = None

    def __init__(self, status, headers, clength):
        # `sent` mirrors the response length, matching gunicorn's Response API.
        self.status, self.headers, self.sent = status, headers, clength
class PyWSGIHandler(pywsgi.WSGIHandler):
    """gevent.pywsgi handler wired into gunicorn-style access logging."""

    def log_request(self):
        # Duration comes from the handler's own start/finish timestamps.
        elapsed = (datetime.fromtimestamp(self.time_finish)
            - datetime.fromtimestamp(self.time_start))
        resp = GeventResponse(
            self.status,
            getattr(self, 'response_headers', {}),
            self.response_length)
        req_headers = self.headers.items() if hasattr(self, 'headers') else []
        self.server.log.access(resp, req_headers, self.environ, elapsed)

    def get_environ(self):
        env = super(PyWSGIHandler, self).get_environ()
        # Expose the raw socket and the undecoded request path to the app.
        env['gunicorn.sock'] = self.socket
        env['RAW_URI'] = self.path
        return env
class PyWSGIServer(pywsgi.WSGIServer):
    """Plain gevent.pywsgi server; exists so the worker can name its server class."""
    pass
class GeventPyWSGIWorker(GeventWorker):
    """Gevent worker that serves requests via gevent.pywsgi (see server_class)."""
    # These two attributes switch GeventWorker.run() from raw StreamServer
    # handling to the pywsgi server/handler pair defined above.
    server_class = PyWSGIServer
    wsgi_handler = PyWSGIHandler
| 8,886
|
Python
|
.py
| 218
| 31.720183
| 103
| 0.634061
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,183
|
base_async.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/workers/base_async.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import errno
import socket
import ssl
import sys
import zato.server.ext.zunicorn.http as http
import zato.server.ext.zunicorn.http.wsgi as wsgi
import zato.server.ext.zunicorn.util as util
import zato.server.ext.zunicorn.workers.base as base
from zato.server.ext.zunicorn import six
# Sentinel compared against the WSGI app's return value (see
# AsyncWorker.is_already_handled / handle_request): when returned, the
# request was fully handled elsewhere and no response should be written.
ALREADY_HANDLED = object()
class AsyncWorker(base.Worker):
    """Base class for async workers: drives the per-connection parse/handle loop."""

    def __init__(self, *args, **kwargs):
        super(AsyncWorker, self).__init__(*args, **kwargs)
        # Upper bound on simultaneous connections handled by this worker.
        self.worker_connections = self.cfg.worker_connections

    def timeout_ctx(self):
        """Return a context manager bounding one keepalive read; subclasses provide it."""
        raise NotImplementedError()

    def is_already_handled(self, respiter):
        # some workers will need to overload this function to raise a StopIteration
        return respiter == ALREADY_HANDLED

    def handle(self, listener, client, addr, RequestParser=http.RequestParser, util_close=util.close):
        """Serve one client connection, looping over requests when keepalive is on.

        The client socket is always closed in the finally block, whatever happens.
        """
        req = None
        try:
            parser = RequestParser(self.cfg, client)
            try:
                listener_name = listener.getsockname()
                if not self.cfg.keepalive:
                    req = next(parser)
                    self.handle_request(listener_name, req, client, addr)
                else:
                    # keepalive loop
                    proxy_protocol_info = {}
                    while True:
                        req = None
                        with self.timeout_ctx():
                            req = next(parser)
                        if not req:
                            break
                        # Proxy-protocol data arrives only on the first request;
                        # propagate it to the later ones on the same connection.
                        if req.proxy_protocol_info:
                            proxy_protocol_info = req.proxy_protocol_info
                        else:
                            req.proxy_protocol_info = proxy_protocol_info
                        self.handle_request(listener_name, req, client, addr)
            except http.errors.NoMoreData as e:
                self.log.debug("Ignored premature client disconnection. %s", e)
            except StopIteration as e:
                self.log.debug("Closing connection. %s", e)
            except ssl.SSLError:
                # pass to next try-except level
                six.reraise(*sys.exc_info())
            except EnvironmentError:
                # pass to next try-except level
                six.reraise(*sys.exc_info())
            except Exception as e:
                self.handle_error(req, client, addr, e)
        except ssl.SSLError as e:
            if e.args[0] == ssl.SSL_ERROR_EOF:
                self.log.debug("ssl connection closed")
                client.close()
            else:
                self.log.debug("Error processing SSL request.")
                self.handle_error(req, client, addr, e)
        except EnvironmentError as e:
            # EPIPE/ECONNRESET are routine client disconnects - log quietly.
            if e.errno not in (errno.EPIPE, errno.ECONNRESET):
                self.log.exception("Socket error processing request.")
            else:
                if e.errno == errno.ECONNRESET:
                    self.log.debug("Ignoring connection reset")
                else:
                    self.log.debug("Ignoring EPIPE")
        except Exception as e:
            self.handle_error(req, client, addr, e)
        finally:
            util_close(client)

    def handle_request(self, listener_name, req, sock, addr, ALREADY_HANDLED=ALREADY_HANDLED):
        """Run the WSGI app for one request.

        Returns True when the connection may continue; raising StopIteration
        tells handle() to leave the keepalive loop and close the connection.
        """
        environ = {}
        resp = None
        try:
            resp, environ = wsgi.create(req, sock, addr, listener_name, self.cfg)
            environ["wsgi.multithread"] = True
            self.nr += 1
            if not self.cfg.keepalive:
                resp.force_close()
            respiter = self.wsgi(environ, resp.start_response)
            if respiter == ALREADY_HANDLED:
                return False
            try:
                if isinstance(respiter, environ['wsgi.file_wrapper']):
                    resp.write_file(respiter)
                else:
                    for item in respiter:
                        resp.write(item)
                resp.close()
            finally:
                # Per the WSGI spec, the app's iterable must be closed if it can be.
                if hasattr(respiter, "close"):
                    respiter.close()
            if resp.should_close():
                raise StopIteration()
        except StopIteration:
            raise
        except EnvironmentError:
            # If the original exception was a socket.error we delegate
            # handling it to the caller (where handle() might ignore it)
            six.reraise(*sys.exc_info())
        except Exception:
            if resp and resp.headers_sent:
                # If the requests have already been sent, we should close the
                # connection to indicate the error.
                self.log.exception("Error handling request")
                try:
                    sock.shutdown(socket.SHUT_RDWR)
                    sock.close()
                except EnvironmentError:
                    pass
                raise StopIteration()
            raise
        return True
| 6,297
|
Python
|
.py
| 144
| 32.034722
| 102
| 0.598402
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,184
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/workers/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# Supported gunicorn worker classes. All entries point to the vendored copy
# under zato.server.ext.zunicorn: the previous "gunicorn.workers.ggevent"
# paths for the pywsgi variants referenced the external gunicorn package,
# not the GeventPyWSGIWorker defined in this tree's ggevent module.
SUPPORTED_WORKERS = {
    "gevent": "zato.server.ext.zunicorn.workers.ggevent.GeventWorker",
    "gevent_wsgi": "zato.server.ext.zunicorn.workers.ggevent.GeventPyWSGIWorker",
    "gevent_pywsgi": "zato.server.ext.zunicorn.workers.ggevent.GeventPyWSGIWorker",
}
| 1,623
|
Python
|
.py
| 34
| 46.147059
| 70
| 0.793801
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,185
|
base.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/workers/base.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
from datetime import datetime
import os
from random import randint
import signal
from ssl import SSLError
import sys
import time
import traceback
from zato.common.util.platform_ import is_posix
from zato.server.ext.zunicorn import six
from zato.server.ext.zunicorn import util
from zato.server.ext.zunicorn.workers.workertmp import PassThroughTmp, WorkerTmp
from zato.server.ext.zunicorn.reloader import reloader_engines
from zato.server.ext.zunicorn.http.errors import (
InvalidHeader, InvalidHeaderName, InvalidRequestLine, InvalidRequestMethod,
InvalidHTTPVersion, LimitRequestLine, LimitRequestHeaders,
)
from zato.server.ext.zunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
from zato.server.ext.zunicorn.http.errors import InvalidSchemeHeaders
from zato.server.ext.zunicorn.http.wsgi import default_environ, Response
from zato.server.ext.zunicorn.six import MAXSIZE
class Worker:
if is_posix:
SIGNALS = [getattr(signal, "SIG%s" % x)for x in "ABRT HUP QUIT INT TERM USR1 USR2 WINCH CHLD".split()]
else:
SIGNALS = []
PIPE = []
def __init__(self, age, ppid, sockets, app, timeout, cfg, log):
"""\
This is called pre-fork so it shouldn't do anything to the
current process. If there's a need to make process wide
changes you'll want to do that in ``self.init_process()``.
"""
self.age = age
self.pid = "[booting]"
self.ppid = ppid
self.sockets = sockets
self.app = app
self.timeout = timeout
self.cfg = cfg
self.booted = False
self.aborted = False
self.reloader = None
self.nr = 0
jitter = randint(0, cfg.max_requests_jitter)
self.max_requests = cfg.max_requests + jitter or MAXSIZE
self.alive = True
self.log = log
# Under POSIX, we use a real class that communicates with arbiter via temporary files.
# On other systems, this is a pass-through class that does nothing.
worker_tmp_class = WorkerTmp if is_posix else PassThroughTmp
self.tmp = worker_tmp_class(cfg)
def __str__(self):
return "<Worker %s>" % self.pid
def notify(self):
"""\
Your worker subclass must arrange to have this method called
once every ``self.timeout`` seconds. If you fail in accomplishing
this task, the master process will murder your workers.
"""
self.tmp.notify()
def run(self):
"""\
This is the mainloop of a worker process. You should override
this method in a subclass to provide the intended behaviour
for your particular evil schemes.
"""
raise NotImplementedError()
def init_process(self):
"""\
If you override this method in a subclass, the last statement
in the function should be to call this method with
super(MyWorkerClass, self).init_process() so that the ``run()``
loop is initiated.
"""
# Reseed the random number generator
util.seed()
# set environment' variables
if self.cfg.env:
for k, v in self.cfg.env.items():
os.environ[k] = v
if is_posix:
util.set_owner_process(self.cfg.uid, self.cfg.gid, initgroups=self.cfg.initgroups)
# Reseed the random number generator
util.seed()
# For waking ourselves up
self.PIPE = os.pipe()
for p in self.PIPE:
util.set_non_blocking(p)
util.close_on_exec(p)
# Prevent fd inheritance
for s in self.sockets:
util.close_on_exec(s)
util.close_on_exec(self.tmp.fileno())
self.log.close_on_exec()
self.init_signals()
# start the reloader
if self.cfg.reload:
def changed(fname):
self.log.info("Worker reloading: %s modified", fname)
self.alive = False
self.cfg.worker_int(self)
time.sleep(0.1)
sys.exit(0)
reloader_cls = reloader_engines[self.cfg.reload_engine]
self.reloader = reloader_cls(extra_files=self.cfg.reload_extra_files,
callback=changed)
self.reloader.start()
self.load_wsgi()
self.cfg.post_worker_init(self)
# Enter main run loop
self.booted = True
self.run()
def load_wsgi(self):
try:
self.wsgi = self.app.wsgi()
except SyntaxError as e:
if not self.cfg.reload:
raise
self.log.exception(e)
# fix from PR #1228
# storing the traceback into exc_tb will create a circular reference.
# per https://docs.python.org/2/library/sys.html#sys.exc_info warning,
# delete the traceback after use.
try:
_, exc_val, exc_tb = sys.exc_info()
self.reloader.add_extra_file(exc_val.filename)
tb_string = six.StringIO()
traceback.print_tb(exc_tb, file=tb_string)
self.wsgi = util.make_fail_app(tb_string.getvalue())
finally:
del exc_tb
def init_signals(self):
    """ Install this worker's signal handlers, first resetting any handlers
    inherited from the master process to their defaults.
    """
    # reset signaling
    for s in self.SIGNALS:
        signal.signal(s, signal.SIG_DFL)
    # init new signaling
    signal.signal(signal.SIGQUIT, self.handle_quit)
    signal.signal(signal.SIGTERM, self.handle_exit)
    signal.signal(signal.SIGINT, self.handle_quit)
    signal.signal(signal.SIGWINCH, self.handle_winch)
    signal.signal(signal.SIGUSR1, self.handle_usr1)
    signal.signal(signal.SIGABRT, self.handle_abort)

    # Don't let SIGTERM and SIGUSR1 disturb active requests
    # by interrupting system calls
    if hasattr(signal, 'siginterrupt'):  # python >= 2.6
        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGUSR1, False)

    if hasattr(signal, 'set_wakeup_fd'):
        # Write a byte to the worker's wake-up pipe (created in init_process)
        # whenever a signal arrives, so a blocked worker notices it promptly.
        signal.set_wakeup_fd(self.PIPE[1])
def handle_usr1(self, sig, frame):
    """ SIGUSR1 handler - reopen the log files, e.g. after external log rotation. """
    self.log.reopen_files()
def handle_exit(self, sig, frame):
    """ SIGTERM handler - mark the worker as no longer alive so its main loop can wind down. """
    self.alive = False
def handle_quit(self, sig, frame):
    """ SIGQUIT/SIGINT handler - run Zato's cleanup hook (best-effort) and exit. """
    self.alive = False

    try:
        self.app.zato_wsgi_app.cleanup_on_stop()
    except Exception:
        # At this point logging may not be available anymore hence we are using print() instead.
        from traceback import format_exc
        print('Exception in handle_quit', format_exc())
    finally:
        # Brief pause before exiting, then terminate the process
        time.sleep(0.1)
        sys.exit(0)
def handle_abort(self, sig, frame):
    """ SIGABRT handler - invoke the configured worker_abort hook and exit with status 1. """
    self.alive = False
    self.cfg.worker_abort(self)
    sys.exit(1)
def handle_error(self, req, client, addr, exc):
    """ Map a request-handling exception to an HTTP error response.

    Known parsing/protocol errors become 400 (or 403 for forbidden proxy
    requests and SSL errors); anything else becomes 500. The response is
    logged to the access log when a request object is available and then
    written directly to the client socket.
    """
    request_start = datetime.now()
    addr = addr or ('', -1)  # unix socket case
    if isinstance(exc, (InvalidRequestLine, InvalidRequestMethod,
                        InvalidHTTPVersion, InvalidHeader, InvalidHeaderName,
                        LimitRequestLine, LimitRequestHeaders,
                        InvalidProxyLine, ForbiddenProxyRequest,
                        InvalidSchemeHeaders,
                        SSLError)):

        status_int = 400
        reason = "Bad Request"

        # Choose a message (and possibly override status/reason) per exception type
        if isinstance(exc, InvalidRequestLine):
            mesg = "Invalid Request Line '%s'" % str(exc)
        elif isinstance(exc, InvalidRequestMethod):
            mesg = "Invalid Method '%s'" % str(exc)
        elif isinstance(exc, InvalidHTTPVersion):
            mesg = "Invalid HTTP Version '%s'" % str(exc)
        elif isinstance(exc, (InvalidHeaderName, InvalidHeader,)):
            mesg = "%s" % str(exc)
            if not req and hasattr(exc, "req"):
                req = exc.req  # for access log
        elif isinstance(exc, LimitRequestLine):
            mesg = "%s" % str(exc)
        elif isinstance(exc, LimitRequestHeaders):
            mesg = "Error parsing headers: '%s'" % str(exc)
        elif isinstance(exc, InvalidProxyLine):
            mesg = "'%s'" % str(exc)
        elif isinstance(exc, ForbiddenProxyRequest):
            reason = "Forbidden"
            mesg = "Request forbidden"
            status_int = 403
        elif isinstance(exc, InvalidSchemeHeaders):
            mesg = "%s" % str(exc)
        elif isinstance(exc, SSLError):
            reason = "Forbidden"
            mesg = "'%s'" % str(exc)
            status_int = 403

        msg = "Invalid request from ip={ip}: {error}"
        self.log.debug(msg.format(ip=addr[0], error=str(exc)))
    else:
        # Unexpected exception - treat as an internal server error
        if hasattr(req, "uri"):
            self.log.exception("Error handling request %s", req.uri)
        status_int = 500
        reason = "Internal Server Error"
        mesg = ""

    if req is not None:
        # We have enough context to emit a proper access-log entry
        request_time = datetime.now() - request_start
        environ = default_environ(req, client, self.cfg)
        environ['REMOTE_ADDR'] = addr[0]
        environ['REMOTE_PORT'] = str(addr[1])
        resp = Response(req, client, self.cfg)
        resp.status = "%s %s" % (status_int, reason)
        resp.response_length = len(mesg)
        self.log.access(resp, req, environ, request_time)

    try:
        util.write_error(client, status_int, reason, mesg)
    except:
        # Client may already be gone - nothing more we can do
        self.log.debug("Failed to send error message.")
def handle_winch(self, sig, fname):
    """ SIGWINCH handler - intentionally a no-op in workers; ignoring it fixes a crash on OpenBSD. """
    self.log.debug("worker: SIGWINCH ignored.")
| 11,177
|
Python
|
.py
| 259
| 33.455598
| 110
| 0.622952
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,186
|
workertmp.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/workers/workertmp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import os
import platform
import tempfile
from zato.server.ext.zunicorn import util
PLATFORM = platform.system()
IS_CYGWIN = PLATFORM.startswith('CYGWIN')
# ################################################################################################################################
# ################################################################################################################################
class PassThroughTmp:
    """ A do-nothing replacement for WorkerTmp, used under Windows where the
    tempfile-based mechanism is not available. Every method accepts and
    ignores any arguments so instances can be dropped in transparently.
    """
    def __init__(self, *_args, **_kwargs):
        pass

    def notify(self, *_args, **_kwargs):
        pass

    def last_update(self, *_args, **_kwargs):
        pass

    def fileno(self, *_args, **_kwargs):
        pass

    def close(self, *_args, **_kwargs):
        pass
# ################################################################################################################################
# ################################################################################################################################
class WorkerTmp:
    """ A worker heartbeat file: an (unlinked) temporary file whose metadata is
    touched by notify() and read back through last_update().
    NOTE(review): presumably polled by the master process to detect hung
    workers - confirm against the arbiter code.
    """
    def __init__(self, cfg):
        # Create the temp file with the configured umask, then restore it
        old_umask = os.umask(cfg.umask)
        fdir = cfg.worker_tmp_dir
        if fdir and not os.path.isdir(fdir):
            raise RuntimeError("%s doesn't exist. Can't create workertmp." % fdir)
        fd, name = tempfile.mkstemp(prefix="wgunicorn-", dir=fdir)

        # allows the process to write to the file
        util.chown(name, cfg.uid, cfg.gid)
        os.umask(old_umask)

        # unlink the file so we don't leak temporary files
        # (skipped on Cygwin, where unlinking an open file is not supported the same way)
        try:
            if not IS_CYGWIN:
                util.unlink(name)
            self._tmp = os.fdopen(fd, 'w+b')
        except:
            # Make sure the raw descriptor is not leaked if fdopen/unlink fails
            os.close(fd)
            raise

        # Toggles between 0 and 1 on each notify() call
        self.spinner = 0

    def notify(self):
        """ Touch the file so its change time advances - see last_update(). """
        try:
            # Flipping the file mode between 0 and 1 updates st_ctime cheaply
            self.spinner = (self.spinner + 1) % 2
            os.fchmod(self._tmp.fileno(), self.spinner)
        except AttributeError:
            # python < 2.6
            self._tmp.truncate(0)
            os.write(self._tmp.fileno(), b"X")

    def last_update(self):
        """ Return the file's change time, i.e. the time of the last notify(). """
        return os.fstat(self._tmp.fileno()).st_ctime

    def fileno(self):
        """ Return the underlying file descriptor. """
        return self._tmp.fileno()

    def close(self):
        """ Close the underlying file object. """
        return self._tmp.close()
# ################################################################################################################################
# ################################################################################################################################
| 3,934
|
Python
|
.py
| 87
| 39.850575
| 130
| 0.545954
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,187
|
wsgiapp.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/app/wsgiapp.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import os
from zato.server.ext.zunicorn.errors import ConfigError
from zato.server.ext.zunicorn.app.base import Application
from zato.server.ext.zunicorn import util
class WSGIApplication(Application):
    """ Application subclass that loads a generic WSGI app, either from a
    module URI given on the command line or from a Paste config file.
    """
    def init(self, parser, opts, args):
        """ Resolve the application location from CLI options. Returns Paste
        config settings when --paste is used, otherwise records the app URI.
        """
        if opts.paste:
            app_name = 'main'
            path = opts.paste
            if '#' in path:
                # 'config.ini#app_name' selects a named app within the file
                path, app_name = path.split('#')
            path = os.path.abspath(os.path.normpath(
                os.path.join(util.getcwd(), path)))

            if not os.path.exists(path):
                raise ConfigError("%r not found" % path)

            # paste application, load the config
            self.cfgurl = 'config:%s#%s' % (path, app_name)
            self.relpath = os.path.dirname(path)

            from .pasterapp import paste_config
            return paste_config(self.cfg, self.cfgurl, self.relpath)

        if len(args) < 1:
            parser.error("No application module specified.")

        self.cfg.set("default_proc_name", args[0])
        self.app_uri = args[0]

    def load_wsgiapp(self):
        # load the app
        return util.import_app(self.app_uri)

    def load_pasteapp(self):
        # load the paste app
        from .pasterapp import load_pasteapp
        return load_pasteapp(self.cfgurl, self.relpath, global_conf=self.cfg.paste_global_conf)

    def load(self):
        """ Return the WSGI callable, choosing the Paste or module loader. """
        if self.cfg.paste is not None:
            return self.load_pasteapp()
        else:
            return self.load_wsgiapp()
def run():
    """\
    The ``gunicorn`` command line runner for launching Gunicorn with
    generic WSGI applications.
    """
    from zato.server.ext.zunicorn.app.wsgiapp import WSGIApplication
    app = WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]")
    app.run()


if __name__ == '__main__':
    run()
| 3,170
|
Python
|
.py
| 73
| 37.506849
| 95
| 0.696812
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,188
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/app/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
| 1,363
|
Python
|
.py
| 28
| 47.428571
| 65
| 0.798193
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,189
|
base.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/zunicorn/app/base.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
"""
Copyright (C) 2019, Zato Source s.r.o. https://zato.io
Licensed under AGPLv3, see LICENSE.txt for terms and conditions.
"""
"""
BELOW IS THE ORIGINAL LICENSE ON WHICH THIS SOFTWARE IS BASED.
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org>
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# flake8: noqa
import os
import sys
import traceback
from zato.server.ext.zunicorn._compat import execfile_
from zato.server.ext.zunicorn import util
from zato.server.ext.zunicorn.arbiter import Arbiter
from zato.server.ext.zunicorn.config import Config, get_default_config_file
from zato.server.ext.zunicorn import debug
class BaseApplication:
    """
    An application interface for configuring and loading
    the various necessities for any given web framework.
    """
    def __init__(self, usage=None, prog=None):
        self.usage = usage          # usage string for the CLI parser
        self.cfg = None             # Config instance, set by load_default_config()
        self.callable = None        # cached WSGI callable, set lazily by wsgi()
        self.prog = prog            # program name for the CLI parser
        self.logger = None
        self.do_load_config()

    def do_load_config(self):
        """
        Loads the configuration; any failure is printed and terminates the process.
        """
        try:
            self.load_default_config()
            self.load_config()
        except Exception as e:
            print("\nError: %s" % str(e), file=sys.stderr)
            sys.stderr.flush()
            sys.exit(1)

    def load_default_config(self):
        # init configuration
        self.cfg = Config(self.usage, prog=self.prog)

    def init(self, parser, opts, args):
        # Subclasses resolve app-specific CLI options here
        raise NotImplementedError

    def load(self):
        # Subclasses return the application callable here
        raise NotImplementedError

    def load_config(self):
        """
        This method is used to load the configuration from one or several input(s).
        Custom Command line, configuration file.
        You have to override this method in your class.
        """
        raise NotImplementedError

    def reload(self):
        """ Re-run configuration loading, optionally re-enabling trace output. """
        self.do_load_config()
        if self.cfg.spew:
            debug.spew()

    def wsgi(self):
        """ Return the application callable, loading it on first use. """
        if self.callable is None:
            self.callable = self.load()
        return self.callable

    def run(self):
        """ Hand control to the Arbiter; a RuntimeError is printed and exits the process. """
        try:
            Arbiter(self).run()
        except RuntimeError as e:
            print("\nError: %s\n" % e, file=sys.stderr)
            sys.stderr.flush()
            sys.exit(1)
class Application(BaseApplication):
    """ Adds concrete configuration handling on top of BaseApplication:
    config files/modules, environment-provided arguments and CLI arguments,
    merged in increasing order of precedence.
    """

    # 'init' and 'load' methods are implemented by WSGIApplication.
    # pylint: disable=abstract-method

    def chdir(self):
        # chdir to the configured path before loading,
        # default is the current dir
        os.chdir(self.cfg.chdir)

        # add the path to sys.path
        if self.cfg.chdir not in sys.path:
            sys.path.insert(0, self.cfg.chdir)

    def get_config_from_filename(self, filename):
        """ Execute a Python config file and return its namespace as a dict.
        A failure to execute it is printed and terminates the process.
        """
        if not os.path.exists(filename):
            raise RuntimeError("%r doesn't exist" % filename)

        # Minimal module-like namespace in which the config file executes
        cfg = {
            "__builtins__": __builtins__,
            "__name__": "__config__",
            "__file__": filename,
            "__doc__": None,
            "__package__": None
        }
        try:
            execfile_(filename, cfg, cfg)
        except Exception:
            print("Failed to read config file: %s" % filename, file=sys.stderr)
            traceback.print_exc()
            sys.stderr.flush()
            sys.exit(1)

        return cfg

    def get_config_from_module_name(self, module_name):
        """ Import a config module and return its namespace as a dict. """
        return vars(util.import_module(module_name))

    def load_config_from_module_name_or_filename(self, location):
        """
        Loads the configuration file: the file is a python file, otherwise raise an RuntimeError
        Exception or stop the process if the configuration file contains a syntax error.
        Locations prefixed with 'python:' are imported as modules, 'file:' (or no prefix)
        are executed as files.
        """
        if location.startswith("python:"):
            module_name = location[len("python:"):]
            cfg = self.get_config_from_module_name(module_name)
        else:
            if location.startswith("file:"):
                filename = location[len("file:"):]
            else:
                filename = location
            cfg = self.get_config_from_filename(filename)

        for k, v in cfg.items():
            # Ignore unknown names
            if k not in self.cfg.settings:
                continue
            try:
                self.cfg.set(k.lower(), v)
            except:
                print("Invalid value for %s: %s\n" % (k, v), file=sys.stderr)
                sys.stderr.flush()
                raise

        return cfg

    def load_config_from_file(self, filename):
        return self.load_config_from_module_name_or_filename(location=filename)

    def load_config(self):
        """ Merge configuration from the app, a config file and the environment,
        with command-line settings applied last (highest precedence).
        """
        # parse console args
        parser = self.cfg.parser()
        args = parser.parse_args()

        # optional settings from apps
        cfg = self.init(parser, args, args.args)

        # set up import paths and follow symlinks
        self.chdir()

        # Load up the any app specific configuration
        if cfg:
            for k, v in cfg.items():
                self.cfg.set(k.lower(), v)

        env_args = parser.parse_args(self.cfg.get_cmd_args_from_env())

        # Config file location: CLI flag wins over environment, which wins
        # over the default config file (if any)
        if args.config:
            self.load_config_from_file(args.config)
        elif env_args.config:
            self.load_config_from_file(env_args.config)
        else:
            default_config = get_default_config_file()
            if default_config is not None:
                self.load_config_from_file(default_config)

        # Load up environment configuration
        for k, v in vars(env_args).items():
            if v is None:
                continue
            if k == "args":
                continue
            self.cfg.set(k.lower(), v)

        # Lastly, update the configuration with any command line settings.
        for k, v in vars(args).items():
            if v is None:
                continue
            if k == "args":
                continue
            self.cfg.set(k.lower(), v)

        # current directory might be changed by the config now
        # set up import paths and follow symlinks
        self.chdir()

    def run(self):
        """ Validate-only mode (--check-config), then daemonize/extend sys.path
        as configured and enter the arbiter loop via the base class.
        """
        if self.cfg.check_config:
            try:
                self.load()
            except:
                msg = "\nError while loading the application:\n"
                print(msg, file=sys.stderr)
                traceback.print_exc()
                sys.stderr.flush()
                sys.exit(1)
            sys.exit(0)

        if self.cfg.spew:
            debug.spew()

        if self.cfg.daemon:
            util.daemonize(self.cfg.enable_stdio_inheritance)

        # set python paths
        if self.cfg.pythonpath:
            paths = self.cfg.pythonpath.split(",")
            for path in paths:
                pythonpath = os.path.abspath(path)
                if pythonpath not in sys.path:
                    sys.path.insert(0, pythonpath)

        super(Application, self).run()
| 7,977
|
Python
|
.py
| 208
| 29.25
| 96
| 0.613731
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,190
|
utf8validator.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/utf8validator.py
|
# coding=utf-8
# flake8: noqa
###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Note:
##
## This code is a Python implementation of the algorithm
##
## "Flexible and Economical UTF-8 Decoder"
##
## by Bjoern Hoehrmann
##
## bjoern@hoehrmann.de
## http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
class Utf8Validator(object):
    """
    Incremental UTF-8 validator keeping only minimal state between calls.

    Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
    Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/):
    a table-driven DFA where the first 256 entries classify octets and
    the remainder encode state transitions.
    """

    ## DFA transitions
    UTF8VALIDATOR_DFA = [
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 00..1f
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 20..3f
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 40..5f
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 60..7f
        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, # 80..9f
        7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, # a0..bf
        8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, # c0..df
        0xa,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x4,0x3,0x3, # e0..ef
        0xb,0x6,0x6,0x6,0x5,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8, # f0..ff
        0x0,0x1,0x2,0x3,0x5,0x8,0x7,0x1,0x1,0x1,0x4,0x6,0x1,0x1,0x1,0x1, # s0..s0
        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1, # s1..s2
        1,2,1,1,1,1,1,2,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1, # s3..s4
        1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,3,1,1,1,1,1,1, # s5..s6
        1,3,1,1,1,1,1,3,1,3,1,1,1,1,1,1,1,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1, # s7..s8
    ]

    UTF8_ACCEPT = 0
    UTF8_REJECT = 1

    def __init__(self):
        self.reset()

    def decode(self, b):
        """
        Eat one UTF-8 octet, and validate on the fly.

        Returns UTF8_ACCEPT when enough octets have been consumed, in which case
        self.codepoint contains the decoded Unicode code point.

        Returns UTF8_REJECT when invalid UTF-8 was encountered.

        Returns some other positive integer when more octets need to be eaten.
        """
        octet_class = Utf8Validator.UTF8VALIDATOR_DFA[b]
        if self.state == Utf8Validator.UTF8_ACCEPT:
            # Start of a sequence: keep only the octet's payload bits
            self.codepoint = (0xff >> octet_class) & b
        else:
            # Continuation octet: shift in its six payload bits
            self.codepoint = (b & 0x3f) | (self.codepoint << 6)
        self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + self.state * 16 + octet_class]
        return self.state

    def reset(self):
        """
        Reset validator to start new incremental UTF-8 decode/validation.
        """
        self.state = Utf8Validator.UTF8_ACCEPT
        self.codepoint = 0
        self.i = 0

    def validate(self, ba):
        """
        Incrementally validate a chunk of bytes provided as bytearray.

        Will return a quad (valid?, endsOnCodePoint?, currentIndex, totalIndex).

        As soon as an octet is encountered which renders the octet sequence
        invalid, a quad with valid? == False is returned. currentIndex returns
        the index within the currently consumed chunk, and totalIndex the
        index within the total consumed sequence that was the point of bail out.

        When valid? == True, currentIndex will be len(ba) and totalIndex the
        total amount of consumed bytes.
        """
        dfa = Utf8Validator.UTF8VALIDATOR_DFA
        cur_state = self.state

        idx = 0  # make sure 'idx' is set if when 'ba' is empty
        for idx, octet in enumerate(ba):
            ## optimized version of decode(), since we are not interested in actual code points
            cur_state = dfa[256 + (cur_state << 4) + dfa[octet]]
            if cur_state == Utf8Validator.UTF8_REJECT:
                self.i += idx
                self.state = cur_state
                return False, False, idx, self.i

        self.i += idx
        self.state = cur_state

        return True, cur_state == Utf8Validator.UTF8_ACCEPT, idx, self.i
| 4,775
|
Python
|
.py
| 103
| 39.737864
| 95
| 0.590724
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,191
|
streaming.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/streaming.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
import struct
from struct import unpack
from zato.server.ext.ws4py.utf8validator import Utf8Validator
from zato.server.ext.ws4py.messaging import TextMessage, BinaryMessage, CloseControlMessage,\
PingControlMessage, PongControlMessage
from zato.server.ext.ws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from zato.server.ext.ws4py.exc import FrameTooLargeException, ProtocolException, InvalidBytesError,\
TextFrameEncodingException, UnsupportedFrameTypeException, StreamClosed
from zato.server.ext.ws4py.compat import py3k
VALID_CLOSING_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011]
class Stream(object):
    def __init__(self, always_mask=False, expect_masking=True):
        """ Represents a websocket stream of bytes flowing in and out.

        The stream doesn't know about the data provider itself and
        doesn't even know about sockets. Instead the stream simply
        yields for more bytes whenever it requires them. The stream owner
        is responsible to provide the stream with those bytes until
        a frame can be interpreted.

        .. code-block:: python
           :linenos:

           >>> s = Stream()
           >>> s.parser.send(BYTES)
           >>> s.has_messages
           False
           >>> s.parser.send(MORE_BYTES)
           >>> s.has_messages
           True
           >>> s.message
           <TextMessage ... >

        Set ``always_mask`` to mask all frames built.

        Set ``expect_masking`` to indicate masking will be
        checked on all parsed frames.
        """

        self.message = None
        """
        Parsed test or binary messages. Whenever the parser
        reads more bytes from a fragment message, those bytes
        are appended to the most recent message.
        """

        self.pings = []
        """
        Parsed ping control messages. They are instances of
        :class:`ws4py.messaging.PingControlMessage`
        """

        self.pongs = []
        """
        Parsed pong control messages. They are instances of
        :class:`ws4py.messaging.PongControlMessage`
        """

        self.closing = None
        """
        Parsed close control messsage. Instance of
        :class:`ws4py.messaging.CloseControlMessage`
        """

        self.errors = []
        """
        Detected errors while parsing. Instances of
        :class:`ws4py.messaging.CloseControlMessage`
        """

        self._parser = None
        """
        Parser in charge to process bytes it is fed with.
        """

        self.always_mask = always_mask
        self.expect_masking = expect_masking

    @property
    def parser(self):
        # Lazily create and prime the receiver() generator
        if self._parser is None:
            self._parser = self.receiver()
            # Python generators must be initialized once.
            next(self.parser)
        return self._parser

    def _cleanup(self):
        """
        Frees the stream's resources rendering it unusable.
        """
        self.message = None
        if self._parser is not None:
            if not self._parser.gi_running:
                self._parser.close()
            self._parser = None
        self.errors = None
        self.pings = None
        self.pongs = None
        self.closing = None

    def text_message(self, text):
        """
        Returns a :class:`ws4py.messaging.TextMessage` instance
        ready to be built. Convenience method so
        that the caller doesn't need to import the
        :class:`ws4py.messaging.TextMessage` class itself.
        """
        return TextMessage(text=text)

    def binary_message(self, bytes):
        """
        Returns a :class:`ws4py.messaging.BinaryMessage` instance
        ready to be built. Convenience method so
        that the caller doesn't need to import the
        :class:`ws4py.messaging.BinaryMessage` class itself.
        """
        return BinaryMessage(bytes)

    @property
    def has_message(self):
        """
        Checks if the stream has received any message
        which, if fragmented, is now completed.
        """
        if self.message is not None:
            return self.message.completed

        return False

    def close(self, code=4043, reason='Z4043'):
        """
        Returns a close control message built from
        a :class:`ws4py.messaging.CloseControlMessage` instance,
        using the given status ``code`` and ``reason`` message.
        """
        return CloseControlMessage(code=code, reason=reason)

    def ping(self, data=''):
        """
        Returns a ping control message built from
        a :class:`ws4py.messaging.PingControlMessage` instance.
        """
        return PingControlMessage(data).single(mask=self.always_mask)

    def pong(self, data=''):
        """
        Returns a ping control message built from
        a :class:`ws4py.messaging.PongControlMessage` instance.
        """
        return PongControlMessage(data).single(mask=self.always_mask)

    def receiver(self):
        """
        Parser that keeps trying to interpret bytes it is fed with as
        incoming frames part of a message.

        Control message are single frames only while data messages, like text
        and binary, may be fragmented accross frames.

        The way it works is by instanciating a :class:`wspy.framing.Frame` object,
        then running its parser generator which yields how much bytes
        it requires to performs its task. The stream parser yields this value
        to its caller and feeds the frame parser.

        When the frame parser raises :exc:`StopIteration`, the stream parser
        tries to make sense of the parsed frame. It dispatches the frame's bytes
        to the most appropriate message type based on the frame's opcode.

        Overall this makes the stream parser totally agonstic to
        the data provider.
        """
        utf8validator = Utf8Validator()
        running = True
        frame = None
        while running:
            frame = Frame()
            while 1:
                try:
                    # Yield how many bytes the frame parser needs next and
                    # feed back whatever the caller sent in.
                    some_bytes = (yield next(frame.parser))
                    frame.parser.send(some_bytes)
                except GeneratorExit:
                    running = False
                    break
                except StopIteration:
                    # A complete frame was parsed - interpret it below
                    frame._cleanup()
                    some_bytes = frame.body

                    # Let's avoid unmasking when there is no payload
                    if some_bytes:
                        if frame.masking_key and self.expect_masking:
                            some_bytes = frame.unmask(some_bytes)
                        elif not frame.masking_key and self.expect_masking:
                            msg = CloseControlMessage(code=1002, reason='Missing masking when expected')
                            self.errors.append(msg)
                            break
                        elif frame.masking_key and not self.expect_masking:
                            msg = CloseControlMessage(code=1002, reason='Masked when not expected')
                            self.errors.append(msg)
                            break
                        else:
                            # If we reach this stage, it's because
                            # the frame wasn't masked and we didn't expect
                            # it anyway. Therefore, on py2k, the bytes
                            # are actually a str object and can't be used
                            # in the utf8 validator as we need integers
                            # when we get each byte one by one.
                            # Our only solution here is to convert our
                            # string to a bytearray.
                            some_bytes = bytearray(some_bytes)

                    if frame.opcode == OPCODE_TEXT:
                        if self.message and not self.message.completed:
                            # We got a text frame before we completed the previous one
                            msg = CloseControlMessage(code=1002, reason='Received a new message before completing previous')
                            self.errors.append(msg)
                            break

                        m = TextMessage(some_bytes)
                        m.completed = (frame.fin == 1)
                        self.message = m

                        if some_bytes:
                            is_valid, end_on_code_point, _, _ = utf8validator.validate(some_bytes)

                            if not is_valid or (m.completed and not end_on_code_point):
                                self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
                                break

                    elif frame.opcode == OPCODE_BINARY:
                        if self.message and not self.message.completed:
                            # We got a binary frame before we completed the previous one
                            msg = CloseControlMessage(code=1002, reason='Received a new message before completing previous')
                            self.errors.append(msg)
                            break

                        m = BinaryMessage(some_bytes)
                        m.completed = (frame.fin == 1)
                        self.message = m

                    elif frame.opcode == OPCODE_CONTINUATION:
                        m = self.message
                        if m is None:
                            self.errors.append(CloseControlMessage(code=1002, reason='Message not started yet'))
                            break

                        m.extend(some_bytes)
                        m.completed = (frame.fin == 1)
                        if m.opcode == OPCODE_TEXT:
                            if some_bytes:
                                is_valid, end_on_code_point, _, _ = utf8validator.validate(some_bytes)

                                if not is_valid or (m.completed and not end_on_code_point):
                                    self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
                                    break

                    elif frame.opcode == OPCODE_CLOSE:
                        code = 1000
                        reason = ""
                        if frame.payload_length == 0:
                            self.closing = CloseControlMessage(code=1000)
                        elif frame.payload_length == 1:
                            # A close payload must be at least two bytes (the status code)
                            self.closing = CloseControlMessage(code=1002, reason='Payload has invalid length')
                        else:
                            try:
                                # at this stage, some_bytes have been unmasked
                                # so actually are held in a bytearray
                                code = int(unpack("!H", bytes(some_bytes[0:2]))[0])
                            except struct.error:
                                code = 1002
                                reason = 'Failed at decoding closing code'
                            else:
                                # Those codes are reserved or plainly forbidden
                                if code not in VALID_CLOSING_CODES and not (2999 < code < 5000):
                                    reason = 'Invalid Closing Frame Code: %d' % code
                                    code = 1002
                                elif frame.payload_length > 1:
                                    reason = some_bytes[2:] if frame.masking_key else frame.body[2:]

                                    if not py3k: reason = bytearray(reason)
                                    is_valid, end_on_code_point, _, _ = utf8validator.validate(reason)
                                    if not is_valid or not end_on_code_point:
                                        self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
                                        break
                                    reason = bytes(reason)
                            self.closing = CloseControlMessage(code=code, reason=reason)

                    elif frame.opcode == OPCODE_PING:
                        self.pings.append(PingControlMessage(some_bytes))

                    elif frame.opcode == OPCODE_PONG:
                        self.pongs.append(PongControlMessage(some_bytes))

                    else:
                        # Unknown opcode - unsupported data type
                        self.errors.append(CloseControlMessage(code=1003))

                    # Frame fully handled - leave the inner loop and start a new frame
                    break

                except ProtocolException:
                    self.errors.append(CloseControlMessage(code=1002))
                    break
                except FrameTooLargeException:
                    self.errors.append(CloseControlMessage(code=1002, reason="Frame was too large"))
                    break

            frame._cleanup()
            frame.body = None
            frame = None

            # A completed message ends on a code point boundary - restart validation
            if self.message is not None and self.message.completed:
                utf8validator.reset()

        utf8validator.reset()
        utf8validator = None

        self._cleanup()
| 13,185
|
Python
|
.py
| 271
| 32.273063
| 124
| 0.538677
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,192
|
async_websocket.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/async_websocket.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
__doc__ = """
WebSocket implementation that relies on two new Python
features:
* asyncio to provide the high-level interface above transports
* yield from to delegate to the reading stream whenever more
bytes are required
You can use these implementations in that context
and benefit from those features whilst using ws4py.
Strictly speaking this module probably doesn't have to
be called async_websocket but it feels this will be its typical
usage and is probably more readable than
delegated_generator_websocket_on_top_of_asyncio.py
"""
import asyncio
import types
from zato.server.ext.ws4py.websocket import WebSocket as _WebSocket
from zato.server.ext.ws4py.messaging import Message
__all__ = ['WebSocket', 'EchoWebSocket']
class WebSocket(_WebSocket):
    def __init__(self, proto):
        """
        A :pep:`3156` ready websocket handler that works
        well in a coroutine-aware loop such as the one provided
        by the asyncio module.

        The provided `proto` instance is a
        :class:`asyncio.Protocol` subclass instance that will
        be used internally to read and write from the
        underlying transport.

        Because the base :class:`ws4py.websocket.WebSocket`
        class is still coupled a bit to the socket interface,
        we have to override a little more than necessary
        to play nice with the :pep:`3156` interface. Hopefully,
        some day this will be cleaned out.
        """
        _WebSocket.__init__(self, None)
        self.started = False
        self.proto = proto

    @property
    def local_address(self):
        """
        Local endpoint address as a tuple.
        """
        if not self._local_address:
            self._local_address = self.proto.reader.transport.get_extra_info('sockname')
            # IPv6 addresses come back as 4-tuples; keep only (host, port)
            if len(self._local_address) == 4:
                self._local_address = self._local_address[:2]
        return self._local_address

    @property
    def peer_address(self):
        """
        Peer endpoint address as a tuple.
        """
        if not self._peer_address:
            self._peer_address = self.proto.reader.transport.get_extra_info('peername')
            # IPv6 addresses come back as 4-tuples; keep only (host, port)
            if len(self._peer_address) == 4:
                self._peer_address = self._peer_address[:2]
        return self._peer_address

    def once(self):
        """
        The base class directly is used in conjunction with
        the :class:`ws4py.manager.WebSocketManager` which is
        not actually used with the asyncio implementation
        of ws4py. So let's make it clear it shan't be used.
        """
        # Bug fix: the original did `raise NotImplemented()` which raises a
        # TypeError (NotImplemented is a constant, not an exception class).
        raise NotImplementedError()

    def close_connection(self):
        """
        Close the underlying transport, draining any
        pending output first.
        """
        # Modernized from `@asyncio.coroutine` + `yield from` (removed in
        # Python 3.11) and `asyncio.async` (a SyntaxError since Python 3.7,
        # where `async` became a keyword; `ensure_future` replaced it in 3.4.4).
        async def closeit():
            await self.proto.writer.drain()
            self.proto.writer.close()
        asyncio.ensure_future(closeit())

    def _write(self, data):
        """
        Write ``data`` to the underlying transport asynchronously.
        """
        async def sendit(data):
            self.proto.writer.write(data)
            await self.proto.writer.drain()
        asyncio.ensure_future(sendit(data))

    async def run(self):
        """
        Coroutine that runs until the websocket
        exchange is terminated. It also calls the
        `opened()` method to indicate the exchange
        has started.
        """
        self.started = True
        try:
            self.opened()
            reader = self.proto.reader
            while True:
                data = await reader.read(self.reading_buffer_size)
                if not self.process(data):
                    return False
        finally:
            self.terminate()
        # NOTE(review): unreachable in practice (the loop only exits via
        # `return False` or an exception); kept for fidelity with the original.
        return True
class EchoWebSocket(WebSocket):
    def received_message(self, message):
        """
        Echo the incoming ``message`` straight back to the
        endpoint it arrived from, preserving its binary flag.
        """
        data, is_binary = message.data, message.is_binary
        self.send(data, is_binary)
| 4,015
|
Python
|
.py
| 111
| 28.153153
| 88
| 0.637252
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,193
|
exc.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/exc.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
# Bug fix: the original __all__ listed 'UnsupportedFrameTypeException' and
# 'TextFrameEncodingException' twice; duplicates removed.
__all__ = ['WebSocketException', 'FrameTooLargeException', 'ProtocolException',
           'UnsupportedFrameTypeException', 'TextFrameEncodingException',
           'StreamClosed', 'HandshakeError', 'InvalidBytesError']

class WebSocketException(Exception):
    """Base class for all ws4py-specific errors."""

class ProtocolException(WebSocketException):
    """The peer violated the WebSocket framing protocol."""

class FrameTooLargeException(WebSocketException):
    """A frame exceeded the permitted payload size."""

class UnsupportedFrameTypeException(WebSocketException):
    """A frame carried an opcode this endpoint does not support."""

class TextFrameEncodingException(WebSocketException):
    """A text frame could not be encoded or decoded as valid UTF-8."""

class InvalidBytesError(WebSocketException):
    """Received bytes could not be parsed at all."""

class StreamClosed(Exception):
    """The underlying byte stream was closed mid-operation."""

class HandshakeError(WebSocketException):
    """The HTTP upgrade handshake failed; ``msg`` explains why."""
    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
| 838
|
Python
|
.py
| 18
| 41.833333
| 79
| 0.776543
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,194
|
manager.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/manager.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
__doc__ = """
The manager module provides a selected classes to
handle websocket's execution.
Initially the rationale was to:
- Externalize the way the CherryPy server had been setup
as its websocket management was too tightly coupled with
the plugin implementation.
- Offer a management that could be used by other
server or client implementations.
- Move away from the threaded model to the event-based
model by relying on `select` or `epoll` (when available).
A simple usage for handling websocket clients:
.. code-block:: python
from zato.server.ext.ws4py.client import WebSocketBaseClient
from zato.server.ext.ws4py.manager import WebSocketManager
m = WebSocketManager()
class EchoClient(WebSocketBaseClient):
def handshake_ok(self):
m.add(self) # register the client once the handshake is done
def received_message(self, msg):
print str(msg)
m.start()
client = EchoClient('ws://localhost:9000/ws')
client.connect()
m.join() # blocks forever
Managers are not compulsory but hopefully will help your
workflow. For clients, you can still rely on threaded, gevent or
tornado based implementations of course.
"""
import logging
import select
import threading
import time
from zato.server.ext.ws4py import format_addresses
from zato.server.ext.ws4py.compat import py3k
logger = logging.getLogger('zato_web_socket')
class SelectPoller(object):
    """
    A socket poller built on :func:`select.select`, which
    determines the file descriptors with data ready to read.
    Works on every platform.
    """
    def __init__(self, timeout=0.1):
        """``timeout`` is the maximum number of seconds one poll may block."""
        self._fds = []
        self.timeout = timeout

    def release(self):
        """
        Cleanup resources by dropping all registered descriptors.
        """
        self._fds = []

    def register(self, fd):
        """
        Add ``fd`` to the set polled on the next call,
        ignoring descriptors that are already registered.
        """
        if fd in self._fds:
            return
        self._fds.append(fd)

    def unregister(self, fd):
        """
        Stop watching ``fd``; a no-op when it was never registered.
        """
        try:
            self._fds.remove(fd)
        except ValueError:
            pass

    def poll(self):
        """
        Poll once and return the list of descriptors that are
        ready to be read. With nothing registered, simply sleep
        for the timeout so callers do not busy-loop.
        """
        if not self._fds:
            time.sleep(self.timeout)
            return []
        readable, _, _ = select.select(self._fds, [], [], self.timeout)
        return readable
class EPollPoller(object):
    """
    An epoll-based poller that determines which file
    descriptors have data available to read.
    Available on Linux only.
    """
    def __init__(self, timeout=0.1):
        """``timeout`` is the maximum number of seconds one poll may block."""
        self.poller = select.epoll()
        self.timeout = timeout

    def release(self):
        """
        Cleanup resources by closing the epoll descriptor.
        """
        self.poller.close()

    def register(self, fd):
        """
        Register ``fd`` for read-readiness events. Registration
        failures (already registered, invalid fd) are deliberately
        swallowed — this poller is historically best-effort here.
        """
        try:
            self.poller.register(fd, select.EPOLLIN | select.EPOLLPRI)
        except IOError:
            pass

    def unregister(self, fd):
        """
        Stop watching ``fd``.
        """
        self.poller.unregister(fd)

    def poll(self):
        """
        Poll once and yield each ready-to-be-read file descriptor.

        Bug fix: the original tested ``event | EPOLLIN | EPOLLPRI``, which is
        always non-zero, so every event was yielded regardless of its type.
        We now test with ``&``. EPOLLHUP and EPOLLERR stay in the mask because
        epoll reports them unconditionally and the reader must still wake up
        to detect a closed or broken peer.
        """
        ready_mask = (select.EPOLLIN | select.EPOLLPRI
                      | select.EPOLLHUP | select.EPOLLERR)
        events = self.poller.poll(timeout=self.timeout)
        for fd, event in events:
            if event & ready_mask:
                yield fd
class KQueuePoller(object):
    """
    NOTE(review): despite its name, this class is a verbatim copy of
    ``EPollPoller`` and uses ``select.epoll`` — it does not use kqueue.
    The name is kept for backward compatibility with existing callers.
    Determines which file descriptors have data available to read.
    Available on Linux only.
    """
    def __init__(self, timeout=0.1):
        """``timeout`` is the maximum number of seconds one poll may block."""
        self.poller = select.epoll()
        self.timeout = timeout

    def release(self):
        """
        Cleanup resources by closing the epoll descriptor.
        """
        self.poller.close()

    def register(self, fd):
        """
        Register ``fd`` for read-readiness events. Registration
        failures (already registered, invalid fd) are deliberately
        swallowed — this poller is historically best-effort here.
        """
        try:
            self.poller.register(fd, select.EPOLLIN | select.EPOLLPRI)
        except IOError:
            pass

    def unregister(self, fd):
        """
        Stop watching ``fd``.
        """
        self.poller.unregister(fd)

    def poll(self):
        """
        Poll once and yield each ready-to-be-read file descriptor.

        Bug fix: the original tested ``event | EPOLLIN | EPOLLPRI``, which is
        always non-zero, so every event was yielded regardless of its type.
        We now test with ``&``. EPOLLHUP and EPOLLERR stay in the mask because
        epoll reports them unconditionally and the reader must still wake up
        to detect a closed or broken peer.
        """
        ready_mask = (select.EPOLLIN | select.EPOLLPRI
                      | select.EPOLLHUP | select.EPOLLERR)
        events = self.poller.poll(timeout=self.timeout)
        for fd, event in events:
            if event & ready_mask:
                yield fd
class WebSocketManager(threading.Thread):
    def __init__(self, poller=None):
        """
        An event-based websocket manager. By event-based, we mean
        that the websockets will be called when their
        sockets have data to be read from.
        The manager itself runs in its own thread as not to
        be the blocking mainloop of your application.
        The poller's implementation is automatically chosen
        with ``epoll`` if available else ``select`` unless you
        provide your own ``poller``.
        """
        threading.Thread.__init__(self)
        # Guards self.websockets and self.poller across threads
        self.lock = threading.Lock()
        # Maps socket file descriptor -> WebSocket instance
        self.websockets = {}
        self.running = False
        if poller:
            self.poller = poller
        else:
            if hasattr(select, "epoll"):
                self.poller = EPollPoller()
                logger.info("Using epoll")
            else:
                self.poller = SelectPoller()
                logger.info("Using select as epoll is not available")
    def __len__(self):
        # Number of currently managed websockets
        return len(self.websockets)
    def __iter__(self):
        if py3k:
            return iter(self.websockets.values())
        else:
            return self.websockets.itervalues()
    def __contains__(self, ws):
        fd = ws.sock.fileno()
        # just in case the file descriptor was reused
        # we actually check the instance (well, this might
        # also have been reused...)
        return self.websockets.get(fd) is ws
    def add(self, websocket):
        """
        Manage a new websocket.
        First calls its :meth:`opened() <ws4py.websocket.WebSocket.opened>`
        method and register its socket against the poller
        for reading events.
        """
        if websocket in self:
            return
        logger.info("Managing websocket %s" % format_addresses(websocket))
        # opened() is called outside the lock, before the socket is polled
        websocket.opened()
        with self.lock:
            fd = websocket.sock.fileno()
            self.websockets[fd] = websocket
            self.poller.register(fd)
    def remove(self, websocket):
        """
        Remove the given ``websocket`` from the manager.
        This does not call its :meth:`closed() <ws4py.websocket.WebSocket.closed>`
        method as it's out-of-band by your application
        or from within the manager's run loop.
        """
        if websocket not in self:
            return
        logger.info("Removing websocket %s" % format_addresses(websocket))
        with self.lock:
            fd = websocket.sock.fileno()
            self.websockets.pop(fd, None)
            self.poller.unregister(fd)
    def stop(self):
        """
        Mark the manager as terminated and
        releases its resources.
        """
        self.running = False
        with self.lock:
            self.websockets.clear()
            self.poller.release()
    def run(self):
        """
        Manager's mainloop executed from within a thread.
        Constantly poll for read events and, when available,
        call related websockets' `once` method to
        read and process the incoming data.
        If the :meth:`once() <ws4py.websocket.WebSocket.once>`
        method returns a `False` value, its :meth:`terminate() <ws4py.websocket.WebSocket.terminate>`
        method is also applied to properly close
        the websocket and its socket is unregistered from the poller.
        Note that websocket shouldn't take long to process
        their data or they will block the remaining
        websockets with data to be handled. As for what long means,
        it's up to your requirements.
        """
        self.running = True
        while self.running:
            # NOTE(review): polling is performed under the lock, so add()
            # and remove() block for up to the poller's timeout per cycle.
            with self.lock:
                polled = self.poller.poll()
            if not self.running:
                break
            for fd in polled:
                if not self.running:
                    break
                ws = self.websockets.get(fd)
                if ws and not ws.terminated:
                    # once() returning False means the socket failed or closed:
                    # deregister it, then terminate the websocket if needed.
                    if not ws.once():
                        with self.lock:
                            fd = ws.sock.fileno()
                            self.websockets.pop(fd, None)
                            self.poller.unregister(fd)
                        if not ws.terminated:
                            logger.info("Terminating websocket %s" % format_addresses(ws))
                            ws.terminate()
    def close_all(self, code=1001, message='Server is shutting down'):
        """
        Execute the :meth:`close() <ws4py.websocket.WebSocket.close>`
        method of each registered websockets to initiate the closing handshake.
        It doesn't wait for the handshake to complete properly.
        """
        with self.lock:
            logger.info("Closing all websockets with [%d] '%s'" % (code, message))
            for ws in iter(self):
                ws.close(code=code, reason=message)
    def broadcast(self, message, binary=False):
        """
        Broadcasts the given message to all registered
        websockets, at the time of the call.
        Broadcast may fail on a given registered peer
        but this is silent as it's not the method's
        purpose to handle websocket's failures.
        """
        # Copy under the lock, then send outside it so slow peers
        # do not block add()/remove() on other threads.
        with self.lock:
            websockets = self.websockets.copy()
            if py3k:
                ws_iter = iter(websockets.values())
            else:
                ws_iter = websockets.itervalues()
        for ws in ws_iter:
            if not ws.terminated:
                try:
                    ws.send(message, binary)
                except:
                    # Deliberate best-effort: per-peer send failures are ignored
                    pass
| 10,479
|
Python
|
.py
| 292
| 26.373288
| 101
| 0.594568
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,195
|
__init__.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/__init__.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of ws4py nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# type: ignore
import logging
import logging.handlers as handlers
__author__ = "Sylvain Hellegouarch"
__version__ = "0.3.5"
__all__ = ['WS_KEY', 'WS_VERSION', 'configure_logger', 'format_addresses']
# Magic GUID appended to the client's Sec-WebSocket-Key during the handshake
# (RFC 6455, section 1.3).
WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
# Supported Sec-WebSocket-Version values (8 = draft hybi-08, 13 = RFC 6455).
WS_VERSION = (8, 13)
def configure_logger(stdout=True, filepath=None, level=logging.INFO):
    """
    Configure and return the 'zato_web_socket' logger.

    When ``filepath`` is given, a rotating file handler (10 MB, 3 backups)
    is attached; when ``stdout`` is true, a stream handler writing to
    sys.stdout is attached as well. Both use the same format and ``level``.
    """
    ws_logger = logging.getLogger('zato_web_socket')
    ws_logger.setLevel(level)
    formatter = logging.Formatter("[%(asctime)s] %(levelname)s %(message)s")

    new_handlers = []
    if filepath:
        new_handlers.append(
            handlers.RotatingFileHandler(filepath, maxBytes=10485760, backupCount=3))
    if stdout:
        import sys
        new_handlers.append(logging.StreamHandler(sys.stdout))

    for handler in new_handlers:
        handler.setLevel(level)
        handler.setFormatter(formatter)
        ws_logger.addHandler(handler)

    return ws_logger
def format_addresses(ws):
    """
    Return a human-readable description of both endpoints of *ws*.

    When both addresses are (host, port) tuples, the result names the
    local and remote sides; otherwise only the local binding is shown.
    """
    local = ws.local_address
    remote = ws.peer_address
    if not (isinstance(local, tuple) and isinstance(remote, tuple)):
        return "[Bound to '%s']" % local
    return "[Local => %s:%d | Remote => %s:%d]" % (local + remote)
| 2,732
|
Python
|
.py
| 61
| 41.377049
| 90
| 0.735338
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,196
|
websocket.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/websocket.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
import logging
import socket
import time
import threading
from json import dumps
from types import GeneratorType
# pyOpenSSL is optional; when it is missing, define a stub exception class
# so `except pyOpenSSLError` clauses below remain valid (and simply never match).
try:
    from OpenSSL.SSL import Error as pyOpenSSLError
except ImportError:
    class pyOpenSSLError(Exception):
        pass
from zato.common.api import GENERIC, WEB_SOCKET
from zato.common.marshal_.api import Model
from zato.common.util.config import replace_query_string_items
from zato.server.ext.ws4py.streaming import Stream
from zato.server.ext.ws4py.messaging import Message, PingControlMessage
# Initial number of bytes requested per socket read; later reads use whatever
# size the stream parser asks for (see WebSocket.process).
Default_Read_Size = 200
# Socket timeouts, in seconds, taken from the server-wide WEB_SOCKET defaults.
Default_Socket_Read_Timeout = WEB_SOCKET.DEFAULT.Socket_Read_Timeout
Default_Socket_Write_Timeout = WEB_SOCKET.DEFAULT.Socket_Write_Timeout
logger = logging.getLogger('zato_web_socket')
__all__ = ['WebSocket', 'EchoWebSocket', 'Heartbeat']
class Heartbeat(threading.Thread):
    """
    Background thread that, every ``frequency`` seconds, sends an
    unsolicited ping to the peer of ``websocket``. If sending fails
    with a socket error, the websocket's connection is closed, which
    triggers its `closed` handler.
    """
    def __init__(self, websocket, frequency=2.0):
        """``frequency`` - seconds between pings; falsy disables the thread
        when this object is used as a context manager."""
        threading.Thread.__init__(self)
        self.websocket = websocket
        self.frequency = frequency

    def __enter__(self):
        # Only start the thread when a real frequency was configured
        if self.frequency:
            self.start()
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.stop()

    def stop(self):
        # Signals the run loop to exit after its current sleep
        self.running = False

    def run(self):
        self.running = True
        while self.running:
            time.sleep(self.frequency)
            if self.websocket.terminated:
                break
            try:
                self.websocket.send(PingControlMessage(data='{}'))
            except socket.error as exc:
                logger.info('WSX PingControl error -> %s', exc)
                self.websocket.server_terminated = True
                self.websocket.close_connection()
                break
class WebSocket(object):
    """ Represents a websocket endpoint and provides a high level interface to drive the endpoint.
    """
    # This will be provided by subclasses
    url:'str'
    def __init__(self, server, sock, protocols=None, extensions=None, environ=None, heartbeat_freq=None,
        socket_read_timeout=None, socket_write_timeout=None):
        """ The ``sock`` is an opened connection
        resulting from the websocket handshake.
        If ``protocols`` is provided, it is a list of protocols
        negotiated during the handshake as is ``extensions``.
        If ``environ`` is provided, it is a copy of the WSGI environ
        dictionnary from the underlying WSGI server.
        """
        self.address_masked = replace_query_string_items(server, self.url)
        self.stream = Stream(always_mask=False)
        """
        Underlying websocket stream that performs the websocket
        parsing to high level objects. By default this stream
        never masks its messages. Clients using this class should
        set the ``stream.always_mask`` fields to ``True``
        and ``stream.expect_masking`` fields to ``False``.
        """
        self.protocols = protocols
        """
        List of protocols supported by this endpoint.
        Unused for now.
        """
        self.extensions = extensions
        """
        List of extensions supported by this endpoint.
        Unused for now.
        """
        self.sock = sock
        """
        Underlying connection.
        """
        self._is_secure = hasattr(sock, '_ssl') or hasattr(sock, '_sslobj')
        """
        Tell us if the socket is secure or not.
        """
        self.client_terminated = False
        """
        Indicates if the client has been marked as terminated.
        """
        self.server_terminated = False
        """
        Indicates if the server has been marked as terminated.
        """
        self.reading_buffer_size = Default_Read_Size
        """
        Current connection reading buffer size.
        """
        self.environ = environ
        """
        WSGI environ dictionary.
        """
        self.heartbeat_freq = heartbeat_freq
        """
        At which interval the heartbeat will be running.
        Set this to `0` or `None` to disable it entirely.
        """
        self.socket_read_timeout = socket_read_timeout or Default_Socket_Read_Timeout
        self.socket_write_timeout = socket_write_timeout or Default_Socket_Write_Timeout
        self._local_address = None
        self._peer_address = None
        # These will be used by self.terminate
        self.last_close_code = 4003
        self.last_close_reason = GENERIC.InitialReason
    @property
    def local_address(self):
        """
        Local endpoint address as a tuple
        """
        if not self._local_address:
            self._local_address = self.sock.getsockname()
            # IPv6 sockets return 4-tuples; keep only (host, port)
            if len(self._local_address) == 4:
                self._local_address = self._local_address[:2]
        return self._local_address
    @property
    def peer_address(self):
        """
        Peer endpoint address as a tuple
        """
        if not self._peer_address:
            self._peer_address = self.sock.getpeername()
            # IPv6 sockets return 4-tuples; keep only (host, port)
            if len(self._peer_address) == 4:
                self._peer_address = self._peer_address[:2]
        return self._peer_address
    # NOTE(review): the code below invokes self.opened(), self.closed() and
    # self.received_message(), which are not defined in this class — they are
    # presumably provided by subclasses. The z-prefixed methods here look like
    # renamed default implementations; confirm against the subclasses.
    def zopened(self):
        """
        Called by the server when the upgrade handshake
        has succeeeded.
        """
        pass
    def close(self, code=4041, reason='Z4041'):
        """
        Call this method to initiate the websocket connection
        closing by sending a close frame to the connected peer.
        The ``code`` is the status code representing the
        termination's reason.
        Once this method is called, the ``server_terminated``
        attribute is set. Calling this method several times is
        safe as the closing frame will be sent only the first
        time.
        .. seealso:: Defined Status Codes http://tools.ietf.org/html/rfc6455#section-7.4.1
        """
        # Set for self.terminate to use
        self.last_close_code = code
        self.last_close_reason = reason
        if not self.server_terminated:
            self.server_terminated = True
            self._write(self.stream.close(code=code, reason=reason).single(mask=self.stream.always_mask))
    def zclosed(self, code, reason=None):
        """
        Called when the websocket stream and connection are finally closed.
        The provided ``code`` is status set by the other point and
        ``reason`` is a human readable message.
        .. seealso:: Defined Status Codes http://tools.ietf.org/html/rfc6455#section-7.4.1
        """
        pass
    @property
    def terminated(self):
        """
        Returns ``True`` if both the client and server have been
        marked as terminated.
        """
        client_terminated = self.client_terminated is True
        server_terminated = self.server_terminated is True
        is_terminated = client_terminated and server_terminated
        return is_terminated
    @property
    def connection(self):
        # Alias for the underlying socket object
        return self.sock
    def close_connection(self):
        """
        Shutdowns then closes the underlying connection.
        """
        if self.sock:
            try:
                self.sock.shutdown(socket.SHUT_RDWR)
            except Exception:
                pass
            try:
                self.sock.close()
            except Exception:
                pass
            finally:
                self.sock = None
        # Mark both sides terminated even if there was no socket to close
        self.client_terminated = True
        self.server_terminated = True
    def ping(self, message):
        """
        Send a ping message to the remote peer.
        The given `message` must be a unicode string.
        """
        self.send(PingControlMessage(message))
    def ponged(self, pong):
        """
        Pong message, as a :class:`messaging.PongControlMessage` instance,
        received on the stream.
        """
        pass
    def zreceived_message(self, message):
        """
        Called whenever a complete ``message``, binary or text,
        is received and ready for application's processing.
        The passed message is an instance of :class:`messaging.TextMessage`
        or :class:`messaging.BinaryMessage`.
        .. note:: You should override this method in your subclass.
        """
        pass
    def unhandled_error(self, error):
        """
        Called whenever a socket, or an OS, error is trapped
        by ws4py but not managed by it. The given error is
        an instance of `socket.error` or `OSError`.
        Note however that application exceptions will not go
        through this handler. Instead, do make sure you
        protect your code appropriately in `received_message`
        or `send`.
        The default behaviour of this handler is to log
        the error with a message.
        """
        # NOTE(review): logger.warn is a deprecated alias of logger.warning
        logger.warn("Failed to receive WSX data from `%s` -> `%s`", self.address_masked, error)
    def _write(self, data):
        """
        Trying to prevent a write operation
        on an already closed websocket stream.
        This cannot be bullet proof but hopefully
        will catch almost all use cases.
        """
        if self.terminated or self.sock is None:
            # NOTE(review): self.config is not defined in this class — it is
            # presumably set by a subclass; confirm before relying on this log line.
            logger.info('Could not send message on a terminated socket; `%s` -> %s (%s)',
                self.config.client_name, self.config.address, self.config.client_id)
        else:
            self.sock.settimeout(self.socket_write_timeout)
            self.sock.sendall(data)
    def send(self, payload, binary=False):
        """
        Sends the given ``payload`` out.
        If ``payload`` is some bytes or a bytearray,
        then it is sent as a single message not fragmented.
        If ``payload`` is a generator, each chunk is sent as part of
        fragmented message.
        If ``binary`` is set, handles the payload as a binary message.
        """
        if not self.stream:
            # NOTE(review): self.config is not defined in this class — it is
            # presumably set by a subclass; confirm before relying on this log line.
            logger.info('Could not send message without self.stream -> %s -> %s (%s -> %s) ',
                self.config.client_name,
                self.config.address,
                self.config.username,
                self.config.client_id,
            )
            return
        message_sender = self.stream.binary_message if binary else self.stream.text_message # type: any_
        # Plain Python containers and numbers are serialized to JSON first
        if payload is None or isinstance(payload, (dict, list, tuple, int, float)):
            payload = dumps(payload)
        elif isinstance(payload, Model):
            payload = payload.to_json()
        if isinstance(payload, str) or isinstance(payload, bytearray):
            m = message_sender(payload).single(mask=self.stream.always_mask)
            self._write(m)
        elif isinstance(payload, Message):
            data = payload.single(mask=self.stream.always_mask)
            self._write(data)
        elif type(payload) == GeneratorType:
            # Generators are sent as a fragmented message: each chunk is
            # written one step behind so the last chunk can carry last=True.
            bytes = next(payload)
            first = True
            for chunk in payload:
                self._write(message_sender(bytes).fragment(first=first, mask=self.stream.always_mask))
                bytes = chunk
                first = False
            self._write(message_sender(bytes).fragment(last=True, mask=self.stream.always_mask))
        else:
            raise ValueError('Unsupported type `%s` passed to send()' % type(payload))
    def _get_from_pending(self):
        """
        The SSL socket object provides the same interface
        as the socket interface but behaves differently.
        When data is sent over a SSL connection
        more data may be read than was requested from by
        the ws4py websocket object.
        In that case, the data may have been indeed read
        from the underlying real socket, but not read by the
        application which will expect another trigger from the
        manager's polling mechanism as if more data was still on the
        wire. This will happen only when new data is
        sent by the other peer which means there will be
        some delay before the initial read data is handled
        by the application.
        Due to this, we have to rely on a non-public method
        to query the internal SSL socket buffer if it has indeed
        more data pending in its buffer.
        Now, some people in the Python community
        `discourage <https://bugs.python.org/issue21430>`_
        this usage of the ``pending()`` method because it's not
        the right way of dealing with such use case. They advise
        `this approach <https://docs.python.org/dev/library/ssl.html#notes-on-non-blocking-sockets>`_
        instead. Unfortunately, this applies only if the
        application can directly control the poller which is not
        the case with the WebSocket abstraction here.
        We therefore rely on this `technic <http://stackoverflow.com/questions/3187565/select-and-ssl-in-python>`_
        which seems to be valid anyway.
        This is a bit of a shame because we have to process
        more data than what wanted initially.
        """
        data = b""
        pending = self.sock.pending()
        while pending:
            data += self.sock.recv(pending)
            pending = self.sock.pending()
        return data
    def once(self):
        """
        Performs the operation of reading from the underlying
        connection in order to feed the stream of bytes.
        We start with a small size of two bytes to be read
        from the connection so that we can quickly parse an
        incoming frame header. Then the stream indicates
        whatever size must be read from the connection since
        it knows the frame payload length.
        It returns `False` if an error occurred at the
        socket level or during the bytes processing. Otherwise,
        it returns `True`.
        """
        if self.terminated:
            return False
        if not self.sock:
            return False
        try:
            self.sock.settimeout(self.socket_read_timeout)
            b = self.sock.recv(self.reading_buffer_size)
            # This will only make sense with secure sockets.
            if self._is_secure:
                b += self._get_from_pending()
        except TimeoutError as e:
            # A read timeout is not an error — keep the connection alive
            return True
        except (ValueError, socket.error, OSError, pyOpenSSLError) as e:
            # Socket-level failures are reported but do not stop the loop here
            self.unhandled_error(e)
            return True
        else:
            if not self.process(b):
                return False
        return True
    def terminate(self):
        """
        Completes the websocket by calling the `closed`
        method either using the received closing code
        and reason, or when none was received, using
        the special `1006` code.
        Finally close the underlying connection for
        good and cleanup resources by unsetting
        the `environ` and `stream` attributes.
        """
        # NOTE(review): despite the docstring's mention of 1006, the fallback
        # actually used is self.last_close_code (default 4003, set in __init__).
        s = self.stream
        self.client_terminated = self.server_terminated = True
        try:
            if s.closing is None:
                self.closed(self.last_close_code, self.last_close_reason)
            else:
                self.closed(s.closing.code, s.closing.reason)
        finally:
            self.close_connection()
            # Cleaning up resources
            s._cleanup()
            self.stream = None
            self.environ = None
    def process(self, bytes):
        """ Takes some bytes and process them through the
        internal stream's parser. If a message of any kind is
        found, performs one of these actions:
        * A closing message will initiate the closing handshake
        * Errors will initiate a closing handshake
        * A message will be passed to the ``received_message`` method
        * Pings will see pongs be sent automatically
        * Pongs will be passed to the ``ponged`` method
        The process should be terminated when this method
        returns ``False``.
        """
        s = self.stream
        if not bytes and self.reading_buffer_size > 0:
            return False
        # The parser tells us how many bytes the next read should request
        self.reading_buffer_size = s.parser.send(bytes) or Default_Read_Size
        if s.closing is not None:
            logger.info("Closing message received (%d) '%s'" % (s.closing.code, s.closing.reason))
            if not self.server_terminated:
                self.close(s.closing.code, s.closing.reason)
            else:
                self.client_terminated = True
            return False
        if s.errors:
            for error in s.errors:
                logger.info("Error message received (%d) '%s'" % (error.code, error.reason))
                self.close(error.code, error.reason)
            s.errors = []
            return False
        if s.has_message:
            self.received_message(s.message)
            # Release the message buffer as soon as it has been handled
            if s.message is not None:
                s.message.data = None
                s.message = None
            return True
        if s.pings:
            for ping in s.pings:
                self._write(s.pong(ping.data))
            s.pings = []
        if s.pongs:
            for pong in s.pongs:
                self.ponged(pong)
            s.pongs = []
        return True
    def run(self):
        """
        Performs the operation of reading from the underlying
        connection in order to feed the stream of bytes.
        We start with a small size of two bytes to be read
        from the connection so that we can quickly parse an
        incoming frame header. Then the stream indicates
        whatever size must be read from the connection since
        it knows the frame payload length.
        Note that we perform some automatic opererations:
        * On a closing message, we respond with a closing
        message and finally close the connection
        * We respond to pings with pong messages.
        * Whenever an error is raised by the stream parsing,
        we initiate the closing of the connection with the
        appropiate error code.
        This method is blocking and should likely be run
        in a thread.
        """
        self.sock.setblocking(True)
        with Heartbeat(self, frequency=self.heartbeat_freq):
            try:
                self.opened()
                while not self.terminated:
                    if not self.once():
                        break
            finally:
                self.terminate()
class EchoWebSocket(WebSocket):
    def received_message(self, message):
        """
        Echo the incoming ``message`` straight back to the
        endpoint it arrived from, preserving its binary flag.
        """
        data, is_binary = message.data, message.is_binary
        self.send(data, is_binary)
| 18,882
|
Python
|
.py
| 465
| 30.795699
| 114
| 0.61704
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,197
|
compat.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/compat.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
__doc__ = """
This compatibility module is inspired by the one found
in CherryPy. It provides a common entry point for the various
functions and types that are used with ws4py but which
differ from Python 2.x to Python 3.x
There are likely better ways for some of them so feel
free to provide patches.
Note this has been tested against 2.7 and 3.3 only but
should hopefully work fine with other versions too.
"""
import sys
# Pick names at import time so the rest of the package can use a single,
# version-independent vocabulary (py3k flag, range, unicode, basestring, ord,
# urlsplit and the two connection helpers).
if sys.version_info >= (3, 0):
    py3k = True
    from urllib.parse import urlsplit
    range = range
    unicode = str
    basestring = (bytes, str)
    _ord = ord
    def get_connection(fileobj):
        # On Python 3 the raw socket hangs off the buffered file object
        return fileobj.raw._sock
    def detach_connection(fileobj):
        fileobj.detach()
    def ord(c):
        # On Python 3 indexing bytes already yields ints; pass them through
        if isinstance(c, int):
            return c
        return _ord(c)
else:
    py3k = False
    from urlparse import urlsplit
    range = xrange
    unicode = unicode
    basestring = basestring
    ord = ord
    def get_connection(fileobj):
        return fileobj._sock
    def detach_connection(fileobj):
        fileobj._sock = None
| 1,123
|
Python
|
.py
| 39
| 24.282051
| 61
| 0.689302
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,198
|
framing.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/framing.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
from struct import pack, unpack
from zato.server.ext.ws4py.exc import FrameTooLargeException, ProtocolException
from zato.server.ext.ws4py.compat import py3k, ord, range
# Frame opcodes defined in the spec.
# See RFC 6455, section 5.2 for the opcode registry.
OPCODE_CONTINUATION = 0x0  # continuation of a fragmented message
OPCODE_TEXT = 0x1          # UTF-8 text data frame
OPCODE_BINARY = 0x2        # binary data frame
OPCODE_CLOSE = 0x8         # control frame: connection close
OPCODE_PING = 0x9          # control frame: ping
OPCODE_PONG = 0xa          # control frame: pong
__all__ = ['Frame']
class Frame(object):
    def __init__(self, opcode=None, body=b'', masking_key=None, fin=0, rsv1=0, rsv2=0, rsv3=0):
        """
        Implements the framing protocol as defined by RFC 6455.

        :param opcode: one of the ``OPCODE_*`` constants.
        :param body: raw frame payload; must already be ``bytes``.
        :param masking_key: optional 4-byte masking key.
        :param fin: final-frame bit (0 or 1).
        :param rsv1: reserved bit 1 (0 or 1).
        :param rsv2: reserved bit 2 (0 or 1).
        :param rsv3: reserved bit 3 (0 or 1).
        :raises TypeError: if ``body`` is not ``bytes``.

        .. code-block:: python
            :linenos:

            >>> test_mask = 'XXXXXX' # perhaps from os.urandom(4)
            >>> f = Frame(OPCODE_TEXT, 'hello world', masking_key=test_mask, fin=1)
            >>> bytes = f.build()
            >>> bytes.encode('hex')
            '818bbe04e66ad6618a06d1249105cc6882'
            >>> f = Frame()
            >>> f.parser.send(bytes[0])
            1
            >>> f.parser.send(bytes[1])
            4

        .. seealso:: Data Framing http://tools.ietf.org/html/rfc6455#section-5.2
        """
        if not isinstance(body, bytes):
            raise TypeError("The body must be properly encoded")
        self.opcode = opcode
        self.body = body
        self.masking_key = masking_key
        self.fin = fin
        self.rsv1 = rsv1
        self.rsv2 = rsv2
        self.rsv3 = rsv3
        self.payload_length = len(body)
        self._parser = None
    @property
    def parser(self):
        # Lazily created parsing coroutine: callers feed it bytes via
        # send() and it yields how many more bytes it still needs.
        if self._parser is None:
            self._parser = self._parsing()
            # Python generators must be initialized once.
            next(self.parser)
        return self._parser
    def _cleanup(self):
        # Close and drop the parsing coroutine so it can be collected.
        if self._parser:
            self._parser.close()
            self._parser = None
    def build(self):
        """
        Builds a frame from the instance's attributes and returns
        its bytes representation.

        :raises ValueError: if ``fin`` is not 0/1 or ``opcode`` is reserved.
        :raises FrameTooLargeException: if the payload needs more than
            63 bits of length.
        """
        header = b''
        if self.fin > 0x1:
            raise ValueError('FIN bit parameter must be 0 or 1')
        # Opcodes 0x3-0x7 and 0xB-0xF are reserved by the spec.
        if 0x3 <= self.opcode <= 0x7 or 0xB <= self.opcode:
            raise ValueError('Opcode cannot be a reserved opcode')
        ## +-+-+-+-+-------+
        ## |F|R|R|R| opcode|
        ## |I|S|S|S|  (4)  |
        ## |N|V|V|V|       |
        ## | |1|2|3|       |
        ## +-+-+-+-+-------+
        header = pack('!B', ((self.fin << 7)
                             | (self.rsv1 << 6)
                             | (self.rsv2 << 5)
                             | (self.rsv3 << 4)
                             | self.opcode))
        ## +-+-------------+-------------------------------+
        ## |M| Payload len |    Extended payload length    |
        ## |A|     (7)     |             (16/63)           |
        ## |S|             |   (if payload len==126/127)   |
        ## |K|             |                               |
        ## +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
        ## |     Extended payload length continued, if payload len == 127  |
        ## + - - - - - - - - - - - - - - - +-------------------------------+
        if self.masking_key: mask_bit = 1 << 7
        else: mask_bit = 0
        length = self.payload_length
        # Short lengths fit in 7 bits; 126/127 announce a 16/64-bit extension.
        if length < 126:
            header += pack('!B', (mask_bit | length))
        elif length < (1 << 16):
            header += pack('!B', (mask_bit | 126)) + pack('!H', length)
        elif length < (1 << 63):
            header += pack('!B', (mask_bit | 127)) + pack('!Q', length)
        else:
            raise FrameTooLargeException()
        ## + - - - - - - - - - - - - - - - +-------------------------------+
        ## |                               |Masking-key, if MASK set to 1  |
        ## +-------------------------------+-------------------------------+
        ## | Masking-key (continued)       |          Payload Data         |
        ## +-------------------------------- - - - - - - - - - - - - - - - +
        ## :                     Payload Data continued ...                :
        ## + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
        ## |                     Payload Data continued ...                |
        ## +---------------------------------------------------------------+
        body = self.body
        if not self.masking_key:
            return bytes(header + body)
        return bytes(header + self.masking_key + self.mask(body))
    def _parsing(self):
        """
        Generator to parse bytes into a frame. Yields until
        enough bytes have been read or an error is met.

        Each ``yield n`` asks the caller to ``send()`` up to ``n`` more
        bytes; header fields are stored on ``self`` as they are decoded.

        :raises ProtocolException: on reserved bits/opcodes or a
            fragmented control frame.
        :raises FrameTooLargeException: on oversized control frames or
            payload lengths beyond 63 bits.
        """
        buf = b''
        some_bytes = b''
        # yield until we get the first header's byte
        while not some_bytes:
            some_bytes = (yield 1)
        first_byte = some_bytes[0] if isinstance(some_bytes, bytearray) else ord(some_bytes[0])
        # frame-fin = %x0 ; more frames of this message follow
        #           / %x1 ; final frame of this message
        self.fin = (first_byte >> 7) & 1
        self.rsv1 = (first_byte >> 6) & 1
        self.rsv2 = (first_byte >> 5) & 1
        self.rsv3 = (first_byte >> 4) & 1
        self.opcode = first_byte & 0xf
        # frame-rsv1 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise
        # frame-rsv2 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise
        # frame-rsv3 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise
        if self.rsv1 or self.rsv2 or self.rsv3:
            raise ProtocolException()
        # control frames between 3 and 7 as well as above 0xA are currently reserved
        if 2 < self.opcode < 8 or self.opcode > 0xA:
            raise ProtocolException()
        # control frames cannot be fragmented
        if self.opcode > 0x7 and self.fin == 0:
            raise ProtocolException()
        # do we already have enough some_bytes to continue?
        some_bytes = some_bytes[1:] if some_bytes and len(some_bytes) > 1 else b''
        # Yield until we get the second header's byte
        while not some_bytes:
            some_bytes = (yield 1)
        second_byte = some_bytes[0] if isinstance(some_bytes, bytearray) else ord(some_bytes[0])
        mask = (second_byte >> 7) & 1
        self.payload_length = second_byte & 0x7f
        # All control frames MUST have a payload length of 125 some_bytes or less
        if self.opcode > 0x7 and self.payload_length > 125:
            raise FrameTooLargeException()
        # Keep any bytes received beyond the second header byte in `buf`.
        if some_bytes and len(some_bytes) > 1:
            buf = some_bytes[1:]
            some_bytes = buf
        else:
            buf = b''
            some_bytes = b''
        if self.payload_length == 127:
            # 64-bit extended payload length follows.
            # This will compute the actual application data size
            if len(buf) < 8:
                nxt_buf_size = 8 - len(buf)
                some_bytes = (yield nxt_buf_size)
                some_bytes = buf + (some_bytes or b'')
                while len(some_bytes) < 8:
                    b = (yield 8 - len(some_bytes))
                    if b is not None:
                        some_bytes = some_bytes + b
                if len(some_bytes) > 8:
                    buf = some_bytes[8:]
                    some_bytes = some_bytes[:8]
            else:
                some_bytes = buf[:8]
                buf = buf[8:]
            extended_payload_length = some_bytes
            self.payload_length = unpack(
                '!Q', extended_payload_length)[0]
            # The most significant bit of the 64-bit length must be 0.
            if self.payload_length > 0x7FFFFFFFFFFFFFFF:
                raise FrameTooLargeException()
        elif self.payload_length == 126:
            # 16-bit extended payload length follows.
            if len(buf) < 2:
                nxt_buf_size = 2 - len(buf)
                some_bytes = (yield nxt_buf_size)
                some_bytes = buf + (some_bytes or b'')
                while len(some_bytes) < 2:
                    b = (yield 2 - len(some_bytes))
                    if b is not None:
                        some_bytes = some_bytes + b
                if len(some_bytes) > 2:
                    buf = some_bytes[2:]
                    some_bytes = some_bytes[:2]
            else:
                some_bytes = buf[:2]
                buf = buf[2:]
            extended_payload_length = some_bytes
            self.payload_length = unpack(
                '!H', extended_payload_length)[0]
        if mask:
            # Read the 4-byte masking key.
            if len(buf) < 4:
                nxt_buf_size = 4 - len(buf)
                some_bytes = (yield nxt_buf_size)
                some_bytes = buf + (some_bytes or b'')
                while not some_bytes or len(some_bytes) < 4:
                    b = (yield 4 - len(some_bytes))
                    if b is not None:
                        some_bytes = some_bytes + b
                if len(some_bytes) > 4:
                    buf = some_bytes[4:]
            else:
                some_bytes = buf[:4]
                buf = buf[4:]
            # NOTE(review): unlike the length branches above, some_bytes is
            # not truncated to 4 bytes here, so masking_key may carry extra
            # bytes if the sender over-delivered — confirm upstream behavior.
            self.masking_key = some_bytes
        if len(buf) < self.payload_length:
            # Keep yielding until the whole payload has arrived.
            nxt_buf_size = self.payload_length - len(buf)
            some_bytes = (yield nxt_buf_size)
            some_bytes = buf + (some_bytes or b'')
            while len(some_bytes) < self.payload_length:
                l = self.payload_length - len(some_bytes)
                b = (yield l)
                if b is not None:
                    some_bytes = some_bytes + b
        else:
            if self.payload_length == len(buf):
                some_bytes = buf
            else:
                some_bytes = buf[:self.payload_length]
        self.body = some_bytes
        yield
    def mask(self, data):
        """
        Performs the masking or unmasking operation on data
        using the simple masking algorithm:

        ..
           j                   = i MOD 4
           transformed-octet-i = original-octet-i XOR masking-key-octet-j

        :param data: bytes-like payload to (un)mask.
        :return: a ``bytearray`` with the mask applied.
        """
        masked = bytearray(data)
        # On Python 2 the key is a str, so convert each char to its ordinal.
        if py3k: key = self.masking_key
        else: key = map(ord, self.masking_key)
        for i in range(len(data)):
            masked[i] = masked[i] ^ key[i%4]
        return masked
    # Masking is an involution: applying it twice restores the original.
    unmask = mask
| 10,201
|
Python
|
.py
| 237
| 31.455696
| 96
| 0.468265
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|
10,199
|
messaging.py
|
zatosource_zato/code/zato-server/src/zato/server/ext/ws4py/messaging.py
|
# -*- coding: utf-8 -*-
# flake8: noqa
import os
import struct
from zato.server.ext.ws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from zato.server.ext.ws4py.compat import unicode, py3k
# Public API of this module.
__all__ = ['Message', 'TextMessage', 'BinaryMessage', 'CloseControlMessage',
           'PingControlMessage', 'PongControlMessage']
class Message(object):
    def __init__(self, opcode, data=b'', encoding='utf-8'):
        """
        Application-level message, usually assembled from one or more
        frames.

        The protocol distinguishes data messages (text or binary) from
        control messages used for in-band signalling between peers;
        ``opcode`` selects the kind and ``data`` carries the optional
        payload.

        Unicode payloads are encoded with ``encoding``; bytearrays are
        converted to immutable ``bytes``.

        :raises TypeError: when ``data`` is unicode but no encoding is
            given, or when its type is unsupported.
        """
        self.opcode = opcode
        self._completed = False
        self.encoding = encoding
        if isinstance(data, unicode):
            if not encoding:
                raise TypeError("unicode data without an encoding")
            payload = data.encode(encoding)
        elif isinstance(data, bytearray):
            payload = bytes(data)
        elif isinstance(data, bytes):
            payload = data
        else:
            raise TypeError("%s is not a supported data type" % type(data))
        self.data = payload

    def single(self, mask=False):
        """
        Return the bytes of one final frame carrying the whole payload.

        When ``mask`` is true the frame is masked with a freshly
        generated 4-byte key.
        """
        masking_key = os.urandom(4) if mask else None
        frame = Frame(body=self.data, opcode=self.opcode,
                      masking_key=masking_key, fin=1)
        return frame.build()

    def fragment(self, first=False, last=False, mask=False):
        """
        Return the bytes of a single fragment of this message.

        ``first`` selects this message's opcode instead of the
        continuation opcode, ``last`` sets the ``fin`` bit, and ``mask``
        applies a freshly generated 4-byte masking key.
        """
        if first is True:
            opcode = self.opcode
        else:
            opcode = OPCODE_CONTINUATION
        masking_key = os.urandom(4) if mask else None
        frame = Frame(body=self.data, opcode=opcode,
                      masking_key=masking_key,
                      fin=1 if last is True else 0)
        return frame.build()

    @property
    def completed(self):
        """True once the final frame (``fin`` bit set) has been seen."""
        return self._completed

    @completed.setter
    def completed(self, state):
        """Record the completion state; usually set by the stream parser."""
        self._completed = state

    def extend(self, data):
        """
        Append ``data`` to the payload, encoding unicode as needed.

        :raises TypeError: on unsupported payload types.
        """
        if isinstance(data, bytes):
            extra = data
        elif isinstance(data, bytearray):
            extra = bytes(data)
        elif isinstance(data, unicode):
            extra = data.encode(self.encoding)
        else:
            raise TypeError("%s is not a supported data type" % type(data))
        self.data += extra

    def __len__(self):
        # Length in decoded characters, not raw bytes.
        return len(self.__unicode__())

    def __str__(self):
        return self.data.decode(self.encoding) if py3k else self.data

    def __unicode__(self):
        return self.data.decode(self.encoding)
class TextMessage(Message):
    """Data message carrying UTF-8 encoded text."""

    def __init__(self, text=None):
        super(TextMessage, self).__init__(OPCODE_TEXT, text)

    @property
    def is_text(self):
        """Always ``True`` for a text message."""
        return True

    @property
    def is_binary(self):
        """Always ``False`` for a text message."""
        return False
class BinaryMessage(Message):
    """Data message carrying an opaque byte payload."""

    def __init__(self, bytes=None):
        super(BinaryMessage, self).__init__(OPCODE_BINARY, bytes, encoding=None)

    @property
    def is_text(self):
        """Always ``False`` for a binary message."""
        return False

    @property
    def is_binary(self):
        """Always ``True`` for a binary message."""
        return True

    def __len__(self):
        # Length in raw bytes; there is no text decoding for binary data.
        return len(self.data)
class CloseControlMessage(Message):
    """Control message asking the peer to close the connection."""

    def __init__(self, code=4042, reason='Z4042'):
        """
        Build the close payload: an optional 2-byte big-endian status
        ``code`` followed by an optional UTF-8 ``reason`` string.
        """
        payload = b""
        if code:
            payload += struct.pack("!H", code)
        if reason is not None:
            if isinstance(reason, unicode):
                reason = reason.encode('utf-8')
            payload += reason
        super(CloseControlMessage, self).__init__(OPCODE_CLOSE, payload, 'utf-8')
        self.code = code
        # NOTE: stored as the encoded bytes, not the original unicode.
        self.reason = reason

    def __str__(self):
        return self.reason.decode('utf-8') if py3k else self.reason

    def __unicode__(self):
        return self.reason.decode(self.encoding)
class PingControlMessage(Message):
    """Control message probing the peer's liveness."""

    def __init__(self, data=None):
        super(PingControlMessage, self).__init__(OPCODE_PING, data)
class PongControlMessage(Message):
    """Control message answering a previously received ping."""

    def __init__(self, data):
        super(PongControlMessage, self).__init__(OPCODE_PONG, data)
| 5,214
|
Python
|
.py
| 137
| 29.562044
| 84
| 0.609954
|
zatosource/zato
| 1,096
| 239
| 0
|
AGPL-3.0
|
9/5/2024, 5:10:54 PM (Europe/Amsterdam)
|