sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def _write_file_safely(local_path, fileinfo, response):
    """Stream a remote file into a local file at `local_path`.

    If anything at all interrupts the write (including KeyboardInterrupt or
    SystemExit, hence BaseException), the partial local file is removed
    before the exception is re-raised.
    """
    try:
        _write_file(local_path, fileinfo, response)
    except BaseException as err:
        message = "{} interrupted writing {} -- " \
                  "cleaning up partial file".format(err.__class__.__name__, local_path)
        logger.warning(message)
        os.remove(local_path)
        raise err
def up_by_files(to_sync, remote_dir=DEFAULT_REMOTE_DIR, remote_files=None):
    """Sync the given list of local files up to the `remote_dir` directory.

    :param to_sync: local file info objects to push
    :param remote_dir: destination directory on the FlashAir card
    :param remote_files: pre-fetched remote file map; fetched on demand if None
    """
    files_on_remote = remote_files
    if files_on_remote is None:
        files_on_remote = command.map_files_raw(remote_dir=remote_dir)
    for fileinfo in to_sync:
        _sync_local_file(fileinfo, remote_dir, files_on_remote)
def up_by_time(*filters, local_dir=".", remote_dir=DEFAULT_REMOTE_DIR, count=1):
    """Sync the `count` most recent local files, judged by date/time attributes."""
    remote_files = command.map_files_raw(remote_dir=remote_dir)
    candidates = sorted(list_local_files(*filters, local_dir=local_dir),
                        key=lambda f: f.datetime)
    newest = candidates[-count:]
    _notify_sync(Direction.up, newest)
    # push newest-first
    up_by_files(newest[::-1], remote_dir, remote_files)
def up_by_name(*filters, local_dir=".", remote_dir=DEFAULT_REMOTE_DIR, count=1):
    """Sync the `count` local files whose filenames sort highest alphanumerically."""
    remote_files = command.map_files_raw(remote_dir=remote_dir)
    candidates = sorted(list_local_files(*filters, local_dir=local_dir),
                        key=lambda f: f.filename)
    highest = candidates[-count:]
    _notify_sync(Direction.up, highest)
    # push highest-name-first
    up_by_files(highest[::-1], remote_dir, remote_files)
def _upload_file_safely(fileinfo, remote_dir):
    """Upload a local file to FlashAir, attempting to delete the remote file
    if the upload is interrupted by any error (including KeyboardInterrupt).
    """
    try:
        upload.upload_file(fileinfo.path, remote_dir=remote_dir)
    except BaseException as err:
        message = "{} interrupted writing {} -- " \
                  "cleaning up partial remote file".format(err.__class__.__name__, fileinfo.path)
        logger.warning(message)
        # NOTE(review): deletes by the *local* path, ignoring remote_dir —
        # looks suspicious; confirm upload.delete_file resolves this correctly
        upload.delete_file(fileinfo.path)
        raise err
def rsync(hosts, source, destination, logger=None, sudo=False):
    """
    Push `source` to `destination` on one or more hosts via the execnet rsync
    engine, creating a Connection per host.

    All hosts share the same destination path; this deviates from raw execnet,
    which can push to per-target locations.

    :param hosts: a single host or a list of hosts
    :param logger: optional logger; a basic remote logger is created if omitted
    :param sudo: whether connections should be established with sudo
    """
    log = logger or basic_remote_logger()
    sync = _RSync(source, logger=log)
    target_hosts = hosts if isinstance(hosts, list) else [hosts]
    for host in target_hosts:
        connection = Connection(host, log, sudo)
        sync.add_target(connection.gateway, destination)
    return sync.send()
def map_into_range(low, high, raw_value):
    """
    Map an input value into an output value, clamping so the magnitude of the
    output is at most 1.0.
    :param low:
        The input value corresponding to zero.
    :param high:
        The input value corresponding to 1.0 or -1.0 — positive when high is
        numerically above low, negative when it is below.
    :param raw_value:
        An input value
    :return:
        Mapped output value
    """
    value = float(raw_value)
    if low < high:
        # ascending range: clamp below low to 0, above high to 1.0
        if value < low:
            return 0
        if value > high:
            return 1.0
    elif low > high:
        # descending range: clamp above low to 0, below high to -1.0
        if value > low:
            return 0
        if value < high:
            return -1.0
    return (value - low) / abs(high - low)
def map_single_axis(low, high, dead_zone, hot_zone, value):
    """
    Apply dead and hot zones, then map a value onto the axis range.

    The dead and hot zones are proportions of the axis range treated as the
    low and high values respectively: dead_zone=0.2 means the first 20% of the
    range reads as the low value; hot_zone=0.4 means the last 40% reads as the
    high value. As with map_into_range, `low` need not be numerically below
    `high` — it denotes the no-signal end (so `high` may be a large negative
    value). Bad things happen when dead_zone + hot_zone == 1.0; avoid that.
    Used by map_dual_axis, and directly for single-ended axes such as triggers
    whose range runs 0.0..1.0 rather than -1.0..1.0.

    :param low:
        The value corresponding to no signal
    :param high:
        The value corresponding to a full signal
    :param dead_zone:
        Proportion of the range, from the no-signal end, returning 0.0
    :param hot_zone:
        Proportion of the range, from the full-signal end, returning full strength
    :param value:
        The raw value to map
    :return:
        The scaled, clipped value; 0.0 up to 1.0 when low < high, down to -1.0
        when high < low.
    """
    span = high - low
    adjusted_low = low + span * dead_zone
    adjusted_high = high - span * hot_zone
    return map_into_range(adjusted_low, adjusted_high, value)
def map_dual_axis(low, high, centre, dead_zone, hot_zone, value):
    """
    Map an axis with a central dead zone and hot zones at each end onto
    -1.0..1.0. Effectively two map_single_axis calls: the side of `centre`
    the input falls on selects whether (centre, low) or (centre, high) acts
    as the (low, high) pair. This handles regular joystick axes that return
    to a centre position when released.

    :param low:
        Raw value at the strongest negative input (stick far left / down).
    :param high:
        Raw value at the strongest positive input (stick far right / up).
    :param centre:
        Raw value at the axis' resting position.
    :param dead_zone:
        Proportion of each half-range, away from centre, mapping to 0.0
    :param hot_zone:
        Proportion of each half-range, away from each extreme, mapping to
        1.0 / -1.0 (depending on the side of centre)
    :param value:
        The raw value to map
    :return:
        Filtered, clamped value: -1.0 at low, 0.0 at centre, 1.0 at high
    """
    side_extreme = low if value <= centre else high
    return map_single_axis(centre, side_extreme, dead_zone, hot_zone, value)
def register_button_handler(self, button_handler, button_sname: str):
    """
    Register a handler function fired when the named button is pressed.

    :param button_handler:
        Called with the Button that was pressed as its sole argument.
    :param button_sname:
        The sname of the button which should trigger the handler function
    :return:
        A no-arg function which can be used to remove this registration
    """
    button = self.buttons[button_sname]
    return self.buttons.register_button_handler(button_handler, button)
def axis_updated(self, event: InputEvent, prefix=None):
    """
    Process an absolute axis event from evdev; called internally by the
    controller implementations.
    :internal:
    :param event:
        The evdev event to process
    :param prefix:
        If present, a named prefix applied to the event code when searching
        for the axis
    """
    lookup_key = event.code if prefix is None else prefix + str(event.code)
    axis = self.axes_by_code.get(lookup_key)
    if axis is None:
        logger.debug('Unknown axis code {} ({}), value {}'.format(event.code, prefix, event.value))
    else:
        axis.receive_device_value(event.value)
def set_axis_centres(self, *args):
    """
    Set each CentredAxis' centre point to its current value. The centre is
    subtracted from readings before scaling when computing the axis value.
    Axes that are not CentredAxis instances are left untouched.
    """
    centred = (a for a in self.axes_by_code.values() if isinstance(a, CentredAxis))
    for axis in centred:
        axis.centre = axis.value
def names(self) -> [str]:
    """
    The snames of all axis objects, sorted, excluding unnamed (empty-string) axes.

    :return:
        A sorted list of non-empty sname strings.
    """
    # The original used `name is not ''`, an identity comparison that only
    # works by accident of string interning (and raises a SyntaxWarning on
    # modern Pythons); != is the correct value comparison.
    return sorted(name for name in self.axes_by_sname if name != '')
def _input_to_raw_value(self, value: int) -> float:
"""
Convert the value read from evdev to a 0.0 to 1.0 range.
:internal:
:param value:
a value ranging from the defined minimum to the defined maximum value.
:return:
0.0 at minimum, 1.0 at maximum, linearly interpolating between those two points.
"""
return (float(value) - self.min_raw_value) / self.max_raw_value | Convert the value read from evdev to a 0.0 to 1.0 range.
:internal:
:param value:
a value ranging from the defined minimum to the defined maximum value.
:return:
0.0 at minimum, 1.0 at maximum, linearly interpolating between those two points. | entailment |
def value(self) -> float:
    """
    A centre-compensated, scaled value for this trigger-style axis, taking any
    dead zone into account: 0.0 at the edge of the dead zone, rising to 1.0 at
    full travel or at the edge of the hot zone if one is defined below 1.0.

    :return:
        float; 0.0 when unpressed or inside the dead zone, up to 1.0 when
        fully pressed or inside the hot zone
    """
    return map_single_axis(self.min, self.max, self.dead_zone, self.hot_zone, self.__value)
def receive_device_value(self, raw_value: int):
    """
    Accept a new raw value from the joystick hardware; called from within the
    joystick implementation class when parsing the event queue.
    :param raw_value: the raw value from the joystick hardware
    :internal:
    """
    new_value = self._input_to_raw_value(raw_value)
    if self.button is not None:
        # Emulate a button press/release as the value crosses the trigger
        # threshold, with a 0.05 hysteresis band to avoid chatter.
        if new_value > (self.button_trigger_value + 0.05) > self.__value:
            self.buttons.button_pressed(self.button.key_code)
        elif new_value < (self.button_trigger_value - 0.05) < self.__value:
            self.buttons.button_released(self.button.key_code)
    self.__value = new_value
    # auto-calibrate the observed range
    if new_value > self.max:
        self.max = new_value
    elif new_value < self.min:
        self.min = new_value
def value(self) -> float:
    """
    A centre-compensated, scaled value for the axis, taking any dead zone into
    account: 0.0 at the edge of the dead zone, up to 1.0 (positive) or -1.0
    (negative) at the extremes or at the hot-zone edge if one is defined below
    1.0. The axis auto-calibrates for maximum travel — initially the hardware's
    largest value reads as 0.9 in each direction, expanding as larger values
    are observed — and this function scales so the extremes always yield
    1.0 / -1.0.

    :return: a float from -1.0 (left/down) to 1.0 (right/up)
    """
    mapped = map_dual_axis(self.min, self.max, self.centre, self.dead_zone, self.hot_zone, self.__value)
    return -mapped if self.invert else mapped
def receive_device_value(self, raw_value: int):
    """
    Accept a new raw value from the joystick hardware; called from within the
    joystick implementation class when parsing the event queue.
    :param raw_value: the raw value from the joystick hardware
    :internal:
    """
    converted = self._input_to_raw_value(raw_value)
    self.__value = converted
    # expand the observed min/max so the axis auto-calibrates over time
    if converted > self.max:
        self.max = converted
    elif converted < self.min:
        self.min = converted
def button_pressed(self, key_code, prefix=None):
    """
    Update this button manager's state when a button press arrives from a
    controller class: fire registered handlers and mark the button pressed.
    :internal:
    :param key_code:
        The code specified when populating Button instances
    :param prefix:
        Applied to the key code if present
    """
    lookup = key_code if prefix is None else prefix + str(key_code)
    state = self.buttons_by_code.get(lookup)
    if state is None:
        logger.debug('Unknown button code {} ({})'.format(key_code, prefix))
        return
    for handler in state.button_handlers:
        handler(state.button)
    state.is_pressed = True
    state.last_pressed = time()
    state.was_pressed_since_last_check = True
def button_released(self, key_code, prefix=None):
    """
    Update this button manager's state when a button release arrives from a
    controller class. Unknown codes are ignored silently.
    :internal:
    :param key_code:
        The code specified when populating Button instances
    :param prefix:
        Applied to the key code if present
    """
    lookup = key_code if prefix is None else prefix + str(key_code)
    state = self.buttons_by_code.get(lookup)
    if state is not None:
        state.is_pressed = False
        state.last_pressed = None
def check_presses(self):
    """
    Return the set of Buttons pressed since this call was last made, clearing
    the per-button "pressed" flags in the process.
    :return:
        A ButtonPresses instance containing buttons pressed since the last call.
    """
    newly_pressed = []
    for button, state in self.buttons.items():
        if state.was_pressed_since_last_check:
            state.was_pressed_since_last_check = False
            newly_pressed.append(button)
    self.__presses = ButtonPresses(newly_pressed)
    return self.__presses
def held(self, sname):
    """
    Determine whether a button, identified by standard name, is currently held.
    :param sname:
        The standard name of the button
    :return:
        None when the button is not held (or not available), otherwise the
        number of seconds (float) since it was pressed
    """
    state = self.buttons_by_sname.get(sname)
    if state is None or not state.is_pressed or state.last_pressed is None:
        return None
    return time() - state.last_pressed
def register_button_handler(self, button_handler, buttons):
    """
    Register a handler function which will be called when a button is pressed.

    :param button_handler:
        Called with the Button that was pressed as its sole argument whenever
        any of the specified buttons is pressed.
    :param [Button] buttons:
        One button, or a list of buttons, which should trigger the handler.
        Buttons are :class:`approxeng.input.Button` instances; controller
        implementations generally expose these as constants such as
        SixAxis.BUTTON_CIRCLE.
    :return:
        A no-arg function which can be used to remove this registration
    """
    button_list = buttons if isinstance(buttons, list) else [buttons]
    for button in button_list:
        state = self.buttons.get(button)
        if state is not None:
            state.button_handlers.append(button_handler)

    def remove():
        """Deregister this handler from every button it was attached to."""
        for button in button_list:
            state = self.buttons.get(button)
            if state is not None:
                state.button_handlers.remove(button_handler)

    return remove
def _func(self) -> typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]]:
"""Get wrapped function.
:rtype: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]]
"""
return self.__func | Get wrapped function.
:rtype: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]] | entailment |
def _get_function_wrapper(
self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]
) -> typing.Callable[..., typing.Any]:
"""Here should be constructed and returned real decorator.
:param func: Wrapped function
:type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]
:rtype: typing.Callable
"""
raise NotImplementedError() | Here should be constructed and returned real decorator.
:param func: Wrapped function
:type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]
:rtype: typing.Callable | entailment |
def _await_if_required(
target: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]
) -> typing.Callable[..., typing.Any]:
"""Await result if coroutine was returned."""
@functools.wraps(target)
def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> typing.Any
"""Decorator/wrapper."""
result = target(*args, **kwargs)
if asyncio.iscoroutine(result):
loop = asyncio.new_event_loop()
result = loop.run_until_complete(result)
loop.close()
return result
return wrapper | Await result if coroutine was returned. | entailment |
def unique_name(device: InputDevice) -> str:
    """
    Build as unique a name as possible for the physical device: the uniq ID if
    available, else the first segment of the phys ID, else a concatenation of
    vendor, product, version and device path.
    :param device:
        An InputDevice instance to query
    :return:
        A string identifying the physical entity represented by the device
    """
    if device.uniq:
        return device.uniq
    if device.phys:
        return device.phys.split('/')[0]
    parts = (device.info.vendor, device.info.product, device.info.version, device.path)
    return '-'.join(str(part) for part in parts)
def find_matching_controllers(*requirements, **kwargs) -> [ControllerDiscovery]:
    """
    Find a sequence of controllers which match the supplied requirements, or
    raise an error if no such controllers exist.
    :param requirements:
        Zero or more ControllerRequirement instances defining the requirements
        for controllers. With no arguments a single unfiltered requirement is
        used, matching the first controller found.
    :return:
        A list the same length as `requirements` of matching
        ControllerDiscovery instances.
    :raises:
        ControllerNotFoundError if no appropriately matching controllers can
        be located
    """
    requirements = list(requirements)
    # list(...) is never None, so only the emptiness check matters
    # (the original also tested `is None`, which was dead code)
    if not requirements:
        requirements = [ControllerRequirement()]

    def pop_controller(r: ControllerRequirement, discoveries: [ControllerDiscovery]) -> ControllerDiscovery:
        """
        Remove and return the first discovery accepted by requirement `r`.
        :param r:
            The ControllerRequirement to match
        :param discoveries:
            The [ControllerDiscovery] to search; the matched item is removed.
        :raises:
            ControllerNotFoundError if no matching controller can be found
        """
        for index, d in enumerate(discoveries):
            if r.accept(d):
                return discoveries.pop(index)
        raise ControllerNotFoundError()

    all_controllers = find_all_controllers(**kwargs)
    try:
        return [pop_controller(r, all_controllers) for r in requirements]
    except ControllerNotFoundError:
        logger.info('Unable to satisfy controller requirements' +
                    ', required {}, found {}'.format(requirements, find_all_controllers(**kwargs)))
        # bare raise preserves the original traceback
        raise
def find_all_controllers(**kwargs) -> [ControllerDiscovery]:
    """
    :return:
        A list of :class:`~approxeng.input.controllers.ControllerDiscovery`
        instances for controllers attached to this host, ordered by the
        ordering on ControllerDiscovery. Controllers are constructed with
        kwargs passed to their constructors — particularly useful for dead
        and hot zone parameters.
    """
    def known_registrations() -> [{}]:
        """
        Scan subclasses of :class:`~approxeng.input.Controller`, reading their
        :meth:`~approxeng.input.Controller.registrations_ids` data — a list of
        `(vendor_id, product_id)` tuples — to build a registry of known
        controller classes.
        :return:
            A generator of dicts with constructor and registration info
        """
        for subclass in Controller.__subclasses__():
            for vendor_id, product_id in subclass.registration_ids():
                yield {'constructor': subclass,
                       'vendor_id': vendor_id,
                       'product_id': product_id}

    id_to_constructor = {'{}-{}'.format(reg['vendor_id'], reg['product_id']): reg['constructor']
                         for reg in known_registrations()}

    def controller_constructor(d: InputDevice):
        # None when this vendor/product pair is not a known controller
        device_id = '{}-{}'.format(d.info.vendor, d.info.product)
        return id_to_constructor.get(device_id)

    all_devices = [InputDevice(path) for path in list_devices()]
    supported_names = set(unique_name(d) for d in all_devices
                          if controller_constructor(d) is not None)
    devices_by_name = {name: [d for d in all_devices if unique_name(d) == name]
                       for name in supported_names}
    return sorted(
        ControllerDiscovery(controller=controller_constructor(devices[0])(**kwargs), devices=devices, name=name)
        for name, devices in devices_by_name.items())
def print_devices():
    """
    Simple diagnostic function: pretty-print details of every evdev device
    that exposes absolute or relative axes.
    """
    def code_name(table, code):
        """Resolve an event code to its symbolic name, tolerating codes
        missing from evdev's tables."""
        try:
            return table[code]
        except KeyError:
            return 'EXTENDED_CODE_{}'.format(code)

    def has_abs_axes(device):
        # EV_ABS capability group is keyed by 3
        return device.capabilities().get(3) is not None

    def has_rel_axes(device):
        # EV_REL capability group is keyed by 2
        return device.capabilities().get(2) is not None

    def has_buttons(device):
        # EV_KEY capability group is keyed by 1
        return device.capabilities().get(1) is not None

    def device_verbose_info(device: InputDevice) -> {}:
        """
        Gather and format as much info as possible about the supplied
        InputDevice. Used mostly for debugging.
        :param device:
            An InputDevice to examine
        :return:
            A dict containing as much information as possible about the device.
        """
        axes = None
        if has_abs_axes(device):
            axes = {
                code_name(ecodes.ABS, axis_code): {
                    'code': axis_code, 'min': axis_info.min, 'max': axis_info.max,
                    'fuzz': axis_info.fuzz, 'flat': axis_info.flat,
                    'res': axis_info.resolution}
                for axis_code, axis_info in device.capabilities().get(3)}
        rel_axes = None
        if has_rel_axes(device):
            # (a stray debug print of the raw EV_REL capabilities was removed here)
            rel_axes = {
                code_name(ecodes.REL, axis_code): {'code': axis_code}
                for axis_code in device.capabilities().get(2)}
        buttons = None
        if has_buttons(device):
            buttons = {code: names for (names, code) in
                       dict(util.resolve_ecodes_dict({1: device.capabilities().get(1)})).get(('EV_KEY', 1))}
        return {'fn': device.fn, 'path': device.path, 'name': device.name, 'phys': device.phys,
                'uniq': device.uniq, 'vendor': device.info.vendor, 'product': device.info.product,
                'version': device.info.version, 'bus': device.info.bustype, 'axes': axes,
                'rel_axes': rel_axes, 'buttons': buttons, 'unique_name': unique_name(device)}

    _check_import()
    # one printer for all devices rather than one per device
    pp = pprint.PrettyPrinter(indent=2, width=100)
    for d in [InputDevice(fn) for fn in list_devices()]:
        if has_abs_axes(d) or has_rel_axes(d):
            pp.pprint(device_verbose_info(d))
def print_controllers():
    """Locate every attached controller and pretty-print its details to stdout."""
    _check_import()
    printer = pprint.PrettyPrinter(indent=2)
    for found in find_all_controllers():
        printer.pprint(found.controller)
def accept(self, discovery: ControllerDiscovery):
    """
    Decide whether a discovered controller satisfies this requirement.

    :param discovery:
        ControllerDiscovery candidate to test.
    :return:
        True when the discovery matches the required controller class (if any)
        and exposes every required standard control name (if any), else False.
    """
    if self.require_class is not None:
        if not isinstance(discovery.controller, self.require_class):
            return False
    if self.snames is not None:
        available = discovery.controller.buttons.names + discovery.controller.axes.names
        if any(wanted not in available for wanted in self.snames):
            return False
    return True
def config(param_map, mastercode=DEFAULT_MASTERCODE):
    """Build the processed parameter dict for a POST to FlashAir's config.cgi.

    Takes a dictionary of {Config.key: value}. The mastercode entry is
    supplied first so an explicit value in ``param_map`` takes precedence,
    then every entry is run through the parameter processors.
    """
    combined = {Config.mastercode: mastercode, **param_map}
    return dict(_process_params(combined))
def post(param_map, url=URL):
    """Send a `param_map` built by `config` to the FlashAir config.cgi endpoint."""
    request = _prep_post(url=url, **param_map)
    return cgi.send(request)
def _validate_timeout(seconds: float):
"""Creates an int from 60000 to 4294967294 that represents a
valid millisecond wireless LAN timeout"""
val = int(seconds * 1000)
assert 60000 <= val <= 4294967294, "Bad value: {}".format(val)
return val | Creates an int from 60000 to 4294967294 that represents a
valid millisecond wireless LAN timeout | entailment |
def parse_datetime(datetime_input):
    """Parse a (possibly partial) date/time string into an arrow object.

    The arrow library mishandles some inputs, giving unexpected results for
    partial strings such as '2015-01' or just '2015', which should mean
    "the first moment of" that period.  We split and parse the components
    ourselves, then hand arrow the explicit values.
    """
    date_part, time_part = _split_datetime(datetime_input)
    components = tuple(_parse_date(date_part)) + tuple(_parse_time(time_part))
    return arrow.get(*components)
def set_led(self, led_number, led_value):
    """
    Set front-panel controller LEDs. The DS3 controller has four, labelled, LEDs on the front panel that can
    be either on or off.

    :param led_number:
        Integer between 1 and 4; values outside this range are silently ignored.
    :param led_value:
        Value, set to 0 to turn the LED off, 1 to turn it on
    """
    # BUG FIX: the original guard was `if 1 > led_number > 4:`, a chained
    # comparison that can never be true, so out-of-range LED numbers were
    # passed straight through to the hardware instead of being rejected.
    if led_number < 1 or led_number > 4:
        return
    write_led_value(hw_id=self.device_unique_name, led_name='sony{}'.format(led_number), value=led_value)
def mixer(yaw, throttle, max_power=100):
    """
    Mix a pair of joystick axes into a pair of wheel speeds.  This is the single
    place where joystick position maps to wheel power, so drive-feel changes
    belong here; everything else is plumbing.

    :param yaw:
        Yaw axis value, ranges from -1.0 to 1.0
    :param throttle:
        Throttle axis value, ranges from -1.0 to 1.0
    :param max_power:
        Maximum speed that should be returned from the mixer, defaults to 100
    :return:
        A pair of power_left, power_right integer values to send to the motor driver
    """
    raw_left = throttle + yaw
    raw_right = throttle - yaw
    # Normalise so neither side exceeds max_power while preserving the ratio.
    divisor = max(1, abs(raw_left), abs(raw_right))
    factor = float(max_power) / divisor
    return int(raw_left * factor), int(raw_right * factor)
def admin_command(sudo, command):
    """
    Prepend ``sudo`` to a command when requested, otherwise return the
    command untouched.

    :param sudo: A boolean representing the intention of having a sudo command
        (or not)
    :param command: A list of the actual command to execute with Popen; a bare
        string is wrapped in a single-element list first.
    :return: the sudo-prefixed command list, or the original command unchanged
    """
    if not sudo:
        return command
    if not isinstance(command, list):
        command = [command]
    # list(command) copies, so the caller's list is never aliased or mutated.
    return ['sudo'] + list(command)
def scan_system():
    """
    Scan /sys/class/leds and /sys/class/power_supply, grouping LED brightness
    controls and battery capacity files by the hardware ID of the owning
    device.

    Hardware IDs are, in order of preference, HID_UNIQ (the physical MAC of an
    attached bluetooth or similar HID device) or the first path component of
    PHYS.  This mirrors the evdev-based scanning logic so LEDs for e.g. a PS4
    controller are keyed on the same physical address as
    :func:`approxeng.input.controllers.unique_name` returns for the
    controller's InputDevice instances.

    :return:
        A dict ``{'leds': {hw_id: {led_name: brightness_path}},
        'power': {hw_id: capacity_path}}``
    """

    def find_device_hardware_id(uevent_file_path):
        # Prefer HID_UNIQ, fall back to the head of PHYS, else None.
        hid_uniq = None
        phys = None
        # FIX: the original leaked the file handle (open() without close);
        # read the file inside a context manager instead.
        with open(uevent_file_path, 'r') as uevent_file:
            lines = uevent_file.read().split('\n')
        for line in lines:
            parts = line.split('=')
            if len(parts) == 2:
                name, value = parts
                value = value.replace('"', '')
                if name == 'HID_UNIQ' and value:
                    hid_uniq = value
                elif name == 'PHYS' and value:
                    phys = value.split('/')[0]
        if hid_uniq:
            return hid_uniq
        elif phys:
            return phys
        return None

    leds = {}
    for sub in ['/sys/class/leds/' + sub_dir for sub_dir in listdir('/sys/class/leds')]:
        # Entries look like "device:colour:function"; keep the last segment.
        led_name = sub.split(':')[-1]
        write_path = sub + '/brightness'
        device_id = find_device_hardware_id(sub + '/device/uevent')
        if device_id:
            if device_id not in leds:
                leds[device_id] = {}
            leds[device_id][led_name] = write_path
    power = {}
    for sub in ['/sys/class/power_supply/' + sub_dir for sub_dir in listdir('/sys/class/power_supply')]:
        read_path = sub + '/capacity'
        device_id = find_device_hardware_id(sub + '/device/uevent')
        if device_id:
            power[device_id] = read_path
    return {'leds': leds,
            'power': power}
def bind_controllers(*discoveries, print_events=False):
    """
    Bind a controller or controllers to a set of evdev InputDevice instances, starting a thread to keep those
    controllers in sync with the state of the hardware.

    :param discoveries:
        ControllerDiscovery instances specifying the controllers and their associated input devices
    :param print_events:
        Defaults to False, if set to True then all events picked up by this binder will be printed to stdout
    :return:
        A function which can be used to stop the event reading thread and unbind from the device
    """
    discoveries = list(discoveries)

    class SelectThread(Thread):
        # Daemon thread that multiplexes reads over every bound InputDevice
        # with select() and routes each event to its owning controller.
        def __init__(self):
            Thread.__init__(self, name='evdev select thread')
            self.daemon = True
            self.running = True
            # Map device filename -> owning ControllerDiscovery for routing.
            self.device_to_controller_discovery = {}
            for discovery in discoveries:
                for d in discovery.devices:
                    self.device_to_controller_discovery[d.fn] = discovery
            # Flat list of all InputDevice instances across every discovery.
            self.all_devices = reduce(lambda x, y: x + y, [discovery.devices for discovery in discoveries])

        def run(self):
            # Mark each controller as connected before entering the loop.
            for discovery in discoveries:
                discovery.controller.device_unique_name = discovery.name
            while self.running:
                try:
                    # 0.5s timeout so self.running is re-checked periodically.
                    r, w, x = select(self.all_devices, [], [], 0.5)
                    for fd in r:
                        active_device = fd
                        controller_discovery = self.device_to_controller_discovery[active_device.fn]
                        controller = controller_discovery.controller
                        controller_devices = controller_discovery.devices
                        prefix = None
                        # Composite controllers expose several input nodes;
                        # a per-node prefix disambiguates their event codes.
                        if controller.node_mappings is not None and len(controller_devices) > 1:
                            try:
                                prefix = controller.node_mappings[active_device.name]
                            except KeyError:
                                pass
                        for event in active_device.read():
                            if print_events:
                                print(event)
                            if event.type == EV_ABS or event.type == EV_REL:
                                # Absolute or relative axis motion.
                                controller.axes.axis_updated(event, prefix=prefix)
                            elif event.type == EV_KEY:
                                # Button event
                                if event.value == 1:
                                    # Button down
                                    controller.buttons.button_pressed(event.code, prefix=prefix)
                                elif event.value == 0:
                                    # Button up
                                    controller.buttons.button_released(event.code, prefix=prefix)
                except Exception as e:
                    # Record the failure on the controllers and stop polling.
                    self.stop(e)

        def stop(self, exception=None):
            # Disconnect every controller, propagating any exception to them.
            for discovery in discoveries:
                discovery.controller.device_unique_name = None
                discovery.controller.exception = exception
            self.running = False

    polling_thread = SelectThread()
    # Force an update of the LED and battery system cache
    sys.scan_cache(force_update=True)
    for device in polling_thread.all_devices:
        device.grab()

    def unbind():
        # Stop the polling thread and release the grabbed devices.
        polling_thread.stop()
        for dev in polling_thread.all_devices:
            try:
                dev.ungrab()
            except IOError:
                pass

    polling_thread.start()
    return unbind
def _encode_time(mtime: float):
"""Encode a mtime float as a 32-bit FAT time"""
dt = arrow.get(mtime)
dt = dt.to("local")
date_val = ((dt.year - 1980) << 9) | (dt.month << 5) | dt.day
secs = dt.second + dt.microsecond / 10**6
time_val = (dt.hour << 11) | (dt.minute << 5) | math.floor(secs / 2)
return (date_val << 16) | time_val | Encode a mtime float as a 32-bit FAT time | entailment |
def threadpooled(
    func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]],
    *,
    loop_getter: None = None,
    loop_getter_need_context: bool = False,
) -> typing.Callable[..., "concurrent.futures.Future[typing.Any]"]:
    """Overload: function given, no loop getter -> wrapper returning concurrent.futures.Future."""
def threadpooled(
    func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]],
    *,
    loop_getter: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop],
    loop_getter_need_context: bool = False,
) -> typing.Callable[..., "asyncio.Task[typing.Any]"]:
    """Overload: function and loop getter given -> wrapper returning asyncio.Task."""
def threadpooled(
    func: None = None,
    *,
    loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None,
    loop_getter_need_context: bool = False,
) -> ThreadPooled:
    """Overload: no function (bare/argumented decorator form) -> ThreadPooled instance."""
def threadpooled(  # noqa: F811
    func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None,
    *,
    loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None,
    loop_getter_need_context: bool = False,
) -> typing.Union[
    ThreadPooled,
    typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"],
]:
    """Post function to ThreadPoolExecutor.

    :param func: function to wrap
    :type func: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]]
    :param loop_getter: Method to get event loop, if wrap in asyncio task
    :type loop_getter: typing.Union[
        None,
        typing.Callable[..., asyncio.AbstractEventLoop],
        asyncio.AbstractEventLoop
    ]
    :param loop_getter_need_context: Loop getter requires function context
    :type loop_getter_need_context: bool
    :return: ThreadPooled instance, if called as function or argumented decorator, else callable wrapper
    :rtype: typing.Union[ThreadPooled, typing.Callable[..., typing.Union[concurrent.futures.Future, typing.Awaitable]]]
    """
    # Both branches need the same configured decorator instance; build it
    # once, then either return it (argumented decorator form) or apply it.
    decorator = ThreadPooled(func=None, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context)
    if func is None:
        return decorator
    return decorator(func)  # type: ignore
def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None:
    """(Re)create the shared pool executor with the requested worker count.

    :param max_workers: Maximum workers
    :type max_workers: typing.Optional[int]
    """
    current = cls.__executor
    if isinstance(current, ThreadPoolExecutor):
        if current.max_workers == max_workers:
            # Already configured exactly as requested; nothing to do.
            return
        current.shutdown()
    cls.__executor = ThreadPoolExecutor(max_workers=max_workers)
def executor(self) -> "ThreadPoolExecutor":
    """Shared executor instance, (re)configured lazily on first access or after shutdown.

    :rtype: ThreadPoolExecutor
    """
    pool = self.__executor
    if not isinstance(pool, ThreadPoolExecutor) or pool.is_shutdown:
        self.configure()
    return self.__executor
def loop_getter(
    self
) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]:
    """Event-loop getter configured for this decorator: a callable, a loop instance, or None.

    :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]
    """
    return self.__loop_getter
def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]:
    """Resolve the event loop for this call, or None when no getter is configured."""
    getter = self.loop_getter
    if not callable(getter):
        # Either None or an AbstractEventLoop instance; pass through as-is.
        return getter
    if self.loop_getter_need_context:
        return getter(*args, **kwargs)
    return getter()
def _get_function_wrapper(
    self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]
) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]:
    """Build the real decorator around *func*.

    :param func: Wrapped function
    :type func: typing.Callable
    :return: wrapped coroutine or function
    :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]]
    """
    target = self._await_if_required(func)

    # noinspection PyMissingOrEmptyDocstring
    @functools.wraps(target)  # pylint: disable=missing-docstring
    def wrapper(*args: typing.Any, **kwargs: typing.Any):
        loop = self._get_loop(*args, **kwargs)
        if loop is not None:
            # Schedule on the resolved loop, still executing in our pool.
            return loop.run_in_executor(self.executor, functools.partial(target, *args, **kwargs))
        # No loop: hand back a plain concurrent.futures.Future.
        return self.executor.submit(target, *args, **kwargs)

    return wrapper
def needs_ssh(hostname, _socket=None):
    """
    Decide whether *hostname* refers to a remote machine needing SSH.

    Loopback names/addresses and the local machine's own FQDN or hostname
    (with or without the domain part) do not need SSH; anything else does.
    The optional ``_socket`` argument allows injecting a socket-like module
    for testing.
    """
    if hostname.lower() in ('localhost', '127.0.0.1', '127.0.1.1'):
        return False
    sock = _socket or socket
    if hostname == sock.getfqdn():
        return False
    full_name = sock.gethostname()
    short_name = full_name.split('.')[0]
    return hostname not in (full_name, short_name)
def get_python_executable(conn):
    """
    Probe the remote host for a usable Python interpreter path.

    Avoids the problem of different Python versions, or distros that do not
    use ``python`` but do ``python3``: tries each candidate via ``which`` and
    falls back to the connection's own interpreter when none is found.
    """
    # executables in order of preference:
    candidates = ('python3', 'python', 'python2.7')
    for candidate in candidates:
        conn.logger.debug('trying to determine remote python executable with %s' % candidate)
        out, err, code = check(conn, ['which', candidate])
        if code:
            conn.logger.warning('skipping %s, was not found in path' % candidate)
            continue
        try:
            return out[0].strip()
        except IndexError:
            conn.logger.warning('could not parse stdout: %s' % out)
    # if all fails, we just return whatever the main connection had
    conn.logger.info('Falling back to using interpreter: %s' % conn.interpreter)
    return conn.interpreter
def _detect_sudo(self, _execnet=None):
    """
    Open a throw-away, sudo-less connection to the remote host so that
    ``getpass.getuser()`` reliably reports the real remote user, then close
    it again.

    :return: True when sudo will be needed (remote user is not root),
        False otherwise.
    """
    exc = _execnet or execnet
    gateway = exc.makegateway(
        self._make_connection_string(self.hostname, use_sudo=False)
    )
    channel = gateway.remote_exec(
        'import getpass; channel.send(getpass.getuser())'
    )
    remote_user = channel.receive()
    gateway.exit()
    if remote_user == 'root':
        return False
    self.logger.debug('connection detected need for sudo')
    return True
def import_module(self, module):
    """
    Allows remote execution of a local module. Depending on the
    ``remote_import_system`` attribute it may use execnet's implementation
    or remoto's own based on JSON.

    .. note:: It is not possible to use execnet's remote execution model on
        connections that aren't SSH or Local.
    """
    # The original nested conditionals assigned LegacyModuleExecute for both
    # the None case and every non-'json' value; flatten to a single branch.
    if self.remote_import_system == 'json':
        self.remote_module = JsonModuleExecute(self, module, self.logger)
    else:
        self.remote_module = LegacyModuleExecute(self.gateway, module, self.logger)
    return self.remote_module
def get(name=None, fallback='ssh'):
    """
    Retrieve the matching backend class from a string. If no backend can be
    matched, a warning is logged and the ``fallback`` backend's class is
    returned instead (when ``fallback`` is falsy, None is returned).

    >>> get('ssh')
    <class 'remoto.backends.BaseConnection'>
    >>> get()
    <class 'remoto.backends.BaseConnection'>
    >>> get('non-existent')
    <class 'remoto.backends.BaseConnection'>
    >>> get('non-existent', 'openshift')
    <class 'remoto.backends.openshift.OpenshiftConnection'>
    """
    # Known scheme -> connection class table; several aliases share a backend.
    mapping = {
        'ssh': ssh.SshConnection,
        'oc': openshift.OpenshiftConnection,
        'openshift': openshift.OpenshiftConnection,
        'kubernetes': kubernetes.KubernetesConnection,
        'k8s': kubernetes.KubernetesConnection,
        'local': local.LocalConnection,
        'popen': local.LocalConnection,
        'localhost': local.LocalConnection,
        'docker': docker.DockerConnection,
        'podman': podman.PodmanConnection,
    }
    if not name:
        # FIX: the docstring example ``get()`` previously raised TypeError
        # because ``name`` had no default; ``name=None`` makes it valid and
        # is backward compatible.
        # fallsback to just plain local/ssh
        name = 'ssh'
    name = name.strip().lower()
    connection_class = mapping.get(name)
    if not connection_class:
        logger.warning('no connection backend found for: "%s"' % name)
        if fallback:
            logger.info('falling back to "%s"' % fallback)
            # this assumes that ``fallback`` is a valid mapping name
            return mapping.get(fallback)
    return connection_class
def set_leds(self, hue: float = 0.0, saturation: float = 1.0, value: float = 1.0):
    """
    Set the DualShock4's front LED bar colour from an HSV triple.  The
    controller must be connected for this to take effect; otherwise the call
    is ignored.

    :param hue:
        The hue of the colour, defaults to 0, specified as a floating point value between 0.0 and 1.0.
    :param saturation:
        Saturation of the colour, defaults to 1.0, specified as a floating point value between 0.0 and 1.0.
    :param value:
        Value of the colour (i.e. how bright the light is overall), defaults to 1.0, specified as a floating point
        value between 0.0 and 1.0
    """
    red, green, blue = hsv_to_rgb(hue, saturation, value)
    # Each channel driver expects a 0-255 brightness level.
    for channel_name, level in (('red', red), ('green', green), ('blue', blue)):
        write_led_value(self.device_unique_name, channel_name, level * 255.0)
def config(env=DEFAULT_ENV, default='locmem://'):
    """Build the configured CACHES dictionary from the CACHE_URL env variable."""
    cache_url = os.environ.get(env, default)
    # An empty/unset URL yields an empty config rather than a parse error.
    return parse(cache_url) if cache_url else {}
def parse(url):
    """Parse a cache URL into a Django cache configuration dict.

    :param url: cache URL, e.g. ``redis://:password@host:6379/1?MAX_ENTRIES=500``
    :return: dict with at least ``BACKEND`` and usually ``LOCATION``/``OPTIONS``
    :raises Exception: when the URL scheme matches no known backend
    """
    config = {}
    url = urlparse.urlparse(url)
    # Handle python 2.6 broken url parsing
    path, query = url.path, url.query
    if '?' in path and query == '':
        path, query = path.split('?', 1)
    # Query-string args become upper-cased cache arguments.
    cache_args = dict([(key.upper(), ';'.join(val)) for key, val in
                       urlparse.parse_qs(query).items()])
    # Update with environment configuration.
    backend = BACKENDS.get(url.scheme)
    if not backend:
        raise Exception('Unknown backend: "{0}"'.format(url.scheme))
    # FIX: reuse the looked-up backend instead of a redundant second
    # BACKENDS[url.scheme] dict access.
    config['BACKEND'] = backend
    redis_options = {}
    if url.scheme == 'hiredis':
        redis_options['PARSER_CLASS'] = 'redis.connection.HiredisParser'
    # File based
    if not url.netloc:
        if url.scheme in ('memcached', 'pymemcached', 'djangopylibmc'):
            config['LOCATION'] = 'unix:' + path
        elif url.scheme in ('redis', 'hiredis'):
            match = re.match(r'.+?(?P<db>\d+)', path)
            if match:
                db = match.group('db')
                path = path[:path.rfind('/')]
            else:
                db = '0'
            config['LOCATION'] = 'unix:%s:%s' % (path, db)
        else:
            config['LOCATION'] = path
    # URL based
    else:
        # Handle multiple hosts
        config['LOCATION'] = ';'.join(url.netloc.split(','))
        if url.scheme in ('redis', 'hiredis'):
            if url.password:
                redis_options['PASSWORD'] = url.password
            # Specifying the database is optional, use db 0 if not specified.
            db = path[1:] or '0'
            port = url.port if url.port else 6379
            config['LOCATION'] = "redis://%s:%s/%s" % (url.hostname, port, db)
    if redis_options:
        config.setdefault('OPTIONS', {}).update(redis_options)
    if url.scheme == 'uwsgicache':
        config['LOCATION'] = config.get('LOCATION', 'default') or 'default'
    # Pop special options from cache_args
    # https://docs.djangoproject.com/en/1.10/topics/cache/#cache-arguments
    options = {}
    for key in ['MAX_ENTRIES', 'CULL_FREQUENCY']:
        val = cache_args.pop(key, None)
        if val is not None:
            options[key] = int(val)
    if options:
        config.setdefault('OPTIONS', {}).update(options)
    config.update(cache_args)
    return config
def memory_changed(url=URL):
    """Return True when the card reports its memory was written to, else False."""
    response = _get(Operation.memory_changed, url)
    try:
        flag = int(response.text)
    except ValueError:
        raise IOError("Likely no FlashAir connection, "
                      "memory changed CGI command failed")
    return flag == 1
def _get(operation: Operation, url=URL, **params):
    """Issue an HTTP GET against the FlashAir command.cgi entrypoint."""
    request = _prep_get(operation, url=url, **params)
    return cgi.send(request)
def do_translate(parser, token):
    """
    This will mark a string for translation and will
    translate the string for the current language.

    Usage::
        {% trans "this is a test" %}

    This will mark the string for translation so it will
    be pulled out by mark-messages.py into the .po files
    and will run the string through the translation engine.

    There is a second form::
        {% trans "this is a test" noop %}

    This will only mark for translation, but will return
    the string unchanged. Use it when you need to store
    values into forms that should be translated later on.

    You can use variables instead of constant strings
    to translate stuff you marked somewhere else::
        {% trans variable %}

    This will just try to translate the contents of
    the variable ``variable``. Make sure that the string
    in there is something that is in the .po file.

    It is possible to store the translated string into a variable::
        {% trans "this is a test" as var %}
        {{ var }}

    Contextual translations are also supported::
        {% trans "this is a test" context "greeting" %}

    This is equivalent to calling pgettext instead of (u)gettext.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
    message_string = parser.compile_filter(bits[1])
    remaining = bits[2:]

    noop = False
    asvar = None
    message_context = None
    seen = set()  # guards against specifying the same option twice
    invalid_context = {'as', 'noop'}  # keywords that cannot be a context value

    while remaining:
        option = remaining.pop(0)
        if option in seen:
            raise TemplateSyntaxError(
                "The '%s' option was specified more than once." % option,
            )
        elif option == 'noop':
            noop = True
        elif option == 'context':
            # 'context' consumes the next token as its value.
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the context option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            if value in invalid_context:
                raise TemplateSyntaxError(
                    "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]),
                )
            message_context = parser.compile_filter(value)
        elif option == 'as':
            # 'as' consumes the next token as the target variable name.
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the as option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError(
                "Unknown argument for '%s' tag: '%s'. The only options "
                "available are 'noop', 'context' \"xxx\", and 'as VAR'." % (
                    bits[0], option,
                )
            )
        seen.add(option)

    # Route through Phrase's in-context editor node when enabled, otherwise
    # fall back to Django's stock TranslateNode.
    if phrase_settings.PHRASE_ENABLED:
        return PhraseTranslateNode(message_string, noop, asvar, message_context)
    else:
        return TranslateNode(message_string, noop, asvar, message_context)
def asynciotask(
    func: None = None,
    *,
    loop_getter: typing.Union[
        typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop
    ] = asyncio.get_event_loop,
    loop_getter_need_context: bool = False,
) -> AsyncIOTask:
    """Overload: no function (bare/argumented decorator form) -> AsyncIOTask instance."""
def asynciotask(  # noqa: F811
    func: typing.Optional[typing.Callable[..., "typing.Awaitable[typing.Any]"]] = None,
    *,
    loop_getter: typing.Union[
        typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop
    ] = asyncio.get_event_loop,
    loop_getter_need_context: bool = False,
) -> typing.Union[AsyncIOTask, typing.Callable[..., "asyncio.Task[typing.Any]"]]:
    """Wrap function in future and return.

    :param func: Function to wrap; ``None`` when used as an argumented decorator.
    :type func: typing.Optional[typing.Callable[..., typing.Awaitable]]
    :param loop_getter: Method to get event loop, if wrap in asyncio task
    :type loop_getter: typing.Union[
                           typing.Callable[..., asyncio.AbstractEventLoop],
                           asyncio.AbstractEventLoop
                       ]
    :param loop_getter_need_context: Loop getter requires function context
    :type loop_getter_need_context: bool
    :return: AsyncIOTask instance, if called as an argumented decorator, else callable wrapper
    :rtype: typing.Union[AsyncIOTask, typing.Callable[..., asyncio.Task]]
    """
    # Both usages construct the decorator identically (func=None); the only
    # difference is whether we immediately apply it to the wrapped function.
    decorator = AsyncIOTask(
        func=None, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context
    )
    if func is None:
        # @asynciotask(...) -- return the decorator instance itself.
        return decorator
    # @asynciotask -- apply directly.
    return decorator(func)  # type: ignore
:param func: Function to wrap
:type func: typing.Optional[typing.Callable[..., typing.Awaitable]]
:param loop_getter: Method to get event loop, if wrap in asyncio task
:type loop_getter: typing.Union[
typing.Callable[..., asyncio.AbstractEventLoop],
asyncio.AbstractEventLoop
]
:param loop_getter_need_context: Loop getter requires function context
:type loop_getter_need_context: bool
:return: AsyncIOTask instance, if called as function or argumented decorator, else callable wrapper
:rtype: typing.Union[AsyncIOTask, typing.Callable[..., asyncio.Task]] | entailment |
def get_loop(self, *args, **kwargs):  # type: (typing.Any, typing.Any) -> asyncio.AbstractEventLoop
    """Get the event loop for this decorator instance.

    ``loop_getter`` may be a loop instance (returned as-is) or a callable;
    a callable receives the wrapped call's arguments when
    ``loop_getter_need_context`` is true.
    """
    getter = self.loop_getter
    if not callable(getter):
        # A loop instance was supplied directly.
        return getter
    if self.loop_getter_need_context:
        # The getter wants the wrapped call's arguments.
        return getter(*args, **kwargs)  # pylint: disable=not-callable
    return getter()  # pylint: disable=not-callable
def _get_function_wrapper(
    self, func: typing.Callable[..., "typing.Awaitable[typing.Any]"]
) -> typing.Callable[..., "asyncio.Task[typing.Any]"]:
    """Here should be constructed and returned real decorator.

    :param func: Wrapped function
    :type func: typing.Callable[..., typing.Awaitable]
    :return: wrapper, which will produce asyncio.Task on call with function called inside it
    :rtype: typing.Callable[..., asyncio.Task]
    """
    # noinspection PyMissingOrEmptyDocstring
    @functools.wraps(func)  # pylint: disable=missing-docstring
    def wrapper(*args, **kwargs):  # type: (typing.Any, typing.Any) -> asyncio.Task[typing.Any]
        # Resolve the loop per call so context-aware getters see the arguments.
        loop = self.get_loop(*args, **kwargs)
        return loop.create_task(func(*args, **kwargs))

    return wrapper
:param func: Wrapped function
:type func: typing.Callable[..., typing.Awaitable]
:return: wrapper, which will produce asyncio.Task on call with function called inside it
:rtype: typing.Callable[..., asyncio.Task] | entailment |
def threaded(
    name: typing.Callable[..., typing.Any], daemon: bool = False, started: bool = False
) -> typing.Callable[..., threading.Thread]:
    """Overload: decorator applied directly to the target function."""
def threaded(  # noqa: F811
    name: typing.Optional[typing.Union[str, typing.Callable[..., typing.Any]]] = None,
    daemon: bool = False,
    started: bool = False,
) -> typing.Union[Threaded, typing.Callable[..., threading.Thread]]:
    """Run function in separate thread.

    :param name: New thread name.
        If callable: use as wrapped function.
        If none: use wrapped function name.
    :type name: typing.Union[None, str, typing.Callable]
    :param daemon: Daemonize thread.
    :type daemon: bool
    :param started: Return started thread
    :type started: bool
    :return: Threaded instance, if called as an argumented decorator, else callable wrapper
    :rtype: typing.Union[Threaded, typing.Callable[..., threading.Thread]]
    """
    if not callable(name):
        # Argumented decorator form: @threaded(name=..., daemon=..., started=...)
        return Threaded(name=name, daemon=daemon, started=started)
    # Direct form: @threaded -- "name" actually holds the wrapped function.
    target = name
    thread_name = "Threaded: " + getattr(target, "__name__", str(hash(target)))
    return Threaded(name=thread_name, daemon=daemon, started=started)(target)  # type: ignore
:param name: New thread name.
If callable: use as wrapped function.
If none: use wrapped function name.
:type name: typing.Union[None, str, typing.Callable]
:param daemon: Daemonize thread.
:type daemon: bool
:param started: Return started thread
:type started: bool
:return: Threaded instance, if called as function or argumented decorator, else callable wrapper
:rtype: typing.Union[Threaded, typing.Callable[..., threading.Thread]] | entailment |
def _get_function_wrapper(
    self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]
) -> typing.Callable[..., threading.Thread]:
    """Here should be constructed and returned real decorator.

    :param func: Wrapped function (may be a coroutine function)
    :type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]
    :return: wrapped function producing a threading.Thread on call
    :rtype: typing.Callable[..., threading.Thread]
    """
    # Coroutine functions are wrapped so the thread can drive them to completion.
    prepared: typing.Callable[..., typing.Any] = self._await_if_required(func)
    # Fall back to a name derived from the original function when none was given.
    name: typing.Optional[str] = self.name
    if name is None:
        name = "Threaded: " + getattr(func, "__name__", str(hash(func)))

    # noinspection PyMissingOrEmptyDocstring
    @functools.wraps(prepared)  # pylint: disable=missing-docstring
    def wrapper(*args, **kwargs):  # type: (typing.Any, typing.Any) -> threading.Thread
        new_thread = threading.Thread(
            target=prepared, name=name, args=args, kwargs=kwargs, daemon=self.daemon
        )
        if self.started:
            new_thread.start()
        return new_thread

    return wrapper
:param func: Wrapped function
:type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]
:return: wrapped function
:rtype: typing.Callable[..., threading.Thread] | entailment |
def issuer(self, value):
    """
    An asn1crypto.x509.Certificate or oscrypto.asymmetric.Certificate object
    of the issuer.
    """
    is_oscrypto = isinstance(value, asymmetric.Certificate)
    if not is_oscrypto and not isinstance(value, x509.Certificate):
        raise TypeError(_pretty_message(
            '''
            issuer must be an instance of asn1crypto.x509.Certificate or
            oscrypto.asymmetric.Certificate, not %s
            ''',
            _type_name(value)
        ))
    # Normalize oscrypto wrappers to the underlying asn1crypto object.
    if is_oscrypto:
        value = value.asn1

    self._issuer = value
of the issuer. | entailment |
def key_hash_algo(self, value):
    """
    A unicode string of the hash algorithm to use when creating the
    certificate identifier - "sha1" (default), or "sha256".
    """
    # Validate against the closed set of supported digest algorithms.
    if value not in {'sha1', 'sha256'}:
        # BUGFIX: the message previously referenced "hash_algo" instead of
        # this attribute's actual name.
        raise ValueError(_pretty_message(
            '''
            key_hash_algo must be one of "sha1", "sha256", not %s
            ''',
            repr(value)
        ))
    self._key_hash_algo = value
certificate identifier - "sha1" (default), or "sha256". | entailment |
def nonce(self, value):
    """
    A bool - if the nonce extension should be used to prevent replay
    attacks.
    """
    if isinstance(value, bool):
        self._nonce = value
    else:
        raise TypeError(_pretty_message(
            '''
            nonce must be a boolean, not %s
            ''',
            _type_name(value)
        ))
attacks. | entailment |
def set_extension(self, name, value):
    """
    Sets the value for an extension using a fully constructed
    asn1crypto.core.Asn1Value object. Normally this should not be needed,
    and the convenience attributes should be sufficient.

    See the definition of asn1crypto.ocsp.TBSRequestExtension and
    asn1crypto.ocsp.RequestExtension to determine the appropriate object
    type for a given extension. Extensions are marked as critical when RFC
    6960 indicates so.

    :param name:
        A unicode string of an extension id name from
        asn1crypto.ocsp.TBSRequestExtensionId or
        asn1crypto.ocsp.RequestExtensionId. If the extension is not one
        defined in those classes, this must be an instance of one of the
        classes instead of a unicode string.

    :param value:
        A value object per the specs defined by
        asn1crypto.ocsp.TBSRequestExtension or
        asn1crypto.ocsp.RequestExtension
    """
    # Map a plain string name/OID to the proper extension-id class first.
    if isinstance(name, str_cls):
        request_extension_oids = set([
            'service_locator',
            '1.3.6.1.5.5.7.48.1.7'
        ])
        tbs_request_extension_oids = set([
            'nonce',
            'acceptable_responses',
            'preferred_signature_algorithms',
            '1.3.6.1.5.5.7.48.1.2',
            '1.3.6.1.5.5.7.48.1.4',
            '1.3.6.1.5.5.7.48.1.8'
        ])
        if name in request_extension_oids:
            name = ocsp.RequestExtensionId(name)
        elif name in tbs_request_extension_oids:
            name = ocsp.TBSRequestExtensionId(name)
        else:
            raise ValueError(_pretty_message(
                '''
                name must be a unicode string from
                asn1crypto.ocsp.TBSRequestExtensionId or
                asn1crypto.ocsp.RequestExtensionId, not %s
                ''',
                repr(name)
            ))

    # Build a skeleton extension so the value spec can be derived below.
    if isinstance(name, ocsp.RequestExtensionId):
        extension = ocsp.RequestExtension({'extn_id': name})
    elif isinstance(name, ocsp.TBSRequestExtensionId):
        extension = ocsp.TBSRequestExtension({'extn_id': name})
    else:
        raise TypeError(_pretty_message(
            '''
            name must be a unicode string or an instance of
            asn1crypto.ocsp.TBSRequestExtensionId or
            asn1crypto.ocsp.RequestExtensionId, not %s
            ''',
            _type_name(name)
        ))

    # We use native here to convert OIDs to meaningful names
    name = extension['extn_id'].native
    spec = extension.spec('extn_value')

    if not isinstance(value, spec) and value is not None:
        raise TypeError(_pretty_message(
            '''
            value must be an instance of %s, not %s
            ''',
            _type_name(spec),
            _type_name(value)
        ))

    # TBS-request extensions and per-request extensions live in separate dicts.
    if isinstance(extension, ocsp.TBSRequestExtension):
        extn_dict = self._tbs_request_extensions
    else:
        extn_dict = self._request_extensions

    # Passing None removes a previously-set extension.
    if value is None:
        if name in extn_dict:
            del extn_dict[name]
    else:
        extn_dict[name] = value
asn1crypto.core.Asn1Value object. Normally this should not be needed,
and the convenience attributes should be sufficient.
See the definition of asn1crypto.ocsp.TBSRequestExtension and
asn1crypto.ocsp.RequestExtension to determine the appropriate object
type for a given extension. Extensions are marked as critical when RFC
6960 indicates so.
:param name:
A unicode string of an extension id name from
asn1crypto.ocsp.TBSRequestExtensionId or
asn1crypto.ocsp.RequestExtensionId. If the extension is not one
defined in those classes, this must be an instance of one of the
classes instead of a unicode string.
:param value:
A value object per the specs defined by
asn1crypto.ocsp.TBSRequestExtension or
asn1crypto.ocsp.RequestExtension | entailment |
def build(self, requestor_private_key=None, requestor_certificate=None, other_certificates=None):
    """
    Validates the request information, constructs the ASN.1 structure and
    then optionally signs it.

    The requestor_private_key, requestor_certificate and other_certificates
    params are all optional and only necessary if the request needs to be
    signed. Signing a request is uncommon for OCSP requests related to web
    TLS connections.

    :param requestor_private_key:
        An asn1crypto.keys.PrivateKeyInfo or oscrypto.asymmetric.PrivateKey
        object for the private key to sign the request with

    :param requestor_certificate:
        An asn1crypto.x509.Certificate or oscrypto.asymmetric.Certificate
        object of the certificate associated with the private key

    :param other_certificates:
        A list of asn1crypto.x509.Certificate or
        oscrypto.asymmetric.Certificate objects that may be useful for the
        OCSP server to verify the request signature. Intermediate
        certificates would be specified here.

    :return:
        An asn1crypto.ocsp.OCSPRequest object of the request
    """
    def _make_extension(name, value):
        # All extensions produced here are non-critical.
        return {
            'extn_id': name,
            'critical': False,
            'extn_value': value
        }

    tbs_request_extensions = []
    request_extensions = []
    has_nonce = False

    for name, value in self._tbs_request_extensions.items():
        if name == 'nonce':
            has_nonce = True
        tbs_request_extensions.append(_make_extension(name, value))
    # Add an anti-replay nonce automatically unless one was set explicitly.
    if self._nonce and not has_nonce:
        tbs_request_extensions.append(
            _make_extension('nonce', util.rand_bytes(16))
        )
    if not tbs_request_extensions:
        tbs_request_extensions = None

    for name, value in self._request_extensions.items():
        request_extensions.append(_make_extension(name, value))
    if not request_extensions:
        request_extensions = None

    tbs_request = ocsp.TBSRequest({
        'request_list': [
            {
                'req_cert': {
                    'hash_algorithm': {
                        'algorithm': self._key_hash_algo
                    },
                    'issuer_name_hash': getattr(self._certificate.issuer, self._key_hash_algo),
                    'issuer_key_hash': getattr(self._issuer.public_key, self._key_hash_algo),
                    'serial_number': self._certificate.serial_number,
                },
                'single_request_extensions': request_extensions
            }
        ],
        'request_extensions': tbs_request_extensions
    })

    signature = None
    # Any signing-related argument triggers the (uncommon) signed-request path.
    if requestor_private_key or requestor_certificate or other_certificates:
        is_oscrypto = isinstance(requestor_private_key, asymmetric.PrivateKey)
        if not isinstance(requestor_private_key, keys.PrivateKeyInfo) and not is_oscrypto:
            raise TypeError(_pretty_message(
                '''
                requestor_private_key must be an instance of
                asn1crypto.keys.PrivateKeyInfo or
                oscrypto.asymmetric.PrivateKey, not %s
                ''',
                _type_name(requestor_private_key)
            ))
        cert_is_oscrypto = isinstance(requestor_certificate, asymmetric.Certificate)
        if not isinstance(requestor_certificate, x509.Certificate) and not cert_is_oscrypto:
            raise TypeError(_pretty_message(
                '''
                requestor_certificate must be an instance of
                asn1crypto.x509.Certificate or
                oscrypto.asymmetric.Certificate, not %s
                ''',
                _type_name(requestor_certificate)
            ))
        if other_certificates is not None and not isinstance(other_certificates, list):
            raise TypeError(_pretty_message(
                '''
                other_certificates must be a list of
                asn1crypto.x509.Certificate or
                oscrypto.asymmetric.Certificate objects, not %s
                ''',
                _type_name(other_certificates)
            ))
        if cert_is_oscrypto:
            requestor_certificate = requestor_certificate.asn1
        tbs_request['requestor_name'] = x509.GeneralName(
            name='directory_name',
            value=requestor_certificate.subject
        )
        certificates = [requestor_certificate]
        # BUGFIX: other_certificates is optional -- iterate an empty list when
        # it was omitted; previously this raised TypeError on None.
        for other_certificate in other_certificates or []:
            other_cert_is_oscrypto = isinstance(other_certificate, asymmetric.Certificate)
            if not isinstance(other_certificate, x509.Certificate) and not other_cert_is_oscrypto:
                raise TypeError(_pretty_message(
                    '''
                    other_certificate must be an instance of
                    asn1crypto.x509.Certificate or
                    oscrypto.asymmetric.Certificate, not %s
                    ''',
                    _type_name(other_certificate)
                ))
            if other_cert_is_oscrypto:
                other_certificate = other_certificate.asn1
            certificates.append(other_certificate)

        signature_algo = requestor_private_key.algorithm
        if signature_algo == 'ec':
            signature_algo = 'ecdsa'
        signature_algorithm_id = '%s_%s' % (self._hash_algo, signature_algo)

        if requestor_private_key.algorithm == 'rsa':
            sign_func = asymmetric.rsa_pkcs1v15_sign
        elif requestor_private_key.algorithm == 'dsa':
            sign_func = asymmetric.dsa_sign
        elif requestor_private_key.algorithm == 'ec':
            sign_func = asymmetric.ecdsa_sign
        # NOTE(review): an unsupported key algorithm would leave sign_func
        # unbound and raise NameError below -- consider an explicit error.

        if not is_oscrypto:
            requestor_private_key = asymmetric.load_private_key(requestor_private_key)
        signature_bytes = sign_func(requestor_private_key, tbs_request.dump(), self._hash_algo)

        signature = ocsp.Signature({
            'signature_algorithm': {'algorithm': signature_algorithm_id},
            'signature': signature_bytes,
            'certs': certificates
        })

    return ocsp.OCSPRequest({
        'tbs_request': tbs_request,
        'optional_signature': signature
    })
then optionally signs it.
The requestor_private_key, requestor_certificate and other_certificates
params are all optional and only necessary if the request needs to be
signed. Signing a request is uncommon for OCSP requests related to web
TLS connections.
:param requestor_private_key:
An asn1crypto.keys.PrivateKeyInfo or oscrypto.asymmetric.PrivateKey
object for the private key to sign the request with
:param requestor_certificate:
An asn1crypto.x509.Certificate or oscrypto.asymmetric.Certificate
object of the certificate associated with the private key
:param other_certificates:
A list of asn1crypto.x509.Certificate or
oscrypto.asymmetric.Certificate objects that may be useful for the
OCSP server to verify the request signature. Intermediate
certificates would be specified here.
:return:
An asn1crypto.ocsp.OCSPRequest object of the request | entailment |
def response_status(self, value):
    """
    The overall status of the response. Only a "successful" response will
    include information about the certificate. Other response types are for
    signaling info about the OCSP responder. Valid values include:

     - "successful" - when the response includes information about the certificate
     - "malformed_request" - when the request could not be understood
     - "internal_error" - when an internal error occurred with the OCSP responder
     - "try_later" - when the OCSP responder is temporarily unavailable
     - "sign_required" - when the OCSP request must be signed
     - "unauthorized" - when the responder is not the correct responder for the certificate
    """
    if not isinstance(value, str_cls):
        raise TypeError(_pretty_message(
            '''
            response_status must be a unicode string, not %s
            ''',
            _type_name(value)
        ))

    valid_response_statuses = set([
        'successful',
        'malformed_request',
        'internal_error',
        'try_later',
        'sign_required',
        'unauthorized'
    ])
    if value not in valid_response_statuses:
        raise ValueError(_pretty_message(
            '''
            response_status must be one of "successful",
            "malformed_request", "internal_error", "try_later",
            "sign_required", "unauthorized", not %s
            ''',
            repr(value)
        ))

    self._response_status = value
include information about the certificate. Other response types are for
signaling info about the OCSP responder. Valid values include:
- "successful" - when the response includes information about the certificate
- "malformed_request" - when the request could not be understood
- "internal_error" - when an internal error occurred with the OCSP responder
- "try_later" - when the OCSP responder is temporarily unavailable
- "sign_required" - when the OCSP request must be signed
- "unauthorized" - when the responder is not the correct responder for the certificate | entailment |
def certificate(self, value):
    """
    An asn1crypto.x509.Certificate or oscrypto.asymmetric.Certificate object
    of the certificate the response is about.
    """
    # None is allowed so the certificate can be cleared/unset.
    if value is not None:
        is_oscrypto = isinstance(value, asymmetric.Certificate)
        if not is_oscrypto and not isinstance(value, x509.Certificate):
            raise TypeError(_pretty_message(
                '''
                certificate must be an instance of asn1crypto.x509.Certificate
                or oscrypto.asymmetric.Certificate, not %s
                ''',
                _type_name(value)
            ))
        # Normalize oscrypto wrappers to the underlying asn1crypto object.
        if is_oscrypto:
            value = value.asn1

    self._certificate = value
of the certificate the response is about. | entailment |
def certificate_status(self, value):
    """
    A unicode string of the status of the certificate. Valid values include:

     - "good" - when the certificate is in good standing
     - "revoked" - when the certificate is revoked without a reason code
     - "key_compromise" - when a private key is compromised
     - "ca_compromise" - when the CA issuing the certificate is compromised
     - "affiliation_changed" - when the certificate subject name changed
     - "superseded" - when the certificate was replaced with a new one
     - "cessation_of_operation" - when the certificate is no longer needed
     - "certificate_hold" - when the certificate is temporarily invalid
     - "remove_from_crl" - only delta CRLs - when temporary hold is removed
     - "privilege_withdrawn" - one of the usages for a certificate was removed
     - "unknown" - when the responder doesn't know about the certificate being requested
    """
    # None is allowed so the status can be cleared/unset.
    if value is not None:
        if not isinstance(value, str_cls):
            raise TypeError(_pretty_message(
                '''
                certificate_status must be a unicode string, not %s
                ''',
                _type_name(value)
            ))

        valid_certificate_statuses = set([
            'good',
            'revoked',
            'key_compromise',
            'ca_compromise',
            'affiliation_changed',
            'superseded',
            'cessation_of_operation',
            'certificate_hold',
            'remove_from_crl',
            'privilege_withdrawn',
            'unknown',
        ])
        if value not in valid_certificate_statuses:
            # BUGFIX: added the missing comma before "not %s" in the message.
            raise ValueError(_pretty_message(
                '''
                certificate_status must be one of "good", "revoked", "key_compromise",
                "ca_compromise", "affiliation_changed", "superseded",
                "cessation_of_operation", "certificate_hold", "remove_from_crl",
                "privilege_withdrawn", "unknown", not %s
                ''',
                repr(value)
            ))

    self._certificate_status = value
- "good" - when the certificate is in good standing
- "revoked" - when the certificate is revoked without a reason code
- "key_compromise" - when a private key is compromised
- "ca_compromise" - when the CA issuing the certificate is compromised
- "affiliation_changed" - when the certificate subject name changed
- "superseded" - when the certificate was replaced with a new one
- "cessation_of_operation" - when the certificate is no longer needed
- "certificate_hold" - when the certificate is temporarily invalid
- "remove_from_crl" - only delta CRLs - when temporary hold is removed
- "privilege_withdrawn" - one of the usages for a certificate was removed
- "unknown" - when the responder doesn't know about the certificate being requested | entailment |
def revocation_date(self, value):
    """
    A datetime.datetime object of when the certificate was revoked, if the
    status is not "good" or "unknown".
    """
    # None is allowed so the date can be cleared/unset.
    if value is not None and not isinstance(value, datetime):
        raise TypeError(_pretty_message(
            '''
            revocation_date must be an instance of datetime.datetime, not %s
            ''',
            _type_name(value)
        ))

    self._revocation_date = value
status is not "good" or "unknown". | entailment |
def certificate_issuer(self, value):
    """
    An asn1crypto.x509.Certificate object of the issuer of the certificate.
    This should only be set if the OCSP responder is not the issuer of
    the certificate, but instead a special certificate only for OCSP
    responses.
    """
    # None is allowed so the issuer override can be cleared/unset.
    if value is not None:
        is_oscrypto = isinstance(value, asymmetric.Certificate)
        if not is_oscrypto and not isinstance(value, x509.Certificate):
            raise TypeError(_pretty_message(
                '''
                certificate_issuer must be an instance of
                asn1crypto.x509.Certificate or
                oscrypto.asymmetric.Certificate, not %s
                ''',
                _type_name(value)
            ))
        # Normalize oscrypto wrappers to the underlying asn1crypto object.
        if is_oscrypto:
            value = value.asn1

    self._certificate_issuer = value
This should only be set if the OCSP responder is not the issuer of
the certificate, but instead a special certificate only for OCSP
responses. | entailment |
def this_update(self, value):
    """
    A datetime.datetime object of when the response was generated.
    """
    if not isinstance(value, datetime):
        raise TypeError(_pretty_message(
            '''
            this_update must be an instance of datetime.datetime, not %s
            ''',
            _type_name(value)
        ))

    self._this_update = value
def next_update(self, value):
    """
    A datetime.datetime object of when the response may next change. This
    should only be set if responses are cached. If responses are generated
    fresh on every request, this should not be set.
    """
    if not isinstance(value, datetime):
        raise TypeError(_pretty_message(
            '''
            next_update must be an instance of datetime.datetime, not %s
            ''',
            _type_name(value)
        ))

    self._next_update = value
should only be set if responses are cached. If responses are generated
fresh on every request, this should not be set. | entailment |
def set_extension(self, name, value):
    """
    Sets the value for an extension using a fully constructed
    asn1crypto.core.Asn1Value object. Normally this should not be needed,
    and the convenience attributes should be sufficient.

    See the definition of asn1crypto.ocsp.SingleResponseExtension and
    asn1crypto.ocsp.ResponseDataExtension to determine the appropriate
    object type for a given extension. Extensions are marked as critical
    when RFC 6960 indicates so.

    :param name:
        A unicode string of an extension id name from
        asn1crypto.ocsp.SingleResponseExtensionId or
        asn1crypto.ocsp.ResponseDataExtensionId. If the extension is not one
        defined in those classes, this must be an instance of one of the
        classes instead of a unicode string.

    :param value:
        A value object per the specs defined by
        asn1crypto.ocsp.SingleResponseExtension or
        asn1crypto.ocsp.ResponseDataExtension
    """
    # Map a plain string name/OID to the proper extension-id class first.
    if isinstance(name, str_cls):
        response_data_extension_oids = set([
            'nonce',
            'extended_revoke',
            '1.3.6.1.5.5.7.48.1.2',
            '1.3.6.1.5.5.7.48.1.9'
        ])
        single_response_extension_oids = set([
            'crl',
            'archive_cutoff',
            'crl_reason',
            'invalidity_date',
            'certificate_issuer',
            '1.3.6.1.5.5.7.48.1.3',
            '1.3.6.1.5.5.7.48.1.6',
            '2.5.29.21',
            '2.5.29.24',
            '2.5.29.29'
        ])
        if name in response_data_extension_oids:
            name = ocsp.ResponseDataExtensionId(name)
        elif name in single_response_extension_oids:
            name = ocsp.SingleResponseExtensionId(name)
        else:
            raise ValueError(_pretty_message(
                '''
                name must be a unicode string from
                asn1crypto.ocsp.ResponseDataExtensionId or
                asn1crypto.ocsp.SingleResponseExtensionId, not %s
                ''',
                repr(name)
            ))

    # Build a skeleton extension so the value spec can be derived below.
    if isinstance(name, ocsp.ResponseDataExtensionId):
        extension = ocsp.ResponseDataExtension({'extn_id': name})
    elif isinstance(name, ocsp.SingleResponseExtensionId):
        extension = ocsp.SingleResponseExtension({'extn_id': name})
    else:
        raise TypeError(_pretty_message(
            '''
            name must be a unicode string or an instance of
            asn1crypto.ocsp.SingleResponseExtensionId or
            asn1crypto.ocsp.ResponseDataExtensionId, not %s
            ''',
            _type_name(name)
        ))

    # We use native here to convert OIDs to meaningful names
    name = extension['extn_id'].native
    spec = extension.spec('extn_value')

    # These extensions have dedicated APIs and may not be set here.
    if name == 'nonce':
        raise ValueError(_pretty_message(
            '''
            The nonce value should be set via the .nonce attribute, not the
            .set_extension() method
            '''
        ))
    if name == 'crl_reason':
        raise ValueError(_pretty_message(
            '''
            The crl_reason value should be set via the certificate_status
            parameter of the OCSPResponseBuilder() constructor, not the
            .set_extension() method
            '''
        ))
    if name == 'certificate_issuer':
        raise ValueError(_pretty_message(
            '''
            The certificate_issuer value should be set via the
            .certificate_issuer attribute, not the .set_extension() method
            '''
        ))

    if not isinstance(value, spec) and value is not None:
        raise TypeError(_pretty_message(
            '''
            value must be an instance of %s, not %s
            ''',
            _type_name(spec),
            _type_name(value)
        ))

    # Response-data and single-response extensions live in separate dicts.
    if isinstance(extension, ocsp.ResponseDataExtension):
        extn_dict = self._response_data_extensions
    else:
        extn_dict = self._single_response_extensions

    # Passing None removes a previously-set extension.
    if value is None:
        if name in extn_dict:
            del extn_dict[name]
    else:
        extn_dict[name] = value
asn1crypto.core.Asn1Value object. Normally this should not be needed,
and the convenience attributes should be sufficient.
See the definition of asn1crypto.ocsp.SingleResponseExtension and
asn1crypto.ocsp.ResponseDataExtension to determine the appropriate
object type for a given extension. Extensions are marked as critical
when RFC 6960 indicates so.
:param name:
A unicode string of an extension id name from
asn1crypto.ocsp.SingleResponseExtensionId or
asn1crypto.ocsp.ResponseDataExtensionId. If the extension is not one
defined in those classes, this must be an instance of one of the
classes instead of a unicode string.
:param value:
A value object per the specs defined by
asn1crypto.ocsp.SingleResponseExtension or
asn1crypto.ocsp.ResponseDataExtension | entailment |
def build(self, responder_private_key=None, responder_certificate=None):
    """
    Validates the request information, constructs the ASN.1 structure and
    signs it.

    The responder_private_key and responder_certificate parameters are only
    required if the response_status is "successful".

    :param responder_private_key:
        An asn1crypto.keys.PrivateKeyInfo or oscrypto.asymmetric.PrivateKey
        object for the private key to sign the response with

    :param responder_certificate:
        An asn1crypto.x509.Certificate or oscrypto.asymmetric.Certificate
        object of the certificate associated with the private key

    :return:
        An asn1crypto.ocsp.OCSPResponse object of the response
    """
    # Non-successful statuses carry no response payload at all.
    if self._response_status != 'successful':
        return ocsp.OCSPResponse({
            'response_status': self._response_status
        })

    is_oscrypto = isinstance(responder_private_key, asymmetric.PrivateKey)
    if not isinstance(responder_private_key, keys.PrivateKeyInfo) and not is_oscrypto:
        raise TypeError(_pretty_message(
            '''
            responder_private_key must be an instance of
            asn1crypto.keys.PrivateKeyInfo or
            oscrypto.asymmetric.PrivateKey, not %s
            ''',
            _type_name(responder_private_key)
        ))

    cert_is_oscrypto = isinstance(responder_certificate, asymmetric.Certificate)
    if not isinstance(responder_certificate, x509.Certificate) and not cert_is_oscrypto:
        raise TypeError(_pretty_message(
            '''
            responder_certificate must be an instance of
            asn1crypto.x509.Certificate or
            oscrypto.asymmetric.Certificate, not %s
            ''',
            _type_name(responder_certificate)
        ))
    if cert_is_oscrypto:
        responder_certificate = responder_certificate.asn1

    if self._certificate is None:
        raise ValueError(_pretty_message(
            '''
            certificate must be set if the response_status is
            "successful"
            '''
        ))
    if self._certificate_status is None:
        raise ValueError(_pretty_message(
            '''
            certificate_status must be set if the response_status is
            "successful"
            '''
        ))

    def _make_extension(name, value):
        # All extensions produced here are non-critical.
        return {
            'extn_id': name,
            'critical': False,
            'extn_value': value
        }

    response_data_extensions = []
    single_response_extensions = []

    for name, value in self._response_data_extensions.items():
        response_data_extensions.append(_make_extension(name, value))
    if self._nonce:
        # Echo the request nonce to tie this response to its request.
        response_data_extensions.append(
            _make_extension('nonce', self._nonce)
        )
    if not response_data_extensions:
        response_data_extensions = None

    for name, value in self._single_response_extensions.items():
        single_response_extensions.append(_make_extension(name, value))
    if self._certificate_issuer:
        single_response_extensions.append(
            _make_extension(
                'certificate_issuer',
                [
                    x509.GeneralName(
                        name='directory_name',
                        value=self._certificate_issuer.subject
                    )
                ]
            )
        )
    if not single_response_extensions:
        single_response_extensions = None

    responder_key_hash = getattr(responder_certificate.public_key, self._key_hash_algo)

    if self._certificate_status == 'good':
        cert_status = ocsp.CertStatus(
            name='good',
            value=core.Null()
        )
    elif self._certificate_status == 'unknown':
        cert_status = ocsp.CertStatus(
            name='unknown',
            value=core.Null()
        )
    else:
        status = self._certificate_status
        # A plain "revoked" status maps to the "unspecified" reason code.
        reason = status if status != 'revoked' else 'unspecified'
        cert_status = ocsp.CertStatus(
            name='revoked',
            value={
                'revocation_time': self._revocation_date,
                'revocation_reason': reason,
            }
        )

    issuer = self._certificate_issuer if self._certificate_issuer else responder_certificate
    if issuer.subject != self._certificate.issuer:
        raise ValueError(_pretty_message(
            '''
            responder_certificate does not appear to be the issuer for
            the certificate. Perhaps set the .certificate_issuer attribute?
            '''
        ))

    produced_at = datetime.now(timezone.utc)

    if self._this_update is None:
        self._this_update = produced_at

    if self._next_update is None:
        # Default cache validity window of one week.
        self._next_update = self._this_update + timedelta(days=7)

    response_data = ocsp.ResponseData({
        'responder_id': ocsp.ResponderId(name='by_key', value=responder_key_hash),
        'produced_at': produced_at,
        'responses': [
            {
                'cert_id': {
                    'hash_algorithm': {
                        'algorithm': self._key_hash_algo
                    },
                    'issuer_name_hash': getattr(self._certificate.issuer, self._key_hash_algo),
                    'issuer_key_hash': getattr(issuer.public_key, self._key_hash_algo),
                    'serial_number': self._certificate.serial_number,
                },
                'cert_status': cert_status,
                'this_update': self._this_update,
                'next_update': self._next_update,
                'single_extensions': single_response_extensions
            }
        ],
        'response_extensions': response_data_extensions
    })

    signature_algo = responder_private_key.algorithm
    if signature_algo == 'ec':
        signature_algo = 'ecdsa'
    signature_algorithm_id = '%s_%s' % (self._hash_algo, signature_algo)

    if responder_private_key.algorithm == 'rsa':
        sign_func = asymmetric.rsa_pkcs1v15_sign
    elif responder_private_key.algorithm == 'dsa':
        sign_func = asymmetric.dsa_sign
    elif responder_private_key.algorithm == 'ec':
        sign_func = asymmetric.ecdsa_sign
    # NOTE(review): an unsupported key algorithm would leave sign_func
    # unbound and raise NameError below -- consider an explicit error.

    if not is_oscrypto:
        responder_private_key = asymmetric.load_private_key(responder_private_key)
    signature_bytes = sign_func(responder_private_key, response_data.dump(), self._hash_algo)

    certs = None
    if self._certificate_issuer:
        # Bundle the delegated responder certificate so clients can verify.
        certs = [responder_certificate]

    return ocsp.OCSPResponse({
        'response_status': self._response_status,
        'response_bytes': {
            'response_type': 'basic_ocsp_response',
            'response': {
                'tbs_response_data': response_data,
                'signature_algorithm': {'algorithm': signature_algorithm_id},
                'signature': signature_bytes,
                'certs': certs
            }
        }
    })
signs it.
The responder_private_key and responder_certificate parameters are only
required if the response_status is "successful".
:param responder_private_key:
An asn1crypto.keys.PrivateKeyInfo or oscrypto.asymmetric.PrivateKey
object for the private key to sign the response with
:param responder_certificate:
An asn1crypto.x509.Certificate or oscrypto.asymmetric.Certificate
object of the certificate associated with the private key
:return:
An asn1crypto.ocsp.OCSPResponse object of the response | entailment |
def make_openid_request(arq, keys, issuer, request_object_signing_alg, recv):
    """
    Build the signed JWT representing an OpenID authorization request,
    suitable for passing by value (the ``request`` parameter) or by
    reference (``request_uri``).

    :param arq: The Authorization request
    :param keys: Keys to use for signing/encrypting. A KeyJar instance
    :param issuer: Who is signing this JSON Web Token
    :param request_object_signing_alg: Which signing algorithm to use
    :param recv: The intended receiver of the request
    :return: JWT encoded OpenID request
    """
    signer = JWT(key_jar=keys, iss=issuer,
                 sign_alg=request_object_signing_alg)
    return signer.pack(arq.to_dict(), owner=issuer, recv=recv)
def verify(self, **kwargs):
    """Authorization Request parameters that are OPTIONAL in the OAuth 2.0
    specification MAY be included in the OpenID Request Object without also
    passing them as OAuth 2.0 Authorization Request parameters, with one
    exception: The scope parameter MUST always be present in OAuth 2.0
    Authorization Request parameters.
    All parameter values that are present both in the OAuth 2.0
    Authorization Request and in the OpenID Request Object MUST exactly
    match.

    :param kwargs: May carry 'keyjar', 'opponent_id', 'sender', 'alg',
        'encalg', 'encenc' (forwarded to JWT verification) and 'nonce'.
    :return: True if the request verifies
    :raises MissingRequiredAttribute: if response_type or a required nonce
        is absent
    :raises MissingRequiredValue: if 'openid' is not in scope, or
        offline_access is requested without prompt=consent
    :raises InvalidRequest: if prompt=none is combined with another value
    """
    super(AuthorizationRequest, self).verify(**kwargs)

    # Drop any previously cached verified claims before re-verifying.
    clear_verified_claims(self)

    # Collect the keyword arguments needed to verify embedded JWTs.
    args = {}
    for arg in ["keyjar", "opponent_id", "sender", "alg", "encalg",
                "encenc"]:
        try:
            args[arg] = kwargs[arg]
        except KeyError:
            pass

    # Default the JWT audience/owner to this request's client_id.
    if "opponent_id" not in kwargs:
        args["opponent_id"] = self["client_id"]

    if "request" in self:
        if isinstance(self["request"], str):
            # Try to decode the JWT, checks the signature
            oidr = OpenIDRequest().from_jwt(str(self["request"]), **args)

            # check if something is change in the original message
            for key, val in oidr.items():
                if key in self:
                    if self[key] != val:
                        # log but otherwise ignore
                        logger.warning('{} != {}'.format(self[key], val))

            # remove all claims
            # (the request object becomes the authoritative parameter set)
            _keys = list(self.keys())
            for key in _keys:
                if key not in oidr:
                    del self[key]

            self.update(oidr)

            # replace the JWT with the parsed and verified instance
            self[verified_claim_name("request")] = oidr

    if "id_token_hint" in self:
        if isinstance(self["id_token_hint"], str):
            # Parse and signature-check the id_token_hint JWT as well.
            idt = IdToken().from_jwt(str(self["id_token_hint"]), **args)
            self["verified_id_token_hint"] = idt

    if "response_type" not in self:
        raise MissingRequiredAttribute("response_type missing", self)

    _rt = self["response_type"]
    if "id_token" in _rt:
        # Implicit/hybrid flows that return an ID Token require a nonce.
        if "nonce" not in self:
            raise MissingRequiredAttribute("Nonce missing", self)
        else:
            # If the caller supplied an expected nonce, it must match.
            try:
                if self['nonce'] != kwargs['nonce']:
                    raise ValueError(
                        'Nonce in id_token not matching nonce in authz '
                        'request')
            except KeyError:
                pass

    if "openid" not in self.get("scope", []):
        raise MissingRequiredValue("openid not in scope", self)

    if "offline_access" in self.get("scope", []):
        # offline_access requires explicit user consent via prompt.
        if "prompt" not in self or "consent" not in self["prompt"]:
            raise MissingRequiredValue("consent in prompt", self)

    if "prompt" in self:
        # prompt=none is exclusive; it may not be combined with others.
        if "none" in self["prompt"] and len(self["prompt"]) > 1:
            raise InvalidRequest("prompt none combined with other value",
                                 self)

    return True
def verify(self, **kwargs):
    """
    Implementations MUST either return both a Client Configuration Endpoint
    and a Registration Access Token or neither of them.

    :param kwargs: Keyword arguments forwarded to the parent verify
    :return: True if the message is OK otherwise False
    :raises VerificationError: if exactly one of the two is present
    """
    super(RegistrationResponse, self).verify(**kwargs)

    present = [param in self for param in ("registration_client_uri",
                                           "registration_access_token")]
    # Either both or neither must be present -- exactly one is an error.
    if any(present) and not all(present):
        raise VerificationError((
            "Only one of registration_client_uri"
            " and registration_access_token present"), self)
    return True
def in_a_while(days=0, seconds=0, microseconds=0, milliseconds=0,
               minutes=0, hours=0, weeks=0, time_format=TIME_FORMAT):
    """
    Format the point in time a given offset from now as a string.

    :param days: Offset in days
    :param seconds: Offset in seconds
    :param microseconds: Offset in microseconds
    :param milliseconds: Offset in milliseconds
    :param minutes: Offset in minutes
    :param hours: Offset in hours
    :param weeks: Offset in weeks
    :param time_format: strftime format; falls back to TIME_FORMAT
    :return: Formatted time string
    """
    fmt = time_format or TIME_FORMAT
    future = time_in_a_while(days, seconds, microseconds, milliseconds,
                             minutes, hours, weeks)
    return future.strftime(fmt)
def before(point):
    """True if the current time has not yet reached *point*.

    *point* may be a string (parsed with str_to_time), an epoch integer,
    or a struct_time; a falsy value always counts as "before now".
    """
    if not point:
        return True
    when = point
    if isinstance(when, str):
        when = str_to_time(when)
    elif isinstance(when, int):
        when = time.gmtime(when)
    return time.gmtime() < when
def later_than(after, before):
    """True if *after* is the same moment as, or later than, *before*."""
    def _normalize(moment):
        # Accept a time string, an epoch integer, or a struct_time.
        if isinstance(moment, str):
            return str_to_time(moment)
        if isinstance(moment, int):
            return time.gmtime(moment)
        return moment

    return _normalize(after) >= _normalize(before)
def epoch_in_a_while(days=0, seconds=0, microseconds=0, milliseconds=0,
                     minutes=0, hours=0, weeks=0):
    """
    Return the number of seconds since epoch a while from now.

    :param days: Offset in days
    :param seconds: Offset in seconds
    :param microseconds: Offset in microseconds
    :param milliseconds: Offset in milliseconds
    :param minutes: Offset in minutes
    :param hours: Offset in hours
    :param weeks: Offset in weeks
    :return: Seconds since epoch (1970-01-01)
    """
    future = time_in_a_while(days, seconds, microseconds, milliseconds,
                             minutes, hours, weeks)
    epoch = datetime(1970, 1, 1)
    return int((future - epoch).total_seconds())
def set_defaults(self):
    """Populate this message with the default parameter values declared
    in the specification (``c_default``)."""
    self._dict.update(self.c_default)
def to_urlencoded(self, lev=0):
    """
    Creates a string using the application/x-www-form-urlencoded format

    :param lev: Nesting level
    :return: A string of the application/x-www-form-urlencoded format
    :raises MissingRequiredAttribute: if a REQUIRED parameter is absent
        (skipped when ``self.lax`` is set)
    """
    _spec = self.c_param
    if not self.lax:
        # Enforce presence of every REQUIRED parameter before encoding.
        for attribute, (_, req, _ser, _, na) in _spec.items():
            if req and attribute not in self._dict:
                raise MissingRequiredAttribute("%s" % attribute,
                                               "%s" % self)

    params = []

    for key, val in self._dict.items():
        # Look up the (type, required, serializer, deserializer,
        # null_allowed) spec for this key, with two fallbacks below.
        try:
            (_, req, _ser, _, null_allowed) = _spec[key]
        except KeyError:  # extra attribute
            try:
                # Parameter with a language tag, e.g. "display_name#ja".
                _key, lang = key.split("#")
                (_, req, _ser, _deser, null_allowed) = _spec[_key]
            except (ValueError, KeyError):
                try:
                    # Wildcard spec applies to any otherwise-unknown key.
                    (_, req, _ser, _, null_allowed) = _spec['*']
                except KeyError:
                    _ser = None
                    null_allowed = False

        if val is None and null_allowed is False:
            continue
        elif isinstance(val, str):
            # Should I allow parameters with "" as value ???
            params.append((key, val.encode("utf-8")))
        elif isinstance(val, list):
            if _ser:
                params.append((key, str(_ser(val, sformat="urlencoded",
                                              lev=lev))))
            else:
                # No serializer: repeat the key once per list item.
                for item in val:
                    params.append((key, str(item).encode('utf-8')))
        elif isinstance(val, Message):
            # Nested message: embed it as a JSON object value.
            try:
                _val = json.dumps(_ser(val, sformat="dict", lev=lev + 1))
                params.append((key, _val))
            except TypeError:
                params.append((key, val))
        elif val is None:
            params.append((key, val))
        else:
            try:
                params.append((key, _ser(val, lev=lev)))
            except Exception:
                # Fall back to the plain string representation.
                params.append((key, str(val)))

    try:
        return urlencode(params)
    except UnicodeEncodeError:
        # Retry with every value explicitly UTF-8 encoded.
        _val = []
        for k, v in params:
            try:
                _val.append((k, v.encode("utf-8")))
            except TypeError:
                _val.append((k, v))
        return urlencode(_val)
def serialize(self, method="urlencoded", lev=0, **kwargs):
    """
    Convert this instance to another representation, chosen by *method*.

    :param method: A serialization method. Presently 'urlencoded', 'json',
        'jwt' and 'dict' is supported.
    :param lev: Nesting level
    :param kwargs: Extra key word arguments handed to the serializer
    :return: The content of this message serialized using a chosen method
    """
    serializer = getattr(self, "to_%s" % method)
    return serializer(lev=lev, **kwargs)
def deserialize(self, info, method="urlencoded", **kwargs):
    """
    Convert an external representation *info* into this instance.

    :param info: The input
    :param method: The method used to deserialize the info
    :param kwargs: extra Keyword arguments
    :return: In the normal case the Message instance
    :raises FormatError: if no 'from_<method>' deserializer exists
    """
    try:
        deserializer = getattr(self, "from_%s" % method)
    except AttributeError:
        raise FormatError("Unknown serialization method (%s)" % method)
    return deserializer(info, **kwargs)
def from_urlencoded(self, urlencoded, **kwargs):
    """
    Starting with a string of the application/x-www-form-urlencoded format
    this method creates a class instance

    :param urlencoded: The string
    :param kwargs: Extra keyword arguments (unused)
    :return: A class instance or raise an exception on error
    :raises FormatError: if the input is not urlencoded
    :raises TooManyValues: if a single-valued parameter appears more
        than once
    """
    # parse_qs returns a dictionary with keys and values. The values are
    # always lists even if there is only one value in the list.
    # keys only appears once.

    if isinstance(urlencoded, str):
        pass
    elif isinstance(urlencoded, list):
        # Accept a single-element list wrapper as well.
        urlencoded = urlencoded[0]

    _spec = self.c_param
    _info = parse_qs(urlencoded)
    if len(urlencoded) and _info == {}:
        # Non-empty input that parsed to nothing is not urlencoded.
        raise FormatError('Wrong format')

    for key, val in _info.items():
        # Resolve the parameter spec; fall back to lang-tagged key,
        # then wildcard, then store the raw value for unknown keys.
        try:
            (typ, _, _, _deser, null_allowed) = _spec[key]
        except KeyError:
            try:
                _key, lang = key.split("#")
                (typ, _, _, _deser, null_allowed) = _spec[_key]
            except (ValueError, KeyError):
                try:
                    (typ, _, _, _deser, null_allowed) = _spec['*']
                except KeyError:
                    # Unknown parameter: keep it as-is (unwrap singletons).
                    if len(val) == 1:
                        val = val[0]
                    self._dict[key] = val
                    continue

        if isinstance(typ, list):
            # List-typed parameter.
            if _deser:
                self._dict[key] = _deser(val[0], "urlencoded")
            else:
                self._dict[key] = val
        else:  # must be single value
            if len(val) == 1:
                if _deser:
                    self._dict[key] = _deser(val[0], "urlencoded")
                elif isinstance(val[0], typ):
                    self._dict[key] = val[0]
                else:
                    self._dict[key] = val[0]
            else:
                raise TooManyValues('{}'.format(key))

    return self
def to_dict(self, lev=0):
    """
    Return a dictionary representation of the class

    :param lev: Nesting level
    :return: A dict
    """
    _spec = self.c_param

    _res = {}
    lev += 1
    for key, val in self._dict.items():
        # Find the serializer for this key: exact match, then
        # lang-tagged key, then wildcard, else no serializer.
        try:
            (_, req, _ser, _, null_allowed) = _spec[str(key)]
        except KeyError:
            try:
                _key, lang = key.split("#")
                (_, req, _ser, _, null_allowed) = _spec[_key]
            except (ValueError, KeyError):
                try:
                    (_, req, _ser, _, null_allowed) = _spec['*']
                except KeyError:
                    _ser = None

        if _ser:
            val = _ser(val, "dict", lev)

        if isinstance(val, Message):
            _res[key] = val.to_dict(lev + 1)
        elif isinstance(val, list) and isinstance(
                next(iter(val or []), None), Message):
            # A list whose first element is a Message: convert each item.
            _res[key] = [v.to_dict(lev) for v in val]
        else:
            _res[key] = val

    return _res
def from_dict(self, dictionary, **kwargs):
    """
    Direct translation, so the value for one key might be a list or a
    single value.

    :param dictionary: The info
    :param kwargs: Extra keyword arguments (unused)
    :return: A class instance or raise an exception on error
    """
    _spec = self.c_param

    for key, val in dictionary.items():
        # Earlier versions of python don't like unicode strings as
        # variable names
        if val == "" or val == [""]:
            # Skip empty values entirely.
            continue

        skey = str(key)
        # Resolve the parameter spec: exact key, then lang-tagged key,
        # then wildcard; unknown keys are stored verbatim.
        try:
            (vtyp, req, _, _deser, null_allowed) = _spec[key]
        except KeyError:
            # might be a parameter with a lang tag
            try:
                _key, lang = skey.split("#")
            except ValueError:
                # No lang tag; try the wildcard spec.
                try:
                    (vtyp, _, _, _deser, null_allowed) = _spec['*']
                    if val is None:
                        self._dict[key] = val
                        continue
                except KeyError:
                    self._dict[key] = val
                    continue
            else:
                # Lang-tagged key; look up the base key's spec.
                try:
                    (vtyp, req, _, _deser, null_allowed) = _spec[_key]
                except KeyError:
                    try:
                        (vtyp, _, _, _deser, null_allowed) = _spec['*']
                        if val is None:
                            self._dict[key] = val
                            continue
                    except KeyError:
                        self._dict[key] = val
                        continue

        # Delegate type checking/coercion to the shared helper.
        self._add_value(skey, vtyp, key, val, _deser, null_allowed)
    return self
def _add_value(self, skey, vtyp, key, val, _deser, null_allowed):
    """
    Main method for adding a value to the instance. Does all the
    checking on type of value and if among allowed values.

    :param skey: string version of the key
    :param vtyp: Type of value
    :param key: original representation of the key
    :param val: The value to add
    :param _deser: A deserializer for this value type
    :param null_allowed: Whether null is an allowed value for this key
    :raises DecodeError: if the value cannot be coerced to *vtyp*
    :raises ValueError: if the value has the wrong type
    """
    if isinstance(val, list):
        # Drop empty/None-first lists unless null is explicitly allowed.
        if (len(val) == 0 or val[0] is None) and null_allowed is False:
            return

    if isinstance(vtyp, tuple):
        # A tuple spec: the first entry is the authoritative type.
        vtyp = vtyp[0]

    if isinstance(vtyp, list):
        # Spec says "list of vtype" values.
        vtype = vtyp[0]
        if isinstance(val, vtype):
            # A bare element: wrap or deserialize into a list.
            if issubclass(vtype, Message):
                self._dict[skey] = [val]
            elif _deser:
                try:
                    self._dict[skey] = _deser(val, sformat="urlencoded")
                except Exception as exc:
                    raise DecodeError(ERRTXT % (key, exc))
            else:
                setattr(self, skey, [val])
        elif isinstance(val, list):
            if _deser:
                try:
                    val = _deser(val, sformat="dict")
                except Exception as exc:
                    raise DecodeError(ERRTXT % (key, exc))

            if issubclass(vtype, Message):
                # Instantiate each dict item as a vtype Message.
                try:
                    _val = []
                    for v in val:
                        _val.append(vtype(**dict([(str(x), y) for x, y
                                                  in v.items()])))
                    val = _val
                except Exception as exc:
                    raise DecodeError(ERRTXT % (key, exc))
            else:
                # Plain element type: every item must already match.
                for v in val:
                    if not isinstance(v, vtype):
                        raise DecodeError(
                            ERRTXT % (key, "type != %s (%s)" % (
                                vtype, type(v))))

            self._dict[skey] = val
        elif isinstance(val, dict):
            # A single dict: deserialize it into the list value.
            try:
                val = _deser(val, sformat="dict")
            except Exception as exc:
                raise DecodeError(ERRTXT % (key, exc))
            else:
                self._dict[skey] = val
        else:
            raise DecodeError(ERRTXT % (key, "type != %s" % vtype))
    else:
        # Single-valued parameter.
        if val is None:
            self._dict[skey] = None
        elif isinstance(val, bool):
            # bool is checked before vtyp since bool is a subclass of int.
            if vtyp is bool:
                self._dict[skey] = val
            else:
                raise ValueError(
                    '"{}", wrong type of value for "{}"'.format(val, skey))
        elif isinstance(val, vtyp):  # Not necessary to do anything
            self._dict[skey] = val
        else:
            # Type mismatch: try to coerce, in priority order.
            if _deser:
                try:
                    val = _deser(val, sformat="dict")
                except Exception as exc:
                    raise DecodeError(ERRTXT % (key, exc))
                else:
                    # if isinstance(val, str):
                    #     self._dict[skey] = val
                    # elif isinstance(val, list):
                    #     if len(val) == 1:
                    #         self._dict[skey] = val[0]
                    #     elif not len(val):
                    #         pass
                    #     else:
                    #         raise TooManyValues(key)
                    # else:
                    self._dict[skey] = val
            elif vtyp is int:
                try:
                    self._dict[skey] = int(val)
                except (ValueError, TypeError):
                    raise ValueError(
                        '"{}", wrong type of value for "{}"'.format(val,
                                                                    skey))
            elif vtyp is bool:
                raise ValueError(
                    '"{}", wrong type of value for "{}"'.format(val, skey))
            elif vtyp != type(val):
                if vtyp == Message:
                    # A Message slot accepts a dict or a serialized string.
                    if type(val) == dict or isinstance(val, str):
                        self._dict[skey] = val
                    else:
                        raise ValueError(
                            '"{}", wrong type of value for "{}"'.format(
                                val, skey))
                else:
                    raise ValueError(
                        '"{}", wrong type of value for "{}"'.format(val,
                                                                    skey))
def to_json(self, lev=0, indent=None):
    """
    Serialize the content of this instance as JSON.

    At nesting level 0 a JSON string is returned; at deeper levels the
    dict representation is returned so the caller can embed it.

    :param lev: Nesting level
    :param indent: Number of spaces that should be used for indentation
    :return: A JSON string (lev == 0) or a dict (lev > 0)
    """
    if not lev:
        return json.dumps(self.to_dict(1), indent=indent)
    return self.to_dict(lev + 1)
def from_json(self, txt, **kwargs):
    """
    Populate this instance from a JSON document.

    :param txt: The JSON string (a ``str``, ``bytes`` or ``bytearray``
        instance containing a JSON document)
    :param kwargs: extra keyword arguments (ignored)
    :return: The instantiated instance
    """
    return self.from_dict(json.loads(txt))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.