sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def uuidify(val):
"""Takes an integer and transforms it to a UUID format.
returns: UUID formatted version of input.
"""
if uuidutils.is_uuid_like(val):
return val
else:
try:
int_val = int(val, 16)
except ValueError:
with excutils.save_and_reraise_exception():
LOG.error("Invalid UUID format %s. Please provide an "
"integer in decimal (0-9) or hex (0-9a-e) "
"format", val)
res = str(int_val)
num = 12 - len(res)
return "00000000-0000-0000-0000-" + "0" * num + res | Takes an integer and transforms it to a UUID format.
returns: UUID formatted version of input. | entailment |
def _ensure_format(rule, attribute, res_dict):
"""Verifies that attribute in res_dict is properly formatted.
Since, in the .ini-files, lists are specified as ':' separated text and
UUID values can be plain integers we need to transform any such values
into proper format. Empty strings are converted to None if validator
specifies that None value is accepted.
"""
if rule == 'type:uuid' or (rule == 'type:uuid_or_none' and
res_dict[attribute]):
res_dict[attribute] = uuidify(res_dict[attribute])
elif rule == 'type:uuid_list':
if not res_dict[attribute]:
res_dict[attribute] = []
else:
temp_list = res_dict[attribute].split(':')
res_dict[attribute] = []
for item in temp_list:
res_dict[attribute].append = uuidify(item)
elif rule == 'type:string_or_none' and res_dict[attribute] == "":
res_dict[attribute] = None | Verifies that attribute in res_dict is properly formatted.
Since, in the .ini-files, lists are specified as ':' separated text and
UUID values can be plain integers we need to transform any such values
into proper format. Empty strings are converted to None if validator
specifies that None value is accepted. | entailment |
def obtain_hosting_device_credentials_from_config():
"""Obtains credentials from config file and stores them in memory.
To be called before hosting device templates defined in the config file
are created.
"""
cred_dict = get_specific_config('cisco_hosting_device_credential')
attr_info = {
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None}, 'is_visible': True,
'default': ''},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'user_name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'password': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'type': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None}, 'is_visible': True,
'default': ''}}
credentials = {}
for cred_uuid, kv_dict in cred_dict.items():
# ensure cred_uuid is properly formatted
cred_uuid = uuidify(cred_uuid)
verify_resource_dict(kv_dict, True, attr_info)
credentials[cred_uuid] = kv_dict
return credentials | Obtains credentials from config file and stores them in memory.
To be called before hosting device templates defined in the config file
are created. | entailment |
def get_resources(cls):
"""Returns Ext Resources."""
plural_mappings = resource_helper.build_plural_mappings(
{}, RESOURCE_ATTRIBUTE_MAP)
if NEUTRON_VERSION.version[0] <= NEUTRON_NEWTON_VERSION.version[0]:
attr.PLURALS.update(plural_mappings)
return resource_helper.build_resource_info(plural_mappings,
RESOURCE_ATTRIBUTE_MAP,
bc.constants.L3) | Returns Ext Resources. | entailment |
def get_routertypes(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
"""Lists defined router types."""
pass | Lists defined router types. | entailment |
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
set_mysql_engine()
engine = session.create_engine(neutron_config.database.connection)
connection = engine.connect()
context.configure(
connection=connection,
target_metadata=target_metadata,
include_object=include_object,
version_table=alembic_migrations.VERSION_TABLE
)
try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close()
engine.dispose() | Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context. | entailment |
def has_role(self, role):
'''
Check whether the Launch Paramters set the role.
'''
return self.roles and any([re.search(role, our_role, re.I)
for our_role in self.roles]) | Check whether the Launch Paramters set the role. | entailment |
def username(self, default=None):
'''
Return the full, given, or family name if set.
'''
if self.lis_person_name_given:
return self.lis_person_name_given
elif self.lis_person_name_family:
return self.lis_person_name_family
elif self.lis_person_name_full:
return self.lis_person_name_full
else:
return default | Return the full, given, or family name if set. | entailment |
def post_replace_result(self, score, outcome_opts=defaultdict(lambda:None), result_data=None):
'''
POSTs the given score to the Tool Consumer with a replaceResult.
Returns OutcomeResponse object and stores it in self.outcome_request
OPTIONAL:
result_data must be a dictionary
Note: ONLY ONE of these values can be in the dict at a time,
due to the Canvas specification.
'text' : str text
'url' : str url
'''
return self.new_request(outcome_opts).post_replace_result(score, result_data) | POSTs the given score to the Tool Consumer with a replaceResult.
Returns OutcomeResponse object and stores it in self.outcome_request
OPTIONAL:
result_data must be a dictionary
Note: ONLY ONE of these values can be in the dict at a time,
due to the Canvas specification.
'text' : str text
'url' : str url | entailment |
def build_return_url(self):
'''
If the Tool Consumer sent a return URL, add any set messages to the
URL.
'''
if not self.launch_presentation_return_url:
return None
lti_message_fields = ['lti_errormsg', 'lti_errorlog',
'lti_msg', 'lti_log']
messages = dict([(key, getattr(self, key))
for key in lti_message_fields
if getattr(self, key, None)])
# Disassemble original return URL and reassemble with our options added
original = urlsplit(self.launch_presentation_return_url)
combined = messages.copy()
combined.update(dict(parse_qsl(original.query)))
combined_query = urlencode(combined)
return urlunsplit((
original.scheme,
original.netloc,
original.path,
combined_query,
original.fragment
)) | If the Tool Consumer sent a return URL, add any set messages to the
URL. | entailment |
def success_redirect(self, msg='', log=''):
'''
Shortcut for redirecting Django view to LTI Consumer with messages
'''
from django.shortcuts import redirect
self.lti_msg = msg
self.lti_log = log
return redirect(self.build_return_url()) | Shortcut for redirecting Django view to LTI Consumer with messages | entailment |
def error_redirect(self, errormsg='', errorlog=''):
'''
Shortcut for redirecting Django view to LTI Consumer with errors
'''
from django.shortcuts import redirect
self.lti_errormsg = errormsg
self.lti_errorlog = errorlog
return redirect(self.build_return_url()) | Shortcut for redirecting Django view to LTI Consumer with errors | entailment |
def _notify_thing_lid_change(self, from_lid, to_lid):
"""Used by Thing instances to indicate that a rename operation has happened"""
try:
with self.__private_things:
self.__private_things[to_lid] = self.__private_things.pop(from_lid)
except KeyError:
logger.warning('Thing %s renamed (to %s), but not in private lookup table', from_lid, to_lid)
else:
# renaming could happen before get_thing is called on the original
try:
with self.__new_things:
self.__new_things[to_lid] = self.__new_things.pop(from_lid)
except KeyError:
pass | Used by Thing instances to indicate that a rename operation has happened | entailment |
def register_catchall_feeddata(self, callback, callback_parsed=None):
"""
Registers a callback that is called for all feeddata your Thing receives
`Example`
#!python
def feeddata_callback(data):
print(data)
...
client.register_catchall_feeddata(feeddata_callback)
`callback` (required) the function name that you want to be called on receipt of new feed data
`callback_parsed` (optional) (function reference) callback function to invoke on receipt of feed data. This is
equivalent to `callback` except the dict includes the `parsed` key which holds the set of values in a
[PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject) instance. If both
`callback_parsed` and `callback` have been specified, the former takes precedence and `callback` is only called
if the point data could not be parsed according to its current value description.
`NOTE`: `callback_parsed` can only be used if `auto_encode_decode` is enabled for the client instance.
More details on the contents of the `data` dictionary for feeds see:
[follow()](./Thing.m.html#IoticAgent.IOT.Thing.Thing.follow)
"""
if callback_parsed:
callback = self._get_parsed_feed_callback(callback_parsed, callback)
return self.__client.register_callback_feeddata(callback) | Registers a callback that is called for all feeddata your Thing receives
`Example`
#!python
def feeddata_callback(data):
print(data)
...
client.register_catchall_feeddata(feeddata_callback)
`callback` (required) the function name that you want to be called on receipt of new feed data
`callback_parsed` (optional) (function reference) callback function to invoke on receipt of feed data. This is
equivalent to `callback` except the dict includes the `parsed` key which holds the set of values in a
[PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject) instance. If both
`callback_parsed` and `callback` have been specified, the former takes precedence and `callback` is only called
if the point data could not be parsed according to its current value description.
`NOTE`: `callback_parsed` can only be used if `auto_encode_decode` is enabled for the client instance.
More details on the contents of the `data` dictionary for feeds see:
[follow()](./Thing.m.html#IoticAgent.IOT.Thing.Thing.follow) | entailment |
def register_catchall_controlreq(self, callback, callback_parsed=None):
"""
Registers a callback that is called for all control requests received by your Thing
`Example`
#!python
def controlreq_callback(data):
print(data)
...
client.register_catchall_controlreq(controlreq_callback)
`callback` (required) the function name that you want to be called on receipt of a new control request
`callback_parsed` (optional) (function reference) callback function to invoke on receipt of a control ask/tell.
This is equivalent to `callback` except the dict includes the `parsed` key which holds the set of values in a
[PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject) instance. If both `callback_parsed` and
`callback` have been specified, the former takes precedence and `callback` is only called if the point data
could not be parsed according to its current value description.
More details on the contents of the `data` dictionary for controls see:
[create_control()](./Thing.m.html#IoticAgent.IOT.Thing.Thing.create_control)
"""
if callback_parsed:
callback = self._get_parsed_control_callback(callback_parsed, callback)
return self.__client.register_callback_controlreq(callback) | Registers a callback that is called for all control requests received by your Thing
`Example`
#!python
def controlreq_callback(data):
print(data)
...
client.register_catchall_controlreq(controlreq_callback)
`callback` (required) the function name that you want to be called on receipt of a new control request
`callback_parsed` (optional) (function reference) callback function to invoke on receipt of a control ask/tell.
This is equivalent to `callback` except the dict includes the `parsed` key which holds the set of values in a
[PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject) instance. If both `callback_parsed` and
`callback` have been specified, the former takes precedence and `callback` is only called if the point data
could not be parsed according to its current value description.
More details on the contents of the `data` dictionary for controls see:
[create_control()](./Thing.m.html#IoticAgent.IOT.Thing.Thing.create_control) | entailment |
def register_callback_created(self, func, serialised=True):
"""
Register a callback for resource creation. This will be called when any *new* resource
is created within your agent. If `serialised` is not set, the callbacks might arrive
in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of resource created
lid : <name> # the local name of the resource
id : <GUID> # the global Id of the resource
epId : <GUID> # the global Id of your agent
`Note` resource types are defined [here](../Core/Const.m.html)
`Example`
#!python
def created_callback(args):
print(args)
...
client.register_callback_created(created_callback)
This would print out something like the following on creation of an R_ENTITY
#!python
OrderedDict([(u'lid', u'new_thing1'), (u'r', 1),
(u'epId', u'ffd47b75ea786f55c76e337cdc47665a'),
(u'id', u'3f11df0a09588a6a1a9732e3837765f8')]))
"""
self.__client.register_callback_created(partial(self.__callback_payload_only, func), serialised=serialised) | Register a callback for resource creation. This will be called when any *new* resource
is created within your agent. If `serialised` is not set, the callbacks might arrive
in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of resource created
lid : <name> # the local name of the resource
id : <GUID> # the global Id of the resource
epId : <GUID> # the global Id of your agent
`Note` resource types are defined [here](../Core/Const.m.html)
`Example`
#!python
def created_callback(args):
print(args)
...
client.register_callback_created(created_callback)
This would print out something like the following on creation of an R_ENTITY
#!python
OrderedDict([(u'lid', u'new_thing1'), (u'r', 1),
(u'epId', u'ffd47b75ea786f55c76e337cdc47665a'),
(u'id', u'3f11df0a09588a6a1a9732e3837765f8')])) | entailment |
def register_callback_duplicate(self, func, serialised=True):
"""
Register a callback for resource creation but where the resource already exists in Iotic Space.
In this case the existing reference is passed to you.
If `serialised` is not set, the callbacks might arrive in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of existing resource
lid : <name> # the local name of the
# existing resource
id : <GUID> # the global Id of the
# existing resource
epId : <GUID> # the global Id of your agent
`Note` resource types are defined [here](../Core/Const.m.html)
`Example`
#!python
def duplicated_callback(args):
print(args)
...
client.register_callback_created(duplicated_callback)
This would print out something like the following on re-creation of an R_ENTITY
#!python
OrderedDict([(u'lid', u'new_thing1'), (u'r', 1),
(u'epId', u'ffd47b75ea786f55c76e337cdc47665a'),
(u'id', u'3f11df0a09588a6a1a9732e3837765f8')]))
"""
self.__client.register_callback_duplicate(partial(self.__callback_payload_only, func), serialised=serialised) | Register a callback for resource creation but where the resource already exists in Iotic Space.
In this case the existing reference is passed to you.
If `serialised` is not set, the callbacks might arrive in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of existing resource
lid : <name> # the local name of the
# existing resource
id : <GUID> # the global Id of the
# existing resource
epId : <GUID> # the global Id of your agent
`Note` resource types are defined [here](../Core/Const.m.html)
`Example`
#!python
def duplicated_callback(args):
print(args)
...
client.register_callback_created(duplicated_callback)
This would print out something like the following on re-creation of an R_ENTITY
#!python
OrderedDict([(u'lid', u'new_thing1'), (u'r', 1),
(u'epId', u'ffd47b75ea786f55c76e337cdc47665a'),
(u'id', u'3f11df0a09588a6a1a9732e3837765f8')])) | entailment |
def register_callback_renamed(self, func, serialised=True):
"""
Register a callback for resource rename. This will be called when any resource
is renamed within your agent.
If `serialised` is not set, the callbacks might arrive in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of resource deleted
lid : <name> # the new local name of the resource
oldLid : <name> # the old local name of the resource
id : <GUID> # the global Id of the resource
`Note` resource types are defined [here](../Core/Const.m.html)
`Example`
#!python
def renamed_callback(args):
print(args)
...
client.register_callback_renamed(renamed_callback)
This would print out something like the following on renaming of an R_ENTITY
#!python
OrderedDict([(u'lid', u'new_name'),
(u'r', 1),
(u'oldLid', u'old_name'),
(u'id', u'4448993b44738411de5fe2a6cf32d957')])
"""
self.__client.register_callback_renamed(partial(self.__callback_payload_only, func), serialised=serialised) | Register a callback for resource rename. This will be called when any resource
is renamed within your agent.
If `serialised` is not set, the callbacks might arrive in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of resource deleted
lid : <name> # the new local name of the resource
oldLid : <name> # the old local name of the resource
id : <GUID> # the global Id of the resource
`Note` resource types are defined [here](../Core/Const.m.html)
`Example`
#!python
def renamed_callback(args):
print(args)
...
client.register_callback_renamed(renamed_callback)
This would print out something like the following on renaming of an R_ENTITY
#!python
OrderedDict([(u'lid', u'new_name'),
(u'r', 1),
(u'oldLid', u'old_name'),
(u'id', u'4448993b44738411de5fe2a6cf32d957')]) | entailment |
def register_callback_deleted(self, func, serialised=True):
"""
Register a callback for resource deletion. This will be called when any resource
is deleted within your agent.
If `serialised` is not set, the callbacks might arrive in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of resource deleted
lid : <name> # the local name of the resource
id : <GUID> # the global Id of the resource
`Note` resource types are defined [here](../Core/Const.m.html)
`Example`
#!python
def deleted_callback(args):
print(args)
...
client.register_callback_deleted(deleted_callback)
This would print out something like the following on deletion of an R_ENTITY
#!python
OrderedDict([(u'lid', u'old_thing1'),
(u'r', 1),
(u'id', u'315637813d801ec6f057c67728bf00c2')])
"""
self.__client.register_callback_deleted(partial(self.__callback_payload_only, func), serialised=serialised) | Register a callback for resource deletion. This will be called when any resource
is deleted within your agent.
If `serialised` is not set, the callbacks might arrive in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of resource deleted
lid : <name> # the local name of the resource
id : <GUID> # the global Id of the resource
`Note` resource types are defined [here](../Core/Const.m.html)
`Example`
#!python
def deleted_callback(args):
print(args)
...
client.register_callback_deleted(deleted_callback)
This would print out something like the following on deletion of an R_ENTITY
#!python
OrderedDict([(u'lid', u'old_thing1'),
(u'r', 1),
(u'id', u'315637813d801ec6f057c67728bf00c2')]) | entailment |
def register_callback_reassigned(self, func, serialised=True):
"""
Register a callback for resource reassignment. This will be called when any resource
is reassigned to or from your agent.
If `serialised` is not set, the callbacks might arrive in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of resource reassigned
lid : <name> # the local name of the resource
epId : <GUID> # the global Id of the agent the
# resource has been reassigned *to*
id : <GUID> # the global Id of the resource
`Note` resource types are defined [here](../Core/Const.m.html)
`Note` You can check whether this is an assign "in" or "out" by comparing the epId with your current
agent id, using the `IOT.Client.agent_id` property. If it's the same it's a reassign to you.
`Example`
#!python
def reassigned_callback(args):
print(args)
...
client.register_callback_reassigned(reassigned_callback)
This would print out something like the following on assignment of an R_ENTITY to
#!python
OrderedDict([(u'lid', u'moved_thing'),
(u'r', 1),
(u'epId', u'5a8d603ee757133d66d99875d0584c72'),
(u'id', u'4448993b44738411de5fe2a6cf32d957')])
"""
self.__client.register_callback_reassigned(partial(self.__callback_payload_only, func), serialised) | Register a callback for resource reassignment. This will be called when any resource
is reassigned to or from your agent.
If `serialised` is not set, the callbacks might arrive in a different order to they were requested.
The payload passed to your callback is an OrderedDict with the following keys
#!python
r : R_ENTITY, R_FEED, etc # the type of resource reassigned
lid : <name> # the local name of the resource
epId : <GUID> # the global Id of the agent the
# resource has been reassigned *to*
id : <GUID> # the global Id of the resource
`Note` resource types are defined [here](../Core/Const.m.html)
`Note` You can check whether this is an assign "in" or "out" by comparing the epId with your current
agent id, using the `IOT.Client.agent_id` property. If it's the same it's a reassign to you.
`Example`
#!python
def reassigned_callback(args):
print(args)
...
client.register_callback_reassigned(reassigned_callback)
This would print out something like the following on assignment of an R_ENTITY to
#!python
OrderedDict([(u'lid', u'moved_thing'),
(u'r', 1),
(u'epId', u'5a8d603ee757133d66d99875d0584c72'),
(u'id', u'4448993b44738411de5fe2a6cf32d957')]) | entailment |
def register_callback_subscribed(self, callback):
"""
Register a callback for new subscription. This gets called whenever one of *your* things subscribes to something
else.
`Note` it is not called when whenever something else subscribes to your thing.
The payload passed to your callback is either a
[RemoteControl](RemotePoint.m.html#IoticAgent.IOT.RemotePoint.RemoteControl) or
[RemoteFeed](RemotePoint.m.html#IoticAgent.IOT.RemotePoint.RemoteFeed) instance.
"""
return self.__client.register_callback_created(partial(self.__callback_subscribed_filter, callback),
serialised=False) | Register a callback for new subscription. This gets called whenever one of *your* things subscribes to something
else.
`Note` it is not called when whenever something else subscribes to your thing.
The payload passed to your callback is either a
[RemoteControl](RemotePoint.m.html#IoticAgent.IOT.RemotePoint.RemoteControl) or
[RemoteFeed](RemotePoint.m.html#IoticAgent.IOT.RemotePoint.RemoteFeed) instance. | entailment |
def simulate_feeddata(self, feedid, data, mime=None, time=None):
"""Simulate the last feeddata received for given feedid
Calls the registered callback for the feed with the last recieved feed data. Allows you to test your code
without having to wait for the remote thing to share again.
`feedid` (required) (string) local id of your Feed
`data` (optional) (as applicable) The data you want to use to simulate the arrival of remote feed data
`mime` (optional) (string) The mime type of your data. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
`time` (optional) (datetime) UTC timestamp for share. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
"""
self.__client.simulate_feeddata(feedid, data, mime, time) | Simulate the last feeddata received for given feedid
Calls the registered callback for the feed with the last recieved feed data. Allows you to test your code
without having to wait for the remote thing to share again.
`feedid` (required) (string) local id of your Feed
`data` (optional) (as applicable) The data you want to use to simulate the arrival of remote feed data
`mime` (optional) (string) The mime type of your data. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
`time` (optional) (datetime) UTC timestamp for share. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share) | entailment |
def confirm_tell(self, data, success):
"""Confirm that you've done as you were told. Call this from your control callback to confirm action.
Used when you are advertising a control and you want to tell the remote requestor that you have
done what they asked you to.
`Example:` this is a minimal example to show the idea. Note - no Exception handling and ugly use of globals
#!python
client = None
def controlreq_cb(args):
global client # the client object you connected with
# perform your action with the data they sent
success = do_control_action(args['data'])
if args['confirm']: # you've been asked to confirm
client.confirm_tell(args, success)
# else, if you do not confirm_tell() this causes a timeout at the requestor's end.
client = IOT.Client(config='test.ini')
thing = client.create_thing('test321')
control = thing.create_control('control', controlreq_cb)
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`data` (mandatory) (dictionary) The `"args"` dictionary that your callback was called with
`success` (mandatory) (boolean) Whether or not the action you have been asked to do has been
sucessful.
More details on the contents of the `data` dictionary for controls see:
[create_control()](./Thing.m.html#IoticAgent.IOT.Thing.Thing.create_control)
"""
logger.info("confirm_tell(success=%s) [lid=\"%s\",pid=\"%s\"]", success, data[P_ENTITY_LID], data[P_LID])
evt = self._request_point_confirm_tell(R_CONTROL, data[P_ENTITY_LID], data[P_LID], success, data['requestId'])
self._wait_and_except_if_failed(evt) | Confirm that you've done as you were told. Call this from your control callback to confirm action.
Used when you are advertising a control and you want to tell the remote requestor that you have
done what they asked you to.
`Example:` this is a minimal example to show the idea. Note - no Exception handling and ugly use of globals
#!python
client = None
def controlreq_cb(args):
global client # the client object you connected with
# perform your action with the data they sent
success = do_control_action(args['data'])
if args['confirm']: # you've been asked to confirm
client.confirm_tell(args, success)
# else, if you do not confirm_tell() this causes a timeout at the requestor's end.
client = IOT.Client(config='test.ini')
thing = client.create_thing('test321')
control = thing.create_control('control', controlreq_cb)
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`data` (mandatory) (dictionary) The `"args"` dictionary that your callback was called with
`success` (mandatory) (boolean) Whether or not the action you have been asked to do has been
sucessful.
More details on the contents of the `data` dictionary for controls see:
[create_control()](./Thing.m.html#IoticAgent.IOT.Thing.Thing.create_control) | entailment |
def save_config(self):
"""Save the config, update the seqnum & default language
"""
self.__config.set('agent', 'seqnum', self.__client.get_seqnum())
self.__config.set('agent', 'lang', self.__client.default_lang)
self.__config.save() | Save the config, update the seqnum & default language | entailment |
def _get_point_data_handler_for(self, point):
"""Used by point instances and data callbacks"""
with self.__point_data_handlers:
try:
return self.__point_data_handlers[point]
except KeyError:
return self.__point_data_handlers.setdefault(point, PointDataObjectHandler(point, self)) | Used by point instances and data callbacks | entailment |
def _parsed_callback_wrapper(self, callback_parsed, callback_plain, foc, data):
"""Used to by register_catchall_*data() and Thing class (follow, create_point) to present point data as an
object."""
# used by PointDataObjectHandler as reference
if foc == R_FEED:
point_ref = data['pid']
else: # R_CONTROL
point_ref = Control(self, data[P_ENTITY_LID], data[P_LID], '0' * 32)
try:
data['parsed'] = self._get_point_data_handler_for(point_ref).get_template(data=data[P_DATA])
except RefreshException:
# No metadata available, do not produce warning
if callback_plain:
callback_plain(data)
except:
logger.warning('Failed to parse %s data for %s%s', foc_to_str(foc), point_ref,
'' if callback_plain else ', ignoring',
exc_info=DEBUG_ENABLED)
if callback_plain:
callback_plain(data)
else:
callback_parsed(data) | Used to by register_catchall_*data() and Thing class (follow, create_point) to present point data as an
object. | entailment |
def _wait_and_except_if_failed(self, event, timeout=None):
"""Combines waiting for event and call to `_except_if_failed`. If timeout is not specified the configured
sync_timeout is used.
"""
event.wait(timeout or self.__sync_timeout)
self._except_if_failed(event) | Combines waiting for event and call to `_except_if_failed`. If timeout is not specified the configured
sync_timeout is used. | entailment |
def _except_if_failed(cls, event):
"""Raises an IOTException from the given event if it was not successful. Assumes timeout success flag on event
has not been set yet."""
if event.success is None:
raise IOTSyncTimeout('Requested timed out', event)
if not event.success:
msg = "Request failed, unknown error"
if isinstance(event.payload, Mapping):
if P_MESSAGE in event.payload:
msg = event.payload[P_MESSAGE]
try:
exc_class = cls.__exception_mapping[event.payload[P_CODE]]
except KeyError:
pass
else:
raise exc_class(msg, event)
raise IOTException(msg, event) | Raises an IOTException from the given event if it was not successful. Assumes timeout success flag on event
has not been set yet. | entailment |
def list(self, all_my_agents=False, limit=500, offset=0):
"""List `all` the things created by this client on this or all your agents
Returns QAPI list function payload
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`all_my_agents` (optional) (boolean) If `False` limit search to just this agent,
if `True` return list of things belonging to all agents you own.
`limit` (optional) (integer) Return this many Point details
`offset` (optional) (integer) Return Point details starting at this offset
"""
logger.info("list(all_my_agents=%s, limit=%s, offset=%s)", all_my_agents, limit, offset)
if all_my_agents:
evt = self._request_entity_list_all(limit=limit, offset=offset)
else:
evt = self._request_entity_list(limit=limit, offset=offset)
self._wait_and_except_if_failed(evt)
return evt.payload['entities'] | List `all` the things created by this client on this or all your agents
Returns QAPI list function payload
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`all_my_agents` (optional) (boolean) If `False` limit search to just this agent,
if `True` return list of things belonging to all agents you own.
`limit` (optional) (integer) Return this many Point details
`offset` (optional) (integer) Return Point details starting at this offset | entailment |
def get_thing(self, lid):
"""Get the details of a newly created Thing. This only applies to asynchronous creation of Things and the
new Thing instance can only be retrieved once.
Returns a [Thing](Thing.m.html#IoticAgent.IOT.Thing.Thing) object,
which corresponds to the Thing with the given local id (nickname)
Raises `KeyError` if the Thing has not been newly created (or has already been retrieved by a previous call)
`lid` (required) (string) local identifier of your Thing.
"""
with self.__new_things:
try:
return self.__new_things.pop(lid)
except KeyError as ex:
raise_from(KeyError('Thing %s not know as new' % lid), ex) | Get the details of a newly created Thing. This only applies to asynchronous creation of Things and the
new Thing instance can only be retrieved once.
Returns a [Thing](Thing.m.html#IoticAgent.IOT.Thing.Thing) object,
which corresponds to the Thing with the given local id (nickname)
Raises `KeyError` if the Thing has not been newly created (or has already been retrieved by a previous call)
`lid` (required) (string) local identifier of your Thing. | entailment |
def create_thing(self, lid):
"""Create a new Thing with a local id (lid).
Returns a [Thing](Thing.m.html#IoticAgent.IOT.Thing.Thing) object if successful
or if the Thing already exists
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`lid` (required) (string) local identifier of your Thing. The local id is your name or nickname for the thing.
It's "local" in that it's only available to you on this container, not searchable and not visible to others.
"""
evt = self.create_thing_async(lid)
self._wait_and_except_if_failed(evt)
try:
with self.__new_things:
return self.__new_things.pop(lid)
except KeyError as ex:
raise raise_from(IOTClientError('Thing %s not in cache (post-create)' % lid), ex) | Create a new Thing with a local id (lid).
Returns a [Thing](Thing.m.html#IoticAgent.IOT.Thing.Thing) object if successful
or if the Thing already exists
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`lid` (required) (string) local identifier of your Thing. The local id is your name or nickname for the thing.
It's "local" in that it's only available to you on this container, not searchable and not visible to others. | entailment |
def delete_thing(self, lid):
"""Delete a Thing
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`lid` (required) (string) local identifier of the Thing you want to delete
"""
logger.info("delete_thing(lid=\"%s\")", lid)
evt = self.delete_thing_async(lid)
self._wait_and_except_if_failed(evt) | Delete a Thing
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`lid` (required) (string) local identifier of the Thing you want to delete | entailment |
def search(self, text=None, lang=None, location=None, unit=None, limit=50, offset=0, reduced=False, local=None,
scope=SearchScope.PUBLIC):
"""Search the Iotic Space for public Things with metadata matching the search parameters:
text, lang(uage), location, unit, limit, offset. Note that only things which have at least one point defined can
be found.
Returns dict of results as below (first with reduced=False, second with reduced=True)- OR -
#!python
# reduced=False returns dict similar to below
{
"2b2d8b068e404861b19f9e060877e002": {
"long": -1.74803,
"matches": 3.500,
"lat": 52.4539,
"label": "Weather Station #2",
"owner": "3bbf307b43b1460289fe707619dece3d",
"points": {
"a300cc90147f4e2990195639de0af201": {
"matches": 3.000,
"label": "Feed 201",
"type": "Feed",
"storesRecent": true
},
"a300cc90147f4e2990195639de0af202": {
"matches": 1.500,
"label": "Feed 202",
"type": "Feed",
"storesRecent": false
}
}
},
"76a3b24b02d34f20b675257624b0e001": {
"long": 0.716356,
"matches": 2.000,
"lat": 52.244384,
"label": "Weather Station #1",
"owner": "3bbf307b43b1460289fe707619dece3d",
"points": {
"fb1a4a4dbb2642ab9f836892da93f101": {
"matches": 1.000,
"label": "My weather feed",
"type": "Feed",
"storesRecent": false
},
"fb1a4a4dbb2642ab9f836892da93c102": {
"matches": 1.000,
"label": None,
"type": "Control",
"storesRecent": false
}
}
}
}
# reduced=True returns dict similar to below
{
"2b2d8b068e404861b19f9e060877e002": {
"a300cc90147f4e2990195639de0af201": "Feed",
"a300cc90147f4e2990195639de0af202": "Feed"
},
"76a3b24b02d34f20b675257624b0e001": {
"fb1a4a4dbb2642ab9f836892da93f101": "Feed",
"fb1a4a4dbb2642ab9f836892da93f102": "Control"
}
}
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`text` (optional) (string) The text to search for. Label and description will be searched
for both Thing and Point and each word will be used as a tag search too. Text search is case-insensitive. Tag
search is language neutral.
`lang` (optional) (string) The two-character ISO 639-1 language code to search in, e.g. "en" "fr"
Language is used to limit search to only labels and descriptions in that language. You will only get labels `in
that language` back from search and then only if there are any in that language.
`location` (optional) (dictionary) Latitude, longitude and radius to search within.
All values are float, Radius is in kilometers (km). E.g. `{"lat"=1.2345, "long"=54.321, "radius"=6.789}`. Note:
If `text` has not been specified, radius can at most be 25km.
`unit` (optional) (string) Valid URL of a unit in an ontology. Or use a constant from the
[units](../Units.m.html#IoticAgent.Units) class - such as [METRE](../Units.m.html#IoticAgent.Units.METRE).
`limit` (optional) (integer) Return this many search results.
`offset` (optional) (integer) Return results starting at this offset - good for paging.
`reduced` (optional) (boolean) If `true`, return the reduced results just containing points and
their type.
`local` (optional) (boolean) **Deprecated**, use `scope` instead. If `true`, perform search at container level.
Check the local_meta flag to determine whether local metadata functionality is available. (Takes precedence over
`scope`.)
`scope` (optional) ([SearchScope](../Core/Const.m.html#IoticAgent.Core.Const.SearchScope)) Whether to perform
PUBLIC, LOCAL (container level) or LOCAL_OWN (container level restricted to own things) search. Check the
[local_meta](#IoticAgent.IOT.Client.Client.local_meta) flag to determine whether local metadata functionality is
available. (Note that PUBLIC and LOCAL_OWN scopes are always available.)
"""
logger.info("search(text=\"%s\", lang=\"%s\", location=\"%s\", unit=\"%s\", limit=%s, offset=%s, reduced=%s)",
text, lang, location, unit, limit, offset, reduced)
evt = self._request_search(text, lang, location, unit, limit, offset,
SearchType.REDUCED if reduced else SearchType.FULL, local, scope)
self._wait_and_except_if_failed(evt)
return evt.payload['result'] | Search the Iotic Space for public Things with metadata matching the search parameters:
text, lang(uage), location, unit, limit, offset. Note that only things which have at least one point defined can
be found.
Returns dict of results as below (first with reduced=False, second with reduced=True)- OR -
#!python
# reduced=False returns dict similar to below
{
"2b2d8b068e404861b19f9e060877e002": {
"long": -1.74803,
"matches": 3.500,
"lat": 52.4539,
"label": "Weather Station #2",
"owner": "3bbf307b43b1460289fe707619dece3d",
"points": {
"a300cc90147f4e2990195639de0af201": {
"matches": 3.000,
"label": "Feed 201",
"type": "Feed",
"storesRecent": true
},
"a300cc90147f4e2990195639de0af202": {
"matches": 1.500,
"label": "Feed 202",
"type": "Feed",
"storesRecent": false
}
}
},
"76a3b24b02d34f20b675257624b0e001": {
"long": 0.716356,
"matches": 2.000,
"lat": 52.244384,
"label": "Weather Station #1",
"owner": "3bbf307b43b1460289fe707619dece3d",
"points": {
"fb1a4a4dbb2642ab9f836892da93f101": {
"matches": 1.000,
"label": "My weather feed",
"type": "Feed",
"storesRecent": false
},
"fb1a4a4dbb2642ab9f836892da93c102": {
"matches": 1.000,
"label": None,
"type": "Control",
"storesRecent": false
}
}
}
}
# reduced=True returns dict similar to below
{
"2b2d8b068e404861b19f9e060877e002": {
"a300cc90147f4e2990195639de0af201": "Feed",
"a300cc90147f4e2990195639de0af202": "Feed"
},
"76a3b24b02d34f20b675257624b0e001": {
"fb1a4a4dbb2642ab9f836892da93f101": "Feed",
"fb1a4a4dbb2642ab9f836892da93f102": "Control"
}
}
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`text` (optional) (string) The text to search for. Label and description will be searched
for both Thing and Point and each word will be used as a tag search too. Text search is case-insensitive. Tag
search is language neutral.
`lang` (optional) (string) The two-character ISO 639-1 language code to search in, e.g. "en" "fr"
Language is used to limit search to only labels and descriptions in that language. You will only get labels `in
that language` back from search and then only if there are any in that language.
`location` (optional) (dictionary) Latitude, longitude and radius to search within.
All values are float, Radius is in kilometers (km). E.g. `{"lat"=1.2345, "long"=54.321, "radius"=6.789}`. Note:
If `text` has not been specified, radius can at most be 25km.
`unit` (optional) (string) Valid URL of a unit in an ontology. Or use a constant from the
[units](../Units.m.html#IoticAgent.Units) class - such as [METRE](../Units.m.html#IoticAgent.Units.METRE).
`limit` (optional) (integer) Return this many search results.
`offset` (optional) (integer) Return results starting at this offset - good for paging.
`reduced` (optional) (boolean) If `true`, return the reduced results just containing points and
their type.
`local` (optional) (boolean) **Deprecated**, use `scope` instead. If `true`, perform search at container level.
Check the local_meta flag to determine whether local metadata functionality is available. (Takes precedence over
`scope`.)
`scope` (optional) ([SearchScope](../Core/Const.m.html#IoticAgent.Core.Const.SearchScope)) Whether to perform
PUBLIC, LOCAL (container level) or LOCAL_OWN (container level restricted to own things) search. Check the
[local_meta](#IoticAgent.IOT.Client.Client.local_meta) flag to determine whether local metadata functionality is
available. (Note that PUBLIC and LOCAL_OWN scopes are always available.) | entailment |
def search_reduced(self, text=None, lang=None, location=None, unit=None, limit=100, offset=0, local=None,
scope=SearchScope.PUBLIC):
"""Shorthand for [search()](#IoticAgent.IOT.Client.Client.search) with `reduced=True`"""
return self.search(text, lang, location, unit, limit, offset, reduced=True, local=local, scope=scope) | Shorthand for [search()](#IoticAgent.IOT.Client.Client.search) with `reduced=True` | entailment |
def search_located(self, text=None, lang=None, location=None, unit=None, limit=100, offset=0, local=None,
scope=SearchScope.PUBLIC):
"""See [search()](#IoticAgent.IOT.Client.Client.search) for general documentation. Provides a thing-only
result set comprised only of things which have a location set, e.g.:
#!python
{
# Keyed by thing id
'2b2d8b068e404861b19f9e060877e002':
# location (g, lat & long), label (l, optional)
{'g': (52.4539, -1.74803), 'l': 'Weather Station #2'},
'76a3b24b02d34f20b675257624b0e001':
{'g': (52.244384, 0.716356), 'l': None},
'76a3b24b02d34f20b675257624b0e004':
{'g': (52.245384, 0.717356), 'l': 'Gasometer'},
'76a3b24b02d34f20b675257624b0e005':
{'g': (52.245384, 0.717356), 'l': 'Zepellin'}
}
"""
logger.info("search_located(text=\"%s\", lang=\"%s\", location=\"%s\", unit=\"%s\", limit=%s, offset=%s)",
text, lang, location, unit, limit, offset)
evt = self._request_search(text, lang, location, unit, limit, offset, SearchType.LOCATED, local, scope)
self._wait_and_except_if_failed(evt)
return evt.payload['result'] | See [search()](#IoticAgent.IOT.Client.Client.search) for general documentation. Provides a thing-only
result set comprised only of things which have a location set, e.g.:
#!python
{
# Keyed by thing id
'2b2d8b068e404861b19f9e060877e002':
# location (g, lat & long), label (l, optional)
{'g': (52.4539, -1.74803), 'l': 'Weather Station #2'},
'76a3b24b02d34f20b675257624b0e001':
{'g': (52.244384, 0.716356), 'l': None},
'76a3b24b02d34f20b675257624b0e004':
{'g': (52.245384, 0.717356), 'l': 'Gasometer'},
'76a3b24b02d34f20b675257624b0e005':
{'g': (52.245384, 0.717356), 'l': 'Zepellin'}
} | entailment |
def describe(self, guid_or_resource, lang=None, local=None, scope=DescribeScope.AUTO):
"""Describe returns the public (or local) description of a Thing or Point
Returns the description dict (see below for Thing example) if available, otherwise `None`
#!python
{
"type": "Entity",
"meta": {
"long": 0.716356,
"lat": 52.244384,
"label": "Weather Station #1",
"parent": "3bbf307b43b1460289fe707619dece3d",
"points": [
{
"type": "Control",
"label": "Control 101",
"guid": "fb1a4a4dbb2642ab9f836892da93c101",
"storesRecent": false
},
{
"type": "Feed",
"label": "My weather feed",
"guid": "fb1a4a4dbb2642ab9f836892da93f101",
"storesRecent": true
}
],
"comment": "A lovely weather station...",
"tags": [
"blue",
"garden"
]
}
}
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`guid_or_resource` (mandatory) (string or object).
If a `string`, it should contain the globally unique id of the resource you want to describe in 8-4-4-4-12
(or undashed) format.
If an `object`, it should be an instance of Thing, Point, RemoteFeed or RemoteControl. The system will return
you the description of that object.
`lang` (optional) (string) The two-character ISO 639-1 language code for which labels and comments will be
returned. This does not affect Values (i.e. when describing a Point, apart from value comments) and tags as
these are language neutral).
`local` (optional) (boolean) **Deprecated**, use `scope` instead. If `true`, lookup metadata at container level.
Check the local_meta flag to determine whether local metadata functionality is available. (Takes precedence over
`scope`.)
`scope` (optional) ([DescribeScope](../Core/Const.m.html#IoticAgent.Core.Const.DescribeScope)) Whether to
perform PUBLIC, LOCAL (container level) or LOCAL_OWN (container level restricted to own things) metadata lookup.
Check the [local_meta](#IoticAgent.IOT.Client.Client.local_meta) flag to determine whether local metadata
functionality is available. (Note that AUTO, PUBLIC and LOCAL_OWN scopes are always available.). AUTO mode
first attempts to look up private metadata, then public.
"""
if isinstance(guid_or_resource, self.__guid_resources):
guid = guid_or_resource.guid
elif isinstance(guid_or_resource, string_types):
guid = uuid_to_hex(guid_or_resource)
else:
raise ValueError("describe requires guid string or Thing, Point, RemoteFeed or RemoteControl instance")
logger.info('describe() [guid="%s"]', guid)
evt = self._request_describe(guid, lang, local, scope)
self._wait_and_except_if_failed(evt)
return evt.payload['result'] | Describe returns the public (or local) description of a Thing or Point
Returns the description dict (see below for Thing example) if available, otherwise `None`
#!python
{
"type": "Entity",
"meta": {
"long": 0.716356,
"lat": 52.244384,
"label": "Weather Station #1",
"parent": "3bbf307b43b1460289fe707619dece3d",
"points": [
{
"type": "Control",
"label": "Control 101",
"guid": "fb1a4a4dbb2642ab9f836892da93c101",
"storesRecent": false
},
{
"type": "Feed",
"label": "My weather feed",
"guid": "fb1a4a4dbb2642ab9f836892da93f101",
"storesRecent": true
}
],
"comment": "A lovely weather station...",
"tags": [
"blue",
"garden"
]
}
}
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`guid_or_resource` (mandatory) (string or object).
If a `string`, it should contain the globally unique id of the resource you want to describe in 8-4-4-4-12
(or undashed) format.
If an `object`, it should be an instance of Thing, Point, RemoteFeed or RemoteControl. The system will return
you the description of that object.
`lang` (optional) (string) The two-character ISO 639-1 language code for which labels and comments will be
returned. This does not affect Values (i.e. when describing a Point, apart from value comments) and tags as
these are language neutral).
`local` (optional) (boolean) **Deprecated**, use `scope` instead. If `true`, lookup metadata at container level.
Check the local_meta flag to determine whether local metadata functionality is available. (Takes precedence over
`scope`.)
`scope` (optional) ([DescribeScope](../Core/Const.m.html#IoticAgent.Core.Const.DescribeScope)) Whether to
perform PUBLIC, LOCAL (container level) or LOCAL_OWN (container level restricted to own things) metadata lookup.
Check the [local_meta](#IoticAgent.IOT.Client.Client.local_meta) flag to determine whether local metadata
functionality is available. (Note that AUTO, PUBLIC and LOCAL_OWN scopes are always available.). AUTO mode
first attempts to look up private metadata, then public. | entailment |
def from_post_request(post_request):
'''
Convenience method for creating a new OutcomeRequest from a request
object.
post_request is assumed to be a Django HttpRequest object
'''
request = OutcomeRequest()
request.post_request = post_request
request.process_xml(post_request.data)
return request | Convenience method for creating a new OutcomeRequest from a request
object.
post_request is assumed to be a Django HttpRequest object | entailment |
def post_replace_result(self, score, result_data=None):
'''
POSTs the given score to the Tool Consumer with a replaceResult.
OPTIONAL:
result_data must be a dictionary
Note: ONLY ONE of these values can be in the dict at a time,
due to the Canvas specification.
'text' : str text
'url' : str url
'''
self.operation = REPLACE_REQUEST
self.score = score
self.result_data = result_data
if result_data is not None:
if len(result_data) > 1:
error_msg = ('Dictionary result_data can only have one entry. '
'{0} entries were found.'.format(len(result_data)))
raise InvalidLTIConfigError(error_msg)
elif 'text' not in result_data and 'url' not in result_data:
error_msg = ('Dictionary result_data can only have the key '
'"text" or the key "url".')
raise InvalidLTIConfigError(error_msg)
else:
return self.post_outcome_request()
else:
return self.post_outcome_request() | POSTs the given score to the Tool Consumer with a replaceResult.
OPTIONAL:
result_data must be a dictionary
Note: ONLY ONE of these values can be in the dict at a time,
due to the Canvas specification.
'text' : str text
'url' : str url | entailment |
def post_outcome_request(self):
'''
POST an OAuth signed request to the Tool Consumer.
'''
if not self.has_required_attributes():
raise InvalidLTIConfigError(
'OutcomeRequest does not have all required attributes')
consumer = oauth2.Consumer(key=self.consumer_key,
secret=self.consumer_secret)
client = oauth2.Client(consumer)
# monkey_patch_headers ensures that Authorization
# header is NOT lower cased
monkey_patch_headers = True
monkey_patch_function = None
if monkey_patch_headers:
import httplib2
http = httplib2.Http
normalize = http._normalize_headers
def my_normalize(self, headers):
print("My Normalize", headers)
ret = normalize(self, headers)
if 'authorization' in ret:
ret['Authorization'] = ret.pop('authorization')
print("My Normalize", ret)
return ret
http._normalize_headers = my_normalize
monkey_patch_function = normalize
response, content = client.request(
self.lis_outcome_service_url,
'POST',
body=self.generate_request_xml(),
headers={'Content-Type': 'application/xml'})
if monkey_patch_headers and monkey_patch_function:
import httplib2
http = httplib2.Http
http._normalize_headers = monkey_patch_function
self.outcome_response = OutcomeResponse.from_post_response(response,
content)
return self.outcome_response | POST an OAuth signed request to the Tool Consumer. | entailment |
def process_xml(self, xml):
'''
Parse Outcome Request data from XML.
'''
root = objectify.fromstring(xml)
self.message_identifier = str(
root.imsx_POXHeader.imsx_POXRequestHeaderInfo.
imsx_messageIdentifier)
try:
result = root.imsx_POXBody.replaceResultRequest
self.operation = REPLACE_REQUEST
# Get result sourced id from resultRecord
self.lis_result_sourcedid = result.resultRecord.\
sourcedGUID.sourcedId
self.score = str(result.resultRecord.result.
resultScore.textString)
except:
pass
try:
result = root.imsx_POXBody.deleteResultRequest
self.operation = DELETE_REQUEST
# Get result sourced id from resultRecord
self.lis_result_sourcedid = result.resultRecord.\
sourcedGUID.sourcedId
except:
pass
try:
result = root.imsx_POXBody.readResultRequest
self.operation = READ_REQUEST
# Get result sourced id from resultRecord
self.lis_result_sourcedid = result.resultRecord.\
sourcedGUID.sourcedId
except:
pass | Parse Outcome Request data from XML. | entailment |
def _setup_transport(self):
"""Wrap the socket in an SSL object."""
if hasattr(self, 'sslopts'):
self.sock = ssl.wrap_socket(self.sock, **self.sslopts)
elif hasattr(self, 'sslctx'):
self.sock = self.sslctx.wrap_socket(self.sock,
server_hostname=self.hostname)
else:
self.sock = ssl.wrap_socket(self.sock)
self.sock.do_handshake()
self._quick_recv = self.sock.read | Wrap the socket in an SSL object. | entailment |
def _shutdown_transport(self):
"""Unwrap a Python 2.6 SSL socket, so we can call shutdown()"""
if self.sock is not None:
try:
unwrap = self.sock.unwrap
except AttributeError:
return
try:
self.sock = unwrap()
except ValueError:
# Failure within SSL might mean unwrap exists but socket is not
# deemed wrapped
pass | Unwrap a Python 2.6 SSL socket, so we can call shutdown() | entailment |
def _file_loc(self):
"""_file_loc helper returns a possible config filename.
EG /tmp/stuff/fish.py -> /tmp/stuff/fish.ini
"""
if self.__fname is None:
f = os.path.splitext(os.path.basename(argv[0]))[0] + '.ini'
cwd = os.getcwd()
# todo: prefer script path or current path ??
# print(os.path.realpath(sys.argv[0]))
# todo: if os.path.exists(os.path.join(cwd, main.__file__)):
return os.path.join(cwd, f)
return self.__fname | _file_loc helper returns a possible config filename.
EG /tmp/stuff/fish.py -> /tmp/stuff/fish.ini | entailment |
def setup_logging(self):
"""Setup logging module based on known modules in the config file
"""
logging.getLogger('amqp').setLevel(str_to_logging(self.get('logging', 'amqp')))
logging.getLogger('rdflib').setLevel(str_to_logging(self.get('logging', 'rdflib'))) | Setup logging module based on known modules in the config file | entailment |
def save(self, filename=None):
"""Write config to file."""
if self.__fname is None and filename is None:
raise ValueError('Config loaded from string, no filename specified')
conf = self.__config
cpa = dict_to_cp(conf)
with open(self.__fname if filename is None else filename, 'w') as f:
cpa.write(f) | Write config to file. | entailment |
def get(self, section, val):
"""Get a setting or the default
`Returns` The current value of the setting `val` or the default, or `None` if not found
`section` (mandatory) (string) the section name in the config E.g. `"agent"`
`val` (mandatory) (string) the section name in the config E.g. `"host"`
"""
val = val.lower()
if section in self.__config:
if val in self.__config[section]:
# logger.debug('get config %s %s = %s', section, val, self.__config[section][val])
return self.__config[section][val]
if section in self.__defaults:
if val in self.__defaults[section]:
# logger.debug('get defaults %s %s = %s', section, val, self.__defaults[section][val])
return self.__defaults[section][val]
return None | Get a setting or the default
`Returns` The current value of the setting `val` or the default, or `None` if not found
`section` (mandatory) (string) the section name in the config E.g. `"agent"`
`val` (mandatory) (string) the section name in the config E.g. `"host"` | entailment |
def set(self, section, val, data):
"""Add a setting to the config
`section` (mandatory) (string) the section name in the config E.g. `"agent"`
`val` (mandatory) (string) the section name in the config E.g. `"host"`
`data` (mandatory) (as appropriate) the new value for the `val`
"""
val = val.lower()
if section in self.__config:
# logger.debug('set %s %s = %s', section, val, data)
self.__config[section][val] = data | Add a setting to the config
`section` (mandatory) (string) the section name in the config E.g. `"agent"`
`val` (mandatory) (string) the section name in the config E.g. `"host"`
`data` (mandatory) (as appropriate) the new value for the `val` | entailment |
def update(self, section, val, data):
"""Add a setting to the config, but if same as default or None then no action.
This saves the .save writing the defaults
`section` (mandatory) (string) the section name in the config E.g. `"agent"`
`val` (mandatory) (string) the section name in the config E.g. `"host"`
`data` (mandatory) (as appropriate) the new value for the `val`
"""
k = self.get(section, val)
# logger.debug('update %s %s from: %s to: %s', section, val, k, data)
if data is not None and k != data:
self.set(section, val, data) | Add a setting to the config, but if same as default or None then no action.
This saves the .save writing the defaults
`section` (mandatory) (string) the section name in the config E.g. `"agent"`
`val` (mandatory) (string) the section name in the config E.g. `"host"`
`data` (mandatory) (as appropriate) the new value for the `val` | entailment |
def get_location(self):
"""Gets the current geo location of your Thing
Returns tuple of `(lat, lon)` in `float` or `(None, None)` if location is not set for this Thing
"""
lat = None
lon = None
# note: always picks from first triple
for _, _, o in self._graph.triples((None, GEO_NS.lat, None)):
lat = float(o)
break
for _, _, o in self._graph.triples((None, GEO_NS.long, None)):
lon = float(o)
break
return lat, lon | Gets the current geo location of your Thing
Returns tuple of `(lat, lon)` in `float` or `(None, None)` if location is not set for this Thing | entailment |
def delete_location(self):
"""Deletes all the `geo:lat` and `geo:long` metadata properties on your Thing
"""
# normally this should only remove one triple each
for s, p, o in self._graph.triples((None, GEO_NS.lat, None)):
self._graph.remove((s, p, o))
for s, p, o in self._graph.triples((None, GEO_NS.long, None)):
self._graph.remove((s, p, o)) | Deletes all the `geo:lat` and `geo:long` metadata properties on your Thing | entailment |
def start(self):
"""start connection threads, blocks until started
"""
if not (self.__recv_thread or self.__send_thread):
self.__end.clear()
self.__send_ready.clear()
self.__recv_ready.clear()
timeout = self.__socket_timeout + 1
ignore_exc = self.__startup_ignore_exc
self.__send_exc_clear()
self.__recv_exc_clear()
# start & await send thread success (unless timeout reached or an exception has occured)
self.__send_thread = Thread(target=self.__send_run, name='amqplink_send')
self.__send_thread.start()
start_time = monotonic()
success = False
while not (success or (not ignore_exc and self.__send_exc) or monotonic() - start_time > timeout):
success = self.__send_ready.wait(.25)
if success:
# start & await receiver thread success
self.__recv_thread = Thread(target=self.__recv_run, name='amqplink_recv')
self.__recv_thread.start()
start_time = monotonic()
success = False
while not (success or (not ignore_exc and self.__recv_exc) or monotonic() - start_time >= timeout):
success = self.__recv_ready.wait(.25)
# handler either thread's failure
if not success:
logger.warning("AmqpLink Failed to start. Giving up.")
self.stop()
if self.__recv_exc:
# prioritise receive thread since this can get access-denied whereas send does not (until sending)
raise_from(LinkException('Receive thread failure'), self.__recv_exc)
elif self.__send_exc:
raise_from(LinkException('Send thread failure'), self.__send_exc)
else:
raise LinkException('Unknown link failure (timeout reached)')
else:
raise LinkException('amqplink already started') | start connection threads, blocks until started | entailment |
def is_alive(self):
"""Helper function to show if send & recv Threads are running
"""
if self.__send_ready.is_set() and self.__recv_ready.is_set():
if self.__send_thread is not None and self.__recv_thread is not None:
return self.__send_thread.is_alive() and self.__recv_thread.is_alive()
return False | Helper function to show if send & recv Threads are running | entailment |
def stop(self):
"""disconnect, blocks until stopped
"""
self.__end.set()
if self.__recv_thread:
self.__recv_thread.join()
self.__recv_thread = None
if self.__send_thread:
self.__send_thread.join()
self.__send_thread = None | disconnect, blocks until stopped | entailment |
def send(self, body, content_type='application/ubjson', timeout=5):
"""timeout indicates amount of time to wait for sending thread to be ready. set to larger than zero to wait
(in seconds, fractional) or None to block.
"""
if self.__send_ready.wait(timeout):
try:
with self.__send_lock:
# access denied response might be received inside send thread rather than here how to best handle?
self.__send_channel.basic_publish(msg=Message(body, delivery_mode=2, content_type=content_type),
exchange=self.__epid)
except exceptions.AccessRefused as exc:
raise_from(LinkException('Access denied'), exc)
except (exceptions.AMQPError, SocketError) as exc:
raise_from(LinkException('amqp/transport failure'), exc)
except Exception as exc: # pylint: disable=broad-except
raise_from(LinkException('unexpected failure'), exc)
else:
exc = self.__send_exc
if exc:
raise_from(LinkException('Sender unavailable'), exc)
else:
raise LinkException('Sender unavailable (unknown error)') | timeout indicates amount of time to wait for sending thread to be ready. set to larger than zero to wait
(in seconds, fractional) or None to block. | entailment |
def __get_ssl_context(cls, sslca=None):
"""Make an SSLConext for this Python version using public or sslca
"""
if ((version_info[0] == 2 and (version_info[1] >= 7 and version_info[2] >= 5)) or
(version_info[0] == 3 and version_info[1] >= 4)):
logger.debug('SSL method for 2.7.5+ / 3.4+')
# pylint: disable=no-name-in-module
from ssl import SSLContext, PROTOCOL_TLSv1_2, CERT_REQUIRED, OP_NO_COMPRESSION
ctx = SSLContext(PROTOCOL_TLSv1_2)
ctx.set_ciphers('HIGH:!SSLv3:!TLSv1:!aNULL:@STRENGTH')
# see CRIME security exploit
ctx.options |= OP_NO_COMPRESSION
# the following options are used to verify the identity of the broker
if sslca:
ctx.load_verify_locations(sslca)
ctx.verify_mode = CERT_REQUIRED
ctx.check_hostname = False
else:
# Verify public certifcates if sslca is None (default)
from ssl import Purpose # pylint: disable=no-name-in-module
ctx.load_default_certs(purpose=Purpose.SERVER_AUTH)
ctx.verify_mode = CERT_REQUIRED
ctx.check_hostname = True
elif version_info[0] == 3 and version_info[1] < 4:
logger.debug('Using SSL method for 3.2+, < 3.4')
# pylint: disable=no-name-in-module
from ssl import SSLContext, CERT_REQUIRED, PROTOCOL_SSLv23, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1
ctx = SSLContext(PROTOCOL_SSLv23)
ctx.options |= (OP_NO_SSLv2 | OP_NO_SSLv3 | OP_NO_TLSv1)
ctx.set_ciphers('HIGH:!SSLv3:!TLSv1:!aNULL:@STRENGTH')
# the following options are used to verify the identity of the broker
if sslca:
ctx.load_verify_locations(sslca)
ctx.verify_mode = CERT_REQUIRED
else:
# Verify public certifcates if sslca is None (default)
ctx.set_default_verify_paths()
ctx.verify_mode = CERT_REQUIRED
else:
raise Exception("Unsupported Python version %s" % '.'.join(str(item) for item in version_info[:3]))
return ctx | Make an SSLConext for this Python version using public or sslca | entailment |
def __recv_cb(self, msg):
"""Calls user-provided callback and marks message for Ack regardless of success
"""
try:
self.__msg_callback(msg)
except:
logger.exception("AmqpLink.__recv_cb exception calling msg_callback")
finally:
# only works if all messages handled in series
self.__last_id = msg.delivery_tag
self.__unacked += 1 | Calls user-provided callback and marks message for Ack regardless of success | entailment |
def __recv_run(self): # pylint: disable=too-many-branches,too-many-statements
"""Main receive thread/loop
"""
while not self.__end.is_set():
self.__unacked = 0
self.__last_id = None
try:
self.__recv_ready.clear() # Ensure event is cleared for EG network failure/retry loop
with Connection(userid=self.__prefix + self.__epid,
password=self.__passwd,
virtual_host=self.__vhost,
heartbeat=self.__heartbeat,
connect_timeout=self.__socket_timeout,
operation_timeout=self.__socket_timeout,
ssl=self.__get_ssl_context(self.__sslca),
host=self.__host) as conn,\
conn.channel(auto_encode_decode=False) as channel_data,\
conn.channel() as channel_ka:
logger.debug('Connected, using cipher %s', conn.transport.sock.cipher()[0])
channel_data.basic_qos(prefetch_size=0, prefetch_count=self.__prefetch, a_global=False)
# exclusive=True. There can be only one (receiver)
msgtag = channel_data.basic_consume(queue=self.__epid, exclusive=True, callback=self.__recv_cb)
acktag = channel_ka.basic_consume(queue=('%s_ka' % self.__epid), exclusive=True, no_ack=True,
callback=self.__recv_ka_cb)
self.__ka_channel = channel_ka
self.__recv_exc_clear(log_if_exc_set='reconnected')
self.__recv_ready.set()
try:
#
# Drain loop
while not self.__end.is_set():
try:
while not self.__end.is_set() and self.__unacked < self.__ack_threshold:
# inner loop to handle all outstanding amqp messages
conn.drain_events(.1)
except SocketTimeout:
pass
# either have waited for .1s or threshold reached, so always ack
if self.__unacked:
logger.debug('acking (%d) up to %s', self.__unacked, self.__last_id)
channel_data.basic_ack(self.__last_id, multiple=True)
self.__unacked = 0
conn.heartbeat_tick()
finally:
self.__recv_ready.clear()
try:
channel_data.basic_cancel(msgtag)
channel_ka.basic_cancel(acktag)
except:
pass
except exceptions.AccessRefused:
self.__recv_log_set_exc_and_wait('Access Refused (Credentials already in use?)')
except exceptions.ConnectionForced:
self.__recv_log_set_exc_and_wait('Disconnected by broker (ConnectionForced)')
except SocketTimeout:
self.__recv_log_set_exc_and_wait('SocketTimeout exception. wrong credentials, vhost or prefix?')
except SSLError:
self.__recv_log_set_exc_and_wait('ssl.SSLError Bad Certificate?')
except (exceptions.AMQPError, SocketError):
self.__recv_log_set_exc_and_wait('amqp/transport failure, sleeping before retry')
except:
self.__recv_log_set_exc_and_wait('unexpected failure, exiting', wait_seconds=0)
break
logger.debug('finished') | Main receive thread/loop | entailment |
def __recv_log_set_exc_and_wait(self, msg, level=None, wait_seconds=CONN_RETRY_DELAY_SECONDS):
"""Equivalent to __send_log_set_exc_and_wait but for receiver thread"""
logger.log(
((logging.DEBUG if self.__recv_exc else logging.ERROR) if level is None else level),
msg,
exc_info=DEBUG_ENABLED
)
self.__recv_exc = exc_info()[1]
self.__end.wait(wait_seconds) | Equivalent to __send_log_set_exc_and_wait but for receiver thread | entailment |
def __recv_exc_clear(self, log_if_exc_set=None):
"""Equivalent to __send_exc_clear"""
if not (log_if_exc_set is None or self.__recv_exc is None):
logger.info(log_if_exc_set)
self.__recv_exc = None | Equivalent to __send_exc_clear | entailment |
def __send_run(self):
"""Send request thread
"""
while not self.__end.is_set():
try:
with Connection(userid=self.__prefix + self.__epid,
password=self.__passwd,
virtual_host=self.__vhost,
heartbeat=self.__heartbeat,
connect_timeout=self.__socket_timeout,
operation_timeout=self.__socket_timeout,
ssl=self.__get_ssl_context(self.__sslca),
host=self.__host) as conn,\
conn.channel(auto_encode_decode=False) as channel:
self.__send_channel = channel
self.__send_exc_clear(log_if_exc_set='reconnected')
self.__send_ready.set()
try:
self.__send_ready_callback(self.__send_exc_time)
while not self.__end.is_set():
with self.__send_lock:
try:
# deal with any incoming messages (AMQP protocol only, not QAPI)
conn.drain_events(0)
except (BlockingIOError, SocketTimeout):
pass
conn.heartbeat_tick()
# idle
self.__end.wait(.25)
finally:
# locked so can make sure another call to send() is not made whilst shutting down
with self.__send_lock:
self.__send_ready.clear()
except exceptions.AccessRefused:
self.__send_log_set_exc_and_wait('Access Refused (Credentials already in use?)')
except exceptions.ConnectionForced:
self.__send_log_set_exc_and_wait('Disconnected by broker (ConnectionForced)')
except SocketTimeout:
self.__send_log_set_exc_and_wait('SocketTimeout exception. wrong credentials, vhost or prefix?')
except SSLError:
self.__send_log_set_exc_and_wait('ssl.SSLError Bad Certificate?')
except (exceptions.AMQPError, SocketError):
self.__send_log_set_exc_and_wait('amqp/transport failure, sleeping before retry')
except:
self.__send_log_set_exc_and_wait('unexpected failure, exiting', wait_seconds=0)
break
logger.debug('finished') | Send request thread | entailment |
def __send_log_set_exc_and_wait(self, msg, level=None, wait_seconds=CONN_RETRY_DELAY_SECONDS):
"""To be called in exception context only.
msg - message to log
level - logging level. If not specified, ERROR unless it is a repeated failure in which case DEBUG. If
specified, the given level will always be used.
wait_seconds - how long to pause for (so retry is not triggered immediately)
"""
logger.log(
((logging.DEBUG if self.__send_exc else logging.ERROR) if level is None else level),
msg,
exc_info=DEBUG_ENABLED
)
self.__send_exc_time = monotonic()
self.__send_exc = exc_info()[1]
self.__end.wait(wait_seconds) | To be called in exception context only.
msg - message to log
level - logging level. If not specified, ERROR unless it is a repeated failure in which case DEBUG. If
specified, the given level will always be used.
wait_seconds - how long to pause for (so retry is not triggered immediately) | entailment |
def __send_exc_clear(self, log_if_exc_set=None):
"""Clear send exception and time. If exception was previously was set, optionally log log_if_exc_set at INFO
level.
"""
if not (log_if_exc_set is None or self.__send_exc is None):
logger.info(log_if_exc_set)
self.__send_exc_time = None
self.__send_exc = None | Clear send exception and time. If exception was previously was set, optionally log log_if_exc_set at INFO
level. | entailment |
def valid_mimetype(type_, allow_none=True):
"""Checks for validity of given type, optionally allowing for a None value. Note: Unknown idx/NUMBER notation, where
NUMBER is not a known shorthand mapping, will be rejected, i.e. type_ is valid if it
1) is an ASCII-only string between 1 & 64 characters long
2a) does not begin with "idx/" OR
2b) begins with "idx/" and is followed by a known shorthand index (integer)
"""
if isinstance(type_, unicode_type):
match = __IDX_PATTERN.match(type_)
if match:
return match.group(1) in __IDX_MAPPING
else:
return __is_ascii(type_, 1, __MAX_LEN)
else:
return type_ is None and allow_none | Checks for validity of given type, optionally allowing for a None value. Note: Unknown idx/NUMBER notation, where
NUMBER is not a known shorthand mapping, will be rejected, i.e. type_ is valid if it
1) is an ASCII-only string between 1 & 64 characters long
2a) does not begin with "idx/" OR
2b) begins with "idx/" and is followed by a known shorthand index (integer) | entailment |
def expand_idx_mimetype(type_):
"""Returns long equivalent of type_, if available, otherwise type_ itself. Does not raise exceptions"""
if isinstance(type_, unicode_type):
match = __IDX_PATTERN.match(type_)
return __IDX_MAPPING.get(match.group(1), type_) if match else type_
else:
return type_ | Returns long equivalent of type_, if available, otherwise type_ itself. Does not raise exceptions | entailment |
def throttle(self):
"""Uses time.monotonic() (or time.sleep() if not available) to limit to the desired rate. Should be called once
per iteration of action which is to be throttled. Returns None unless a custom wait_cmd was specified in the
constructor in which case its return value is used if a wait was required.
"""
iterations = self.__iterations
timestamp = monotonic()
outdated_threshold = timestamp - self.__interval
with self.__lock:
# remove any iterations older than interval
try:
while iterations[0] < outdated_threshold:
iterations.popleft()
except IndexError:
pass
# apply throttling if rate would be exceeded
if len(iterations) <= self.__max_iterations:
iterations.append(timestamp)
retval = None
else:
# wait until oldest sample is too old
delay = max(0, iterations[0] + self.__interval - timestamp)
# only notify user about longer delays
if delay > 1:
logger.warning('Send throttling delay (interval=%d, max_iterations=%d): %.2fs', self.__interval,
self.__max_iterations, delay)
retval = self.__wait_cmd(delay)
# log actual addition time
iterations.append(monotonic())
return retval | Uses time.monotonic() (or time.sleep() if not available) to limit to the desired rate. Should be called once
per iteration of action which is to be throttled. Returns None unless a custom wait_cmd was specified in the
constructor in which case its return value is used if a wait was required. | entailment |
def decode_sent_msg(pref, message, pretty=False):
"""decode_sent_msg: Return a string of the decoded message
"""
newline = "\n" if pretty else " "
indent = " " if pretty else ""
start = newline + indent
out = []
out.append("%s%s{%sSEQNUM: %d," % (pref, newline, start, message[Const.W_SEQ]))
out.append("%sCOMPRESSION: %d," % (start, message[Const.W_COMPRESSION]))
out.append("%sHASH: %s...," % (start, str(binascii.b2a_hex(message[Const.W_HASH]).decode('ascii'))[:10]))
out.append("%sMESSAGE:%s{%sCLIENTREF: %s," % (start, start, start + indent,
message[Const.W_MESSAGE][Const.M_CLIENTREF]))
out.append("%sRESOURCE: %s," % (start + indent, R_TYPES[message[Const.W_MESSAGE][Const.M_RESOURCE]]))
out.append("%sTYPE: %s," % (start + indent, C_TYPES[message[Const.W_MESSAGE][Const.M_TYPE]]))
out.append("%sACTION: %s," % (start + indent, message[Const.W_MESSAGE][Const.M_ACTION]))
if Const.M_RANGE in message[Const.W_MESSAGE]:
out.append("%sRANGE: %s," % (start + indent, message[Const.W_MESSAGE][Const.M_RANGE]))
out.append("%sPAYLOAD: %s%s}%s}" % (start + indent, message[Const.W_MESSAGE][Const.M_PAYLOAD], start, newline))
return ''.join(out) | decode_sent_msg: Return a string of the decoded message | entailment |
def decode_rcvd_msg(pref, message, seqnum, pretty=False):
"""decode_rcvd_msg: Return string of received message expanding short codes, optionally with newlines and indent
"""
newline = "\n" if pretty else " "
indent = " " if pretty else ""
start = newline + indent
out = []
out.append("%s%s{%sSEQNUM: %d," % (pref, newline, start, seqnum))
out.append("%sCLIENTREF: %s," % (start, message[Const.M_CLIENTREF]))
out.append("%sTYPE: %s," % (start, M_TYPES[message[Const.M_TYPE]]))
if message[Const.M_TYPE] in M_SUB_TYPES:
out.append("%sPAYLOAD: {CODE: %s, MESSAGE: %s}" %
(start, M_SUB_TYPES[message[Const.M_TYPE]][message[Const.M_PAYLOAD][Const.P_CODE]],
message[Const.M_PAYLOAD][Const.P_MESSAGE]))
else:
payload = None
if message[Const.M_PAYLOAD] is not None:
payload = {}
for item in message[Const.M_PAYLOAD]:
if item == Const.P_RESOURCE:
payload['RESOURCE'] = R_TYPES[message[Const.M_PAYLOAD][Const.P_RESOURCE]]
else:
payload[item] = message[Const.M_PAYLOAD][item]
out.append("%sPAYLOAD: %s" % (start, payload))
out.append("%s}" % newline)
return ''.join(out) | decode_rcvd_msg: Return string of received message expanding short codes, optionally with newlines and indent | entailment |
def _long_to_bytes(self, long_value):
"""
Turns a long value into its byte string equivalent.
:param long_value: the long value to be returned as a byte string
:return: a byte string equivalent of a long value
"""
_byte_string = b''
pack = struct.pack
while long_value > 0:
_byte_string = pack(b'>I', long_value & 0xffffffff) + _byte_string
long_value = long_value >> 32
for i in range(len(_byte_string)):
if _byte_string[i] != b'\000'[0]:
break
else:
_byte_string = b'\000'
i = 0
_byte_string = _byte_string[i:]
return _byte_string | Turns a long value into its byte string equivalent.
:param long_value: the long value to be returned as a byte string
:return: a byte string equivalent of a long value | entailment |
def restore_event(self, requestId):
"""restore an event based on the requestId.
For example if the user app had to shutdown with pending requests.
The user can rebuild the Events they were waiting for based on the requestId(s).
"""
with self.__requests:
if requestId not in self.__requests:
self.__requests[requestId] = RequestEvent(requestId)
return True
return False | restore an event based on the requestId.
For example if the user app had to shutdown with pending requests.
The user can rebuild the Events they were waiting for based on the requestId(s). | entailment |
def __add_callback(self, type_, func, serialised_if_crud=True):
"""sync_if_crud indicates whether to serialise this callback (applies only to CRUD)"""
Validation.callable_check(func)
with self.__callbacks:
self.__callbacks[type_].append((func, serialised_if_crud)) | sync_if_crud indicates whether to serialise this callback (applies only to CRUD) | entailment |
def register_callback_created(self, func, serialised=True):
"""Register a callback function to receive QAPI Unsolicited (resource) CREATED. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order (e.g. created point before created thing).
"""
self.__add_callback(_CB_CREATED, func, serialised_if_crud=serialised) | Register a callback function to receive QAPI Unsolicited (resource) CREATED. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order (e.g. created point before created thing). | entailment |
def register_callback_duplicate(self, func, serialised=True):
"""Register a callback function to receive QAPI Unsolicited (resource) DUPLICATE. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order.
"""
self.__add_callback(_CB_DUPLICATE, func, serialised_if_crud=serialised) | Register a callback function to receive QAPI Unsolicited (resource) DUPLICATE. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order. | entailment |
def register_callback_renamed(self, func, serialised=True):
"""Register a callback function to receive QAPI Unsolicited (resource) RENAMED. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order.
"""
self.__add_callback(_CB_RENAMED, func, serialised_if_crud=serialised) | Register a callback function to receive QAPI Unsolicited (resource) RENAMED. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order. | entailment |
def register_callback_deleted(self, func, serialised=True):
"""Register a callback function to receive QAPI Unsolicited (resource) DELETED. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order.
"""
self.__add_callback(_CB_DELETED, func, serialised_if_crud=serialised) | Register a callback function to receive QAPI Unsolicited (resource) DELETED. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order. | entailment |
def register_callback_reassigned(self, func, serialised=True):
"""Register a callback function to receive QAPI Unsolicited (entity) REASSIGNED. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order.
"""
self.__add_callback(_CB_REASSIGNED, func, serialised_if_crud=serialised) | Register a callback function to receive QAPI Unsolicited (entity) REASSIGNED. The
callback receives a single argument - the inner message. If `serialised` is not set,
the callbacks might arrive out-of-order. | entailment |
def simulate_feeddata(self, feedid, data, mime=None, time=None):
"""Send feed data"""
# Separate public method since internal one does not require parameter checks
feedid = Validation.guid_check_convert(feedid)
mime = Validation.mime_check_convert(mime, allow_none=True)
Validation.datetime_check_convert(time, allow_none=True, to_iso8601=False)
self.__simulate_feeddata(feedid, data, mime, datetime.utcnow() if time is None else time) | Send feed data | entailment |
def start(self): # noqa (complexity)
"""Start the send & recv Threads.
Start can be delayed to EG restore requestIds before attaching to the QAPI
Note: This function waits for/blocks until amqplink connect(s) and the current
sequence number has been obtained from the container (within 5 seconds)
"""
if not self.__end.is_set():
return
self.__end.clear()
try:
self.__network_retry_queue = Queue(self.__network_retry_queue_size)
self.__network_retry_thread = Thread(target=self.__network_retry, name='network')
self.__network_retry_thread.start()
try:
self.__amqplink.start()
except Exception as exc: # pylint: disable=broad-except
if not self.__amqplink.is_alive():
raise_from(LinkException("Core.AmqpLink: Failed to connect"), exc)
logger.exception("Unhandled startup error")
raise
req = self.request_ping()
if not req.wait(5):
raise LinkException("No container response to ping within 5s")
# (for req.payload) pylint: disable=unsubscriptable-object
if not req.success:
try:
info = ': %s' % req.payload[P_MESSAGE]
except (KeyError, TypeError):
info = ''
raise Exception("Unexpected ping failure: %s" % info)
payload = req.payload
self.__qapi_version_check(payload)
if self.__default_lang is None:
self.__default_lang = payload['lang']
self.__container_params = payload
try:
self.set_compression(payload['compression'])
except ValueError as ex:
raise_from(Exception('Container compression method (%d) unsupported' % payload['compression']), ex)
self.__local_meta = payload['local_meta']
self.__threadpool.start()
self.__crud_threadpool.start()
except:
self.stop()
raise | Start the send & recv Threads.
Start can be delayed to EG restore requestIds before attaching to the QAPI
Note: This function waits for/blocks until amqplink connect(s) and the current
sequence number has been obtained from the container (within 5 seconds) | entailment |
def stop(self):
"""Stop the Client, disconnect from queue
"""
if self.__end.is_set():
return
self.__end.set()
self.__send_retry_requests_timer.cancel()
self.__threadpool.stop()
self.__crud_threadpool.stop()
self.__amqplink.stop()
self.__network_retry_thread.join()
# Clear out remaining pending requests
with self.__requests:
shutdown = LinkShutdownException('Client stopped')
for req in self.__requests.values():
req.exception = shutdown
req._set()
self.__clear_references(req, remove_request=False)
if self.__requests:
logger.warning('%d unfinished request(s) discarded', len(self.__requests))
self.__requests.clear()
#
self.__network_retry_thread = None
self.__network_retry_queue = None
self.__container_params = None | Stop the Client, disconnect from queue | entailment |
def set_compression(self, comp=COMP_DEFAULT, size=COMP_SIZE):
"""Override compression method (defined by container) and threshold"""
if comp not in COMPRESSORS:
if comp == COMP_LZ4F:
raise ValueError('lz4f compression not available, required lz4framed')
else:
raise ValueError('Invalid compression method')
if not isinstance(size, int_types) or size < 1:
raise ValueError('size must be non-negative integer')
self.__comp_default = comp
self.__comp_size = size
return self.__comp_default, self.__comp_size | Override compression method (defined by container) and threshold | entailment |
def _request(self, resource, rtype, action=None, payload=None, offset=None, limit=None, requestId=None,
is_crud=False):
"""_request amqp queue publish helper
return: RequestEvent object or None for failed to publish
"""
end = self.__end
if end.is_set():
raise LinkShutdownException('Client stopped')
rng = None
if offset is not None and limit is not None:
Validation.limit_offset_check(limit, offset)
rng = "%d/%d" % (offset, limit)
with self.__requests:
if requestId is None:
requestId = self.__new_request_id()
elif requestId in self.__requests:
raise ValueError('requestId %s already in use' % requestId)
inner_msg = self.__make_innermsg(resource, rtype, requestId, action, payload, rng)
self.__requests[requestId] = ret = RequestEvent(requestId, inner_msg, is_crud=is_crud)
#
if not self.__retry_enqueue(PreparedMessage(inner_msg, requestId)):
raise LinkShutdownException('Client stopping')
return ret | _request amqp queue publish helper
return: RequestEvent object or None for failed to publish | entailment |
def __send_ready_cb(self, last_send_failure_time):
"""Callback from AmqpLink on send transport readiness. (Only ever comes from a single thread.)"""
logger.debug('Readiness notification (last failed=%s)', last_send_failure_time)
# It is possible for multiple timers to be scheduled (if multiple transport failures happen in a fairly short
# amount of time. See logic for __send_retry_requests
if last_send_failure_time is not None:
self.__send_retry_requests_timer.cancel()
# allow 10s for responses to come in before attempting to resend
self.__send_retry_requests_timer = Timer(10, self.__send_retry_requests, args=(last_send_failure_time,))
self.__send_retry_requests_timer.start() | Callback from AmqpLink on send transport readiness. (Only ever comes from a single thread.) | entailment |
def __send_retry_requests(self, last_send_failure_time):
"""Called via Timer from __send_ready to resend requests which might not have been sent due to transport
failure. This can happen since the current transport implementation does not received acknowledgements
for sent messages."""
# make sure multiple failures having set multiple times do not run concurrently
with self.__send_retry_requests_lock:
with self.__requests:
# produce list instead of generator as requests mapping can change during subsequent loop
retry_reqs = [req for req in self.__requests.values()
if req._sent_without_response(last_send_failure_time)]
retry_req_count = 0
# don't continue if another network failure has occured (which will trigger this function again)
while retry_reqs and self.__amqplink.last_send_exc_time <= last_send_failure_time:
req = retry_reqs.pop()
# lock individuallly so incoming request handling does not 'pause' for too long
with self.__requests:
# might have received a response (or finished since)
if not (req.id_ in self.__requests and req._sent_without_response(last_send_failure_time)):
logger.debug('Not resending request %s (finished or has received response)', req.id_)
continue
logger.debug('Resending request %s', req.id_)
if not self.__retry_enqueue(PreparedMessage(req._inner_msg_out, req.id_)):
# client shutdown
break
retry_req_count += 1
if retry_req_count:
logger.debug('Resending of %d request(s) complete (before %s)', retry_req_count, last_send_failure_time) | Called via Timer from __send_ready to resend requests which might not have been sent due to transport
failure. This can happen since the current transport implementation does not received acknowledgements
for sent messages. | entailment |
def request_entity_create(self, lid, epId=None):
"""request entity create: lid = local name to user
If epId=None (default), the current agent/EP is chosen
If epId=False, no agent is assigned
If epId=guid, said agent is chosen
"""
lid = Validation.lid_check_convert(lid)
if epId is None:
epId = self.__epId
elif epId is False:
epId = None
else:
epId = Validation.guid_check_convert(epId)
logger.debug("request_entity_create lid='%s'", lid)
return self._request(R_ENTITY, C_CREATE, None, {'epId': epId, 'lid': lid}, is_crud=True) | request entity create: lid = local name to user
If epId=None (default), the current agent/EP is chosen
If epId=False, no agent is assigned
If epId=guid, said agent is chosen | entailment |
def request_entity_reassign(self, lid, nepId=None):
"""request entity to be reassigned to given ep/agent
If nepId=None (default), the current agent/EP is chosen
If nepId=False, no agent is assigned
If nepId=guid, said agent is chosen
"""
lid = Validation.lid_check_convert(lid)
if nepId is None:
nepId = self.__epId
elif nepId is False:
nepId = None
else:
nepId = Validation.guid_check_convert(nepId)
logger.debug("request_entity_reassign lid='%s' -> nepId='%s'", lid, nepId)
return self._request(R_ENTITY, C_UPDATE, (lid, 'reassign'), {'epId': nepId}, is_crud=True) | request entity to be reassigned to given ep/agent
If nepId=None (default), the current agent/EP is chosen
If nepId=False, no agent is assigned
If nepId=guid, said agent is chosen | entailment |
def request_point_create(self, foc, lid, pid, control_cb=None, save_recent=0):
"""request point create: feed or control, lid and pid point lid
"""
Validation.foc_check(foc)
lid = Validation.lid_check_convert(lid)
pid = Validation.pid_check_convert(pid)
save_recent = validate_int(save_recent, 'save_recent')
logger.debug("request_point_create foc=%i lid='%s' pid='%s' save_recent=%d", foc, lid, pid, save_recent)
if foc == R_CONTROL:
Validation.callable_check(control_cb)
if save_recent:
logger.warning('ignoring non-zero save_recent value for control')
evt = self._request(foc, C_CREATE, (lid,), {'lid': pid}, is_crud=True)
with self.__pending_controls:
self.__pending_controls[evt.id_] = control_cb
return evt
elif control_cb:
raise ValueError('callback specified for Feed')
else:
return self._request(foc, C_CREATE, (lid,), {'lid': pid, 'saveRecent': save_recent}, is_crud=True) | request point create: feed or control, lid and pid point lid | entailment |
def __point_data_to_bytes(self, data, mime=None): # pylint: disable=too-many-branches
"""Returns tuple of mime type & data. Auto encodes unicode strings (to utf8) and
dictionaries (to ubjson) depending on client setting."""
if mime is None:
if self.__auto_encode_decode:
if isinstance(data, bytes):
return None, data
elif isinstance(data, dict):
# check top level dictionary keys
if all(isinstance(key, unicode_type) for key in data):
return 'idx/1', ubjdumpb(data) # application/ubjson
else:
raise ValueError('At least one key in dict not real (unicode) string')
elif isinstance(data, unicode_type):
return 'idx/2', data.encode('utf8') # text/plain; charset=utf8
else:
raise ValueError('cannot auto-encode data of type %s' % type(data))
elif isinstance(data, bytes):
return None, data
else:
raise ValueError('No mime type specified and not bytes object (auto-encode disabled)')
elif valid_mimetype(mime):
if isinstance(data, bytes):
return mime, data
else:
raise ValueError('mime specified but data not bytes object')
else:
raise ValueError('invalid mime type %s' % mime) | Returns tuple of mime type & data. Auto encodes unicode strings (to utf8) and
dictionaries (to ubjson) depending on client setting. | entailment |
def __bytes_to_share_data(self, payload):
"""Attempt to auto-decode data"""
rbytes = payload[P_DATA]
mime = payload[P_MIME]
if mime is None or not self.__auto_encode_decode:
return rbytes, mime
mime = expand_idx_mimetype(mime).lower()
try:
if mime == 'application/ubjson':
return ubjloadb(rbytes), None
elif mime == 'text/plain; charset=utf8':
return rbytes.decode('utf-8'), None
else:
return rbytes, mime
except:
logger.warning('auto-decode failed, returning bytes', exc_info=DEBUG_ENABLED)
return rbytes, mime | Attempt to auto-decode data | entailment |
def __new_request_id(self):
"""requestId follows form "pre num" where pre is some random ascii prefix EG 6 chars long
and num is an ever increasing number (self.__reqnum). MUST be called within self.__requests lock
"""
while True:
# Since seqnum wraps on 2^64 at most, this should always fit into 32 chars (QAPI request id limit)
with self.__seqnum_lock:
requestId = "%s%d" % (self.__reqpre, self.__reqnum)
self.__reqnum += 1
if requestId not in self.__requests:
break
# in the unlikely event of a collision update prefix
self.__reqpre = self.__rnd_string(6)
return requestId | requestId follows form "pre num" where pre is some random ascii prefix EG 6 chars long
and num is an ever increasing number (self.__reqnum). MUST be called within self.__requests lock | entailment |
def __make_hash(cls, innermsg, token, seqnum):
"""return the hash for this innermsg, token, seqnum
return digest bytes
"""
hobj = hmacNew(token, digestmod=hashfunc)
hobj.update(innermsg)
hobj.update(cls.__byte_packer(seqnum))
return hobj.digest() | return the hash for this innermsg, token, seqnum
return digest bytes | entailment |
def __check_hash(self, message):
"""return true/false if hash is good
message = dict
"""
return message[W_HASH] == self.__make_hash(message[W_MESSAGE], self.__token, message[W_SEQ]) | return true/false if hash is good
message = dict | entailment |
def __make_innermsg(resource, rtype, ref, action=None, payload=None, limit=None):
"""return innermsg chunk (dict)
"""
if action is not None and not isinstance(action, (tuple, list)):
raise TypeError('action must be None/tuple/list')
p = {M_RESOURCE: resource,
M_TYPE: int(rtype),
M_CLIENTREF: ref,
# Ensure action path consists only of strings
M_ACTION: tuple(u(element) for element in action) if action else None,
M_PAYLOAD: payload}
if limit is not None: # Note: fmtted like "0/15" where 0 = offset, 15 = limit
p[M_RANGE] = limit
return p | return innermsg chunk (dict) | entailment |
def __request_except(self, requestId, exc, set_and_forget=True):
"""Set exception (if not None) for the given request and (optionally) remove from internal cache & setting its
event"""
try:
with self.__requests:
if set_and_forget:
req = self.__requests.pop(requestId)
else:
req = self.__requests[requestId]
except KeyError:
logger.error('Unknown request %s - cannot set exception', requestId)
else:
if exc is not None:
req.exception = exc
if set_and_forget:
req._set() | Set exception (if not None) for the given request and (optionally) remove from internal cache & setting its
event | entailment |
def __request_mark_sent(self, requestId):
"""Set send time & clear exception from request if set, ignoring non-existent requests"""
with self.__requests:
try:
req = self.__requests[requestId]
except KeyError:
# request might have had a response already have been removed by receiving thread
pass
else:
req.exception = None
req._send_time = monotonic() | Set send time & clear exception from request if set, ignoring non-existent requests | entailment |
def __publish(self, qmsg):
"""Returns True unless sending failed (at which point an exception will have been set in the request)"""
with self.__seqnum_lock:
seqnum = self.__seqnum
self.__seqnum = (self.__seqnum + 1) % _SEQ_WRAP_SIZE
#
innermsg = ubjdumpb(qmsg.inner_msg)
clevel = COMP_NONE
if len(innermsg) >= self.__comp_size:
logger.debug('Compressing payload')
try:
innermsg = COMPRESSORS[self.__comp_default].compress(innermsg)
except KeyError:
logger.warning('Unknown compression method %s, not compressing', self.__comp_default)
else:
clevel = self.__comp_default
p = {W_SEQ: seqnum,
W_MESSAGE: innermsg,
W_HASH: self.__make_hash(innermsg, self.__token, seqnum),
W_COMPRESSION: clevel}
msg = ubjdumpb(p)
# do not send messages exceeding size limit
if len(msg) > self.__max_encoded_length:
self.__request_except(qmsg.requestId, ValueError("Message Payload too large %d > %d"
% (len(msg), self.__max_encoded_length)))
return False
self.__amqplink.send(msg, content_type='application/ubjson')
if DEBUG_ENABLED:
p[W_MESSAGE] = qmsg.inner_msg
logger.debug(decode_sent_msg('decode_sent_msg', p))
# Callback any debuggers
self.__fire_callback(_CB_DEBUG_SEND, msg)
#
return True | Returns True unless sending failed (at which point an exception will have been set in the request) | entailment |
def __fire_callback(self, type_, *args, **kwargs):
"""Returns True if at least one callback was called"""
called = False
plain_submit = self.__threadpool.submit
with self.__callbacks:
submit = self.__crud_threadpool.submit if type_ in _CB_CRUD_TYPES else plain_submit
for func, serialised_if_crud in self.__callbacks[type_]:
called = True
# allow CRUD callbacks to not be serialised if requested
(submit if serialised_if_crud else plain_submit)(func, *args, **kwargs)
return called | Returns True if at least one callback was called | entailment |
def __validate_decode_msg(self, message): # noqa (complexity) pylint: disable=too-many-return-statements,too-many-branches
"""Decodes wrapper, check hash & seq, decodes body. Returns body or None, if validation / unpack failed"""
try:
if not _CONTENT_TYPE_PATTERN.match(message.content_type):
logger.debug('Message with unexpected content type %s from container, ignoring', message.content_type)
return None
except AttributeError:
logger.debug('Message without content type from container, ignoring')
return None
# Decode & check message wrapper
try:
body = ubjloadb(message.body)
except:
logger.warning('Failed to decode message wrapper, ignoring', exc_info=DEBUG_ENABLED)
return None
if not self.__valid_msg_wrapper(body):
logger.warning('Invalid message wrapper, ignoring')
return None
# currently only warn although maybe this should be an error
if self.__cnt_seqnum != -1 and not self.__valid_seqnum(body[W_SEQ], self.__cnt_seqnum):
logger.warning('Unexpected seqnum from container: %d (last seen: %d)', body[W_SEQ],
self.__cnt_seqnum)
self.__cnt_seqnum = body[W_SEQ]
# Check message hash
if not self.__check_hash(body):
logger.warning('Message has invalid hash, ignoring')
return None
# Decompress inner message
try:
msg = COMPRESSORS[body[W_COMPRESSION]].decompress(body[W_MESSAGE])
except KeyError:
logger.warning('Received message with unknown compression: %s', body[W_COMPRESSION])
return None
except OversizeException as ex:
logger.warning('Uncompressed message exceeds %d bytes, ignoring', ex.size, exc_info=DEBUG_ENABLED)
return None
except:
logger.warning('Decompression failed, ignoring message', exc_info=DEBUG_ENABLED)
return None
# Decode inner message
try:
msg = ubjloadb(msg, object_pairs_hook=OrderedDict)
except:
logger.warning('Failed to decode message, ignoring', exc_info=DEBUG_ENABLED)
return None
if self.__valid_msg_body(msg):
return (msg, body[W_SEQ])
else:
logger.warning('Message with invalid body, ignoring: %s', msg)
return None | Decodes wrapper, check hash & seq, decodes body. Returns body or None, if validation / unpack failed | entailment |
def __dispatch_msg(self, message):
"""Verify the signature and update RequestEvents / perform callbacks
Note messages with an invalid wrapper, invalid hash, invalid sequence number or unexpected clientRef
will be sent to debug_bad callback.
"""
msg = self.__validate_decode_msg(message)
if msg:
msg, seqnum = msg
else:
self.__fire_callback(_CB_DEBUG_BAD, message.body, message.content_type)
return
if DEBUG_ENABLED:
logger.debug(decode_rcvd_msg('decode_rcvd_msg', msg, seqnum))
self.__fire_callback(_CB_DEBUG_RCVD, msg)
# no reference, or set by client (not container)
if msg[M_TYPE] not in _RSP_CONTAINER_REF:
# solicitied
if msg[M_CLIENTREF]:
if not self.__handle_known_solicited(msg):
logger.debug('Ignoring response for unknown request %s of type %s', msg[M_CLIENTREF], msg[M_TYPE])
# unsolicitied
else:
self.__perform_unsolicited_callbacks(msg)
# unsolicited but can have reference set by container
elif msg[M_TYPE] == E_CONTROLREQ:
self.__handle_controlreq(msg[M_PAYLOAD], msg[M_CLIENTREF])
else:
logger.error('Unhandled unsolicited message of type %s', msg[M_TYPE]) | Verify the signature and update RequestEvents / perform callbacks
Note messages with an invalid wrapper, invalid hash, invalid sequence number or unexpected clientRef
will be sent to debug_bad callback. | entailment |
def __handle_known_solicited(self, msg):
"""returns True if message has been handled as a solicited response"""
with self.__requests:
try:
req = self.__requests[msg[M_CLIENTREF]]
except KeyError:
return False
if self.__handle_low_seq_resend(msg, req):
return True
perform_cb = finish = False
if msg[M_TYPE] not in _RSP_NO_REF:
self.__update_existing(msg, req)
# Finalise request if applicable (not marked as finished here so can perform callback first below)
if msg[M_TYPE] in _RSP_TYPE_FINISH:
finish = True
# Exception - DUPLICATED also should produce callback
perform_cb = (msg[M_TYPE] == E_DUPLICATED)
elif msg[M_TYPE] not in _RSP_TYPE_ONGOING:
perform_cb = True
else:
logger.warning('Reference unexpected for request %s of type %s', msg[M_CLIENTREF],
msg[M_TYPE])
# outside lock to avoid deadlock if callbacks try to perform request-related functions
if perform_cb:
self.__perform_unsolicited_callbacks(msg)
# mark request as finished
if finish:
req.success = msg[M_TYPE] in _RSP_TYPE_SUCCESS
req.payload = msg[M_PAYLOAD]
self.__clear_references(req)
# Serialise completion of CRUD requests (together with CREATED, DELETED, etc. messages)
if req.is_crud:
self.__crud_threadpool.submit(req._set)
else:
req._set()
return True | returns True if message has been handled as a solicited response | entailment |
def __clear_references(self, request, remove_request=True):
"""Remove any internal references to the given request"""
# remove request itself
if remove_request:
with self.__requests:
self.__requests.pop(request.id_)
# remove request type specific references
if not request.success:
with self.__pending_subs:
self.__pending_subs.pop(request.id_, None)
with self.__pending_controls:
self.__pending_controls.pop(request.id_, None) | Remove any internal references to the given request | entailment |
def __update_existing(self, msg, req):
"""Propagate changes based on type of message. MUST be called within self.__requests lock. Performs additional
actions when solicited messages arrive."""
req._messages.append(msg)
payload = msg[M_PAYLOAD]
if msg[M_TYPE] in _RSP_TYPE_CREATION:
if payload[P_RESOURCE] == R_SUB:
# Add callback for feeddata
with self.__pending_subs:
if msg[M_CLIENTREF] in self.__pending_subs:
callback = self.__pending_subs.pop(msg[M_CLIENTREF])
if payload[P_POINT_TYPE] == R_FEED:
self.__callbacks[_CB_FEED][payload[P_POINT_ID]] = callback
else:
logger.warning('Subscription intended to feed is actually control: %s', payload[P_POINT_ID])
elif payload[P_RESOURCE] == R_CONTROL:
with self.__pending_controls:
if msg[M_CLIENTREF] in self.__pending_controls:
# callbacks by thing
entity_point_callbacks = self.__callbacks[_CB_CONTROL].setdefault(payload[P_ENTITY_LID], {})
# callback by thing and point
entity_point_callbacks[payload[P_LID]] = self.__pending_controls.pop(msg[M_CLIENTREF])
elif msg[M_TYPE] == E_RECENTDATA:
samples = []
for sample in payload[P_SAMPLES]:
data, mime, time = self.__decode_data_time(sample)
samples.append({'data': data, 'mime': mime, 'time': time})
self.__fire_callback(_CB_RECENT_DATA, {'c': msg[M_CLIENTREF],
'samples': samples}) | Propagate changes based on type of message. MUST be called within self.__requests lock. Performs additional
actions when solicited messages arrive. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.