code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def decode_sent_msg(pref, message, pretty=False):
    """Return a human-readable string of a decoded (sent) message wrapper.

    pref is prepended to the output; pretty=True inserts newlines and indent.
    """
    sep = "\n" if pretty else " "
    pad = " " if pretty else ""
    lead = sep + pad
    inner = message[Const.W_MESSAGE]
    parts = [
        "%s%s{%sSEQNUM: %d," % (pref, sep, lead, message[Const.W_SEQ]),
        "%sCOMPRESSION: %d," % (lead, message[Const.W_COMPRESSION]),
        "%sHASH: %s...," % (lead, str(binascii.b2a_hex(message[Const.W_HASH]).decode('ascii'))[:10]),
        "%sMESSAGE:%s{%sCLIENTREF: %s," % (lead, lead, lead + pad, inner[Const.M_CLIENTREF]),
        "%sRESOURCE: %s," % (lead + pad, R_TYPES[inner[Const.M_RESOURCE]]),
        "%sTYPE: %s," % (lead + pad, C_TYPES[inner[Const.M_TYPE]]),
        "%sACTION: %s," % (lead + pad, inner[Const.M_ACTION]),
    ]
    if Const.M_RANGE in inner:
        parts.append("%sRANGE: %s," % (lead + pad, inner[Const.M_RANGE]))
    parts.append("%sPAYLOAD: %s%s}%s}" % (lead + pad, inner[Const.M_PAYLOAD], lead, sep))
    return ''.join(parts)
decode_sent_msg: Return a string of the decoded message
2.691792
2.679777
1.004484
def decode_rcvd_msg(pref, message, seqnum, pretty=False):
    """Return string of received message expanding short codes, optionally with
    newlines and indent (pretty=True)."""
    sep = "\n" if pretty else " "
    pad = " " if pretty else ""
    lead = sep + pad
    parts = [
        "%s%s{%sSEQNUM: %d," % (pref, sep, lead, seqnum),
        "%sCLIENTREF: %s," % (lead, message[Const.M_CLIENTREF]),
        "%sTYPE: %s," % (lead, M_TYPES[message[Const.M_TYPE]]),
    ]
    if message[Const.M_TYPE] in M_SUB_TYPES:
        parts.append("%sPAYLOAD: {CODE: %s, MESSAGE: %s}"
                     % (lead,
                        M_SUB_TYPES[message[Const.M_TYPE]][message[Const.M_PAYLOAD][Const.P_CODE]],
                        message[Const.M_PAYLOAD][Const.P_MESSAGE]))
    else:
        payload = None
        if message[Const.M_PAYLOAD] is not None:
            payload = {}
            for key in message[Const.M_PAYLOAD]:
                if key == Const.P_RESOURCE:
                    # expand resource short code into its name
                    payload['RESOURCE'] = R_TYPES[message[Const.M_PAYLOAD][Const.P_RESOURCE]]
                else:
                    payload[key] = message[Const.M_PAYLOAD][key]
        parts.append("%sPAYLOAD: %s" % (lead, payload))
    parts.append("%s}" % sep)
    return ''.join(parts)
decode_rcvd_msg: Return string of received message expanding short codes, optionally with newlines and indent
2.450472
2.43605
1.00592
def _long_to_bytes(self, long_value):
    """Turn a non-negative (arbitrarily large) integer into its big-endian
    byte string equivalent.

    :param long_value: the integer to be returned as a byte string
    :return: a byte string equivalent of the value (b'\\000' for zero)
    """
    raw = b''
    while long_value > 0:
        raw = struct.pack(b'>I', long_value & 0xffffffff) + raw
        long_value >>= 32
    # drop leading NUL padding; zero is represented as a single NUL byte
    stripped = raw.lstrip(b'\000')
    return stripped if stripped else b'\000'
Turns a long value into its byte string equivalent. :param long_value: the long value to be returned as a byte string :return: a byte string equivalent of a long value
2.227377
2.29632
0.969977
def restore_event(self, requestId):
    """Restore an event based on the requestId (e.g. after the user app had to
    shut down with pending requests, so the user can rebuild the events they
    were waiting for).

    Returns True if a new event was created, False if one already existed.
    """
    with self.__requests:
        if requestId in self.__requests:
            return False
        self.__requests[requestId] = RequestEvent(requestId)
        return True
restore an event based on the requestId. For example if the user app had to shutdown with pending requests. The user can rebuild the Events they were waiting for based on the requestId(s).
4.729944
6.237975
0.75825
def __add_callback(self, type_, func, serialised_if_crud=True):
    """Register func as a callback of the given type.

    serialised_if_crud indicates whether to serialise this callback
    (applies only to CRUD callbacks).
    """
    Validation.callable_check(func)
    entry = (func, serialised_if_crud)
    with self.__callbacks:
        self.__callbacks[type_].append(entry)
sync_if_crud indicates whether to serialise this callback (applies only to CRUD)
6.019512
5.18278
1.161445
def register_callback_created(self, func, serialised=True):
    """Register a callback for QAPI unsolicited (resource) CREATED messages.

    The callback receives a single argument - the inner message. If serialised
    is not set, callbacks might arrive out-of-order (e.g. created point before
    created thing).
    """
    self.__add_callback(_CB_CREATED, func, serialised_if_crud=serialised)
Register a callback function to receive QAPI Unsolicited (resource) CREATED. The callback receives a single argument - the inner message. If `serialised` is not set, the callbacks might arrive out-of-order (e.g. created point before created thing).
16.076887
16.623585
0.967113
def register_callback_duplicate(self, func, serialised=True):
    """Register a callback for QAPI unsolicited (resource) DUPLICATE messages.

    The callback receives a single argument - the inner message. If serialised
    is not set, callbacks might arrive out-of-order.
    """
    self.__add_callback(_CB_DUPLICATE, func, serialised_if_crud=serialised)
Register a callback function to receive QAPI Unsolicited (resource) DUPLICATE. The callback receives a single argument - the inner message. If `serialised` is not set, the callbacks might arrive out-of-order.
15.686223
17.042591
0.920413
def register_callback_renamed(self, func, serialised=True):
    """Register a callback for QAPI unsolicited (resource) RENAMED messages.

    The callback receives a single argument - the inner message. If serialised
    is not set, callbacks might arrive out-of-order.
    """
    self.__add_callback(_CB_RENAMED, func, serialised_if_crud=serialised)
Register a callback function to receive QAPI Unsolicited (resource) RENAMED. The callback receives a single argument - the inner message. If `serialised` is not set, the callbacks might arrive out-of-order.
15.888618
17.559055
0.904867
def register_callback_deleted(self, func, serialised=True):
    """Register a callback for QAPI unsolicited (resource) DELETED messages.

    The callback receives a single argument - the inner message. If serialised
    is not set, callbacks might arrive out-of-order.
    """
    self.__add_callback(_CB_DELETED, func, serialised_if_crud=serialised)
Register a callback function to receive QAPI Unsolicited (resource) DELETED. The callback receives a single argument - the inner message. If `serialised` is not set, the callbacks might arrive out-of-order.
15.645466
17.662012
0.885826
def register_callback_reassigned(self, func, serialised=True):
    """Register a callback for QAPI unsolicited (entity) REASSIGNED messages.

    The callback receives a single argument - the inner message. If serialised
    is not set, callbacks might arrive out-of-order.
    """
    self.__add_callback(_CB_REASSIGNED, func, serialised_if_crud=serialised)
Register a callback function to receive QAPI Unsolicited (entity) REASSIGNED. The callback receives a single argument - the inner message. If `serialised` is not set, the callbacks might arrive out-of-order.
13.97417
14.600655
0.957092
def simulate_feeddata(self, feedid, data, mime=None, time=None):
    """Send feed data.

    Separate public method since the internal one does not require parameter
    checks. time defaults to the current (naive) UTC time.
    """
    feedid = Validation.guid_check_convert(feedid)
    mime = Validation.mime_check_convert(mime, allow_none=True)
    # validation only - the converted value is intentionally unused
    Validation.datetime_check_convert(time, allow_none=True, to_iso8601=False)
    if time is None:
        time = datetime.utcnow()
    self.__simulate_feeddata(feedid, data, mime, time)
Send feed data
5.675345
5.602876
1.012934
def start(self):  # noqa (complexity)
    """Start the send & recv threads.

    Start can be delayed, e.g. to restore requestIds before attaching to the
    QAPI. Note: this function waits for/blocks until amqplink connect(s) and
    the current sequence number has been obtained from the container (within
    5 seconds).
    """
    if not self.__end.is_set():
        # already running
        return
    self.__end.clear()
    try:
        self.__network_retry_queue = Queue(self.__network_retry_queue_size)
        self.__network_retry_thread = Thread(target=self.__network_retry, name='network')
        self.__network_retry_thread.start()
        try:
            self.__amqplink.start()
        except Exception as exc:  # pylint: disable=broad-except
            if not self.__amqplink.is_alive():
                raise_from(LinkException("Core.AmqpLink: Failed to connect"), exc)
            logger.exception("Unhandled startup error")
            raise
        req = self.request_ping()
        if not req.wait(5):
            raise LinkException("No container response to ping within 5s")
        # (for req.payload) pylint: disable=unsubscriptable-object
        if not req.success:
            try:
                info = ': %s' % req.payload[P_MESSAGE]
            except (KeyError, TypeError):
                info = ''
            raise Exception("Unexpected ping failure: %s" % info)
        payload = req.payload
        self.__qapi_version_check(payload)
        if self.__default_lang is None:
            self.__default_lang = payload['lang']
        self.__container_params = payload
        try:
            self.set_compression(payload['compression'])
        except ValueError as ex:
            raise_from(Exception('Container compression method (%d) unsupported' % payload['compression']), ex)
        self.__local_meta = payload['local_meta']
        self.__threadpool.start()
        self.__crud_threadpool.start()
    except:
        # undo partial startup before propagating
        self.stop()
        raise
Start the send & recv Threads. Start can be delayed to EG restore requestIds before attaching to the QAPI Note: This function waits for/blocks until amqplink connect(s) and the current sequence number has been obtained from the container (within 5 seconds)
4.987746
4.771574
1.045304
def stop(self):
    """Stop the Client and disconnect from the queue (idempotent)."""
    if self.__end.is_set():
        return
    self.__end.set()
    self.__send_retry_requests_timer.cancel()
    self.__threadpool.stop()
    self.__crud_threadpool.stop()
    self.__amqplink.stop()
    self.__network_retry_thread.join()
    # Clear out remaining pending requests
    with self.__requests:
        shutdown = LinkShutdownException('Client stopped')
        for req in self.__requests.values():
            req.exception = shutdown
            req._set()
            self.__clear_references(req, remove_request=False)
        if self.__requests:
            logger.warning('%d unfinished request(s) discarded', len(self.__requests))
        self.__requests.clear()
    #
    self.__network_retry_thread = None
    self.__network_retry_queue = None
    self.__container_params = None
Stop the Client, disconnect from queue
6.005561
5.791245
1.037007
def set_compression(self, comp=COMP_DEFAULT, size=COMP_SIZE):
    """Override compression method (defined by container) and threshold.

    comp - compression method; must be one of COMPRESSORS
    size - minimum payload size (bytes) at which compression kicks in
    Returns the (method, size) pair now in effect.
    Raises ValueError for an unknown method or a non-positive size.
    """
    if comp not in COMPRESSORS:
        if comp == COMP_LZ4F:
            raise ValueError('lz4f compression not available, required lz4framed')
        else:
            raise ValueError('Invalid compression method')
    if not isinstance(size, int_types) or size < 1:
        # fixed message: the check rejects anything below 1, i.e. size must be
        # positive (previous message incorrectly said "non-negative")
        raise ValueError('size must be a positive integer')
    self.__comp_default = comp
    self.__comp_size = size
    return self.__comp_default, self.__comp_size
Override compression method (defined by container) and threshold
3.936677
3.804586
1.034719
def _request(self, resource, rtype, action=None, payload=None, offset=None, limit=None, requestId=None,
             is_crud=False):
    """_request amqp queue publish helper.

    Returns a RequestEvent object; raises LinkShutdownException if the client
    is stopped/stopping, ValueError for a duplicate requestId.
    """
    if self.__end.is_set():
        raise LinkShutdownException('Client stopped')
    rng = None
    if offset is not None and limit is not None:
        Validation.limit_offset_check(limit, offset)
        rng = "%d/%d" % (offset, limit)
    with self.__requests:
        if requestId is None:
            requestId = self.__new_request_id()
        elif requestId in self.__requests:
            raise ValueError('requestId %s already in use' % requestId)
        inner_msg = self.__make_innermsg(resource, rtype, requestId, action, payload, rng)
        self.__requests[requestId] = ret = RequestEvent(requestId, inner_msg, is_crud=is_crud)
    #
    if not self.__retry_enqueue(PreparedMessage(inner_msg, requestId)):
        raise LinkShutdownException('Client stopping')
    return ret
_request amqp queue publish helper return: RequestEvent object or None for failed to publish
5.16904
4.874634
1.060395
def __send_ready_cb(self, last_send_failure_time):
    """Callback from AmqpLink on send transport readiness.

    (Only ever comes from a single thread.)
    """
    logger.debug('Readiness notification (last failed=%s)', last_send_failure_time)
    # Multiple timers can end up scheduled if several transport failures happen
    # in a fairly short amount of time - see logic in __send_retry_requests.
    if last_send_failure_time is None:
        return
    self.__send_retry_requests_timer.cancel()
    # allow 10s for responses to come in before attempting to resend
    self.__send_retry_requests_timer = Timer(10, self.__send_retry_requests,
                                             args=(last_send_failure_time,))
    self.__send_retry_requests_timer.start()
Callback from AmqpLink on send transport readiness. (Only ever comes from a single thread.)
5.266405
4.82017
1.092577
def __send_retry_requests(self, last_send_failure_time):
    """Resend requests which might not have been sent due to a transport failure.

    Called via Timer from __send_ready_cb. Needed since the current transport
    implementation does not receive acknowledgements for sent messages.
    """
    # make sure multiple failures having set multiple timers do not run concurrently
    with self.__send_retry_requests_lock:
        with self.__requests:
            # list instead of generator: the requests mapping can change during
            # the subsequent loop
            pending = [req for req in self.__requests.values()
                       if req._sent_without_response(last_send_failure_time)]
        resent = 0
        # don't continue if another network failure has occurred (it will
        # trigger this function again)
        while pending and self.__amqplink.last_send_exc_time <= last_send_failure_time:
            req = pending.pop()
            # lock individually so incoming request handling does not 'pause' for too long
            with self.__requests:
                # might have received a response (or finished) since
                if not (req.id_ in self.__requests and
                        req._sent_without_response(last_send_failure_time)):
                    logger.debug('Not resending request %s (finished or has received response)', req.id_)
                    continue
                logger.debug('Resending request %s', req.id_)
                if not self.__retry_enqueue(PreparedMessage(req._inner_msg_out, req.id_)):
                    # client shutdown
                    break
                resent += 1
        if resent:
            logger.debug('Resending of %d request(s) complete (before %s)', resent, last_send_failure_time)
Called via Timer from __send_ready to resend requests which might not have been sent due to transport failure. This can happen since the current transport implementation does not receive acknowledgements for sent messages.
7.190037
7.084448
1.014904
def request_entity_create(self, lid, epId=None):
    """Request entity create; lid = local name to use.

    epId=None (default): the current agent/EP is chosen.
    epId=False: no agent is assigned.
    epId=<guid>: said agent is chosen.
    """
    lid = Validation.lid_check_convert(lid)
    if epId is None:
        epId = self.__epId
    elif epId is False:
        epId = None
    else:
        epId = Validation.guid_check_convert(epId)
    logger.debug("request_entity_create lid='%s'", lid)
    return self._request(R_ENTITY, C_CREATE, None, {'epId': epId, 'lid': lid}, is_crud=True)
request entity create: lid = local name to use If epId=None (default), the current agent/EP is chosen If epId=False, no agent is assigned If epId=guid, said agent is chosen
4.471268
4.332599
1.032006
def request_entity_reassign(self, lid, nepId=None):
    """Request entity to be reassigned to the given ep/agent.

    nepId=None (default): the current agent/EP is chosen.
    nepId=False: no agent is assigned.
    nepId=<guid>: said agent is chosen.
    """
    lid = Validation.lid_check_convert(lid)
    if nepId is None:
        nepId = self.__epId
    elif nepId is False:
        nepId = None
    else:
        nepId = Validation.guid_check_convert(nepId)
    logger.debug("request_entity_reassign lid='%s' -> nepId='%s'", lid, nepId)
    return self._request(R_ENTITY, C_UPDATE, (lid, 'reassign'), {'epId': nepId}, is_crud=True)
request entity to be reassigned to given ep/agent If nepId=None (default), the current agent/EP is chosen If nepId=False, no agent is assigned If nepId=guid, said agent is chosen
4.270072
3.938075
1.084304
def request_point_create(self, foc, lid, pid, control_cb=None, save_recent=0):
    """Request point create (feed or control) under entity lid with point id pid.

    control_cb is required (and only valid) for controls; save_recent applies
    only to feeds.
    """
    Validation.foc_check(foc)
    lid = Validation.lid_check_convert(lid)
    pid = Validation.pid_check_convert(pid)
    save_recent = validate_int(save_recent, 'save_recent')
    logger.debug("request_point_create foc=%i lid='%s' pid='%s' save_recent=%d", foc, lid, pid, save_recent)
    if foc != R_CONTROL:
        if control_cb:
            raise ValueError('callback specified for Feed')
        return self._request(foc, C_CREATE, (lid,), {'lid': pid, 'saveRecent': save_recent}, is_crud=True)
    Validation.callable_check(control_cb)
    if save_recent:
        logger.warning('ignoring non-zero save_recent value for control')
    evt = self._request(foc, C_CREATE, (lid,), {'lid': pid}, is_crud=True)
    with self.__pending_controls:
        self.__pending_controls[evt.id_] = control_cb
    return evt
request point create: feed or control, under entity lid, with point id pid
4.318112
4.128239
1.045994
def __point_data_to_bytes(self, data, mime=None):  # pylint: disable=too-many-branches
    """Return a tuple of (mime type, data bytes).

    Auto-encodes unicode strings (to utf8) and dictionaries (to ubjson),
    depending on the client's auto-encode setting.
    """
    if mime is None:
        if self.__auto_encode_decode:
            if isinstance(data, bytes):
                return None, data
            elif isinstance(data, dict):
                # check top level dictionary keys
                if all(isinstance(key, unicode_type) for key in data):
                    return 'idx/1', ubjdumpb(data)  # application/ubjson
                raise ValueError('At least one key in dict not real (unicode) string')
            elif isinstance(data, unicode_type):
                return 'idx/2', data.encode('utf8')  # text/plain; charset=utf8
            else:
                raise ValueError('cannot auto-encode data of type %s' % type(data))
        elif isinstance(data, bytes):
            return None, data
        else:
            raise ValueError('No mime type specified and not bytes object (auto-encode disabled)')
    elif valid_mimetype(mime):
        if isinstance(data, bytes):
            return mime, data
        raise ValueError('mime specified but data not bytes object')
    else:
        raise ValueError('invalid mime type %s' % mime)
Returns tuple of mime type & data. Auto encodes unicode strings (to utf8) and dictionaries (to ubjson) depending on client setting.
4.479982
3.748623
1.195101
def __bytes_to_share_data(self, payload):
    """Attempt to auto-decode share data.

    Returns (data, mime); mime is None when the data has been decoded to a
    native type, otherwise the raw bytes and original mime are returned.
    """
    rbytes = payload[P_DATA]
    mime = payload[P_MIME]
    if mime is None or not self.__auto_encode_decode:
        return rbytes, mime
    mime = expand_idx_mimetype(mime).lower()
    try:
        if mime == 'application/ubjson':
            return ubjloadb(rbytes), None
        elif mime == 'text/plain; charset=utf8':
            return rbytes.decode('utf-8'), None
        else:
            return rbytes, mime
    # narrowed from bare except: keep best-effort decode but do not swallow
    # e.g. KeyboardInterrupt / SystemExit
    except Exception:
        logger.warning('auto-decode failed, returning bytes', exc_info=DEBUG_ENABLED)
        return rbytes, mime
Attempt to auto-decode data
5.920313
5.443294
1.087634
def __new_request_id(self):
    """Produce a new unique requestId of the form "<prefix><number>".

    prefix is a random ascii prefix (e.g. 6 chars) and number is an ever
    increasing counter (self.__reqnum). Since seqnum wraps on 2^64 at most,
    this always fits into 32 chars (QAPI request id limit).
    MUST be called within self.__requests lock.
    """
    while True:
        with self.__seqnum_lock:
            requestId = "%s%d" % (self.__reqpre, self.__reqnum)
            self.__reqnum += 1
        if requestId not in self.__requests:
            return requestId
        # in the unlikely event of a collision update the prefix
        self.__reqpre = self.__rnd_string(6)
requestId follows form "pre num" where pre is some random ascii prefix EG 6 chars long and num is an ever increasing number (self.__reqnum). MUST be called within self.__requests lock
8.459791
6.31951
1.338678
def __make_hash(cls, innermsg, token, seqnum):
    """Return the digest bytes (hash) for this innermsg, token and seqnum."""
    mac = hmacNew(token, digestmod=hashfunc)
    for chunk in (innermsg, cls.__byte_packer(seqnum)):
        mac.update(chunk)
    return mac.digest()
return the hash for this innermsg, token, seqnum return digest bytes
6.070702
6.503987
0.933382
def __check_hash(self, message):
    """Return True if the hash in the given message (dict) is valid."""
    expected = self.__make_hash(message[W_MESSAGE], self.__token, message[W_SEQ])
    return message[W_HASH] == expected
return true/false if hash is good message = dict
11.377951
11.109016
1.024209
def __make_innermsg(resource, rtype, ref, action=None, payload=None, limit=None):
    """Return an innermsg chunk (dict).

    Raises TypeError if action is neither None nor a tuple/list.
    """
    if action is not None and not isinstance(action, (tuple, list)):
        raise TypeError('action must be None/tuple/list')
    msg = {M_RESOURCE: resource,
           M_TYPE: int(rtype),
           M_CLIENTREF: ref,
           # Ensure action path consists only of strings
           M_ACTION: tuple(u(element) for element in action) if action else None,
           M_PAYLOAD: payload}
    if limit is not None:
        # Note: formatted like "0/15" where 0 = offset, 15 = limit
        msg[M_RANGE] = limit
    return msg
return innermsg chunk (dict)
6.135465
6.074718
1.01
def __request_except(self, requestId, exc, set_and_forget=True):
    """Set exception (if not None) for the given request and optionally remove
    it from the internal cache, setting its event."""
    try:
        with self.__requests:
            if set_and_forget:
                req = self.__requests.pop(requestId)
            else:
                req = self.__requests[requestId]
    except KeyError:
        logger.error('Unknown request %s - cannot set exception', requestId)
        return
    if exc is not None:
        req.exception = exc
    if set_and_forget:
        req._set()
Set exception (if not None) for the given request and (optionally) remove from internal cache & setting its event
2.828996
2.857576
0.989998
def __request_mark_sent(self, requestId):
    """Set send time & clear exception from a request if set, ignoring
    non-existent requests."""
    with self.__requests:
        try:
            req = self.__requests[requestId]
        except KeyError:
            # request might already have had a response and been removed by
            # the receiving thread
            return
        req.exception = None
        req._send_time = monotonic()
Set send time & clear exception from request if set, ignoring non-existent requests
9.68097
7.627862
1.269159
def __publish(self, qmsg):
    """Serialise, hash, (maybe) compress and send the given prepared message.

    Returns True unless sending failed (at which point an exception will have
    been set in the request).
    """
    with self.__seqnum_lock:
        seqnum = self.__seqnum
        self.__seqnum = (self.__seqnum + 1) % _SEQ_WRAP_SIZE
    #
    innermsg = ubjdumpb(qmsg.inner_msg)
    clevel = COMP_NONE
    if len(innermsg) >= self.__comp_size:
        logger.debug('Compressing payload')
        try:
            innermsg = COMPRESSORS[self.__comp_default].compress(innermsg)
        except KeyError:
            logger.warning('Unknown compression method %s, not compressing', self.__comp_default)
        else:
            clevel = self.__comp_default
    wrapper = {W_SEQ: seqnum,
               W_MESSAGE: innermsg,
               W_HASH: self.__make_hash(innermsg, self.__token, seqnum),
               W_COMPRESSION: clevel}
    msg = ubjdumpb(wrapper)
    # do not send messages exceeding the size limit
    if len(msg) > self.__max_encoded_length:
        self.__request_except(qmsg.requestId,
                              ValueError("Message Payload too large %d > %d"
                                         % (len(msg), self.__max_encoded_length)))
        return False
    self.__amqplink.send(msg, content_type='application/ubjson')
    if DEBUG_ENABLED:
        wrapper[W_MESSAGE] = qmsg.inner_msg
        logger.debug(decode_sent_msg('decode_sent_msg', wrapper))
    # Callback any debuggers
    self.__fire_callback(_CB_DEBUG_SEND, msg)
    #
    return True
Returns True unless sending failed (at which point an exception will have been set in the request)
5.702077
5.512911
1.034313
def __fire_callback(self, type_, *args, **kwargs):
    """Submit all callbacks registered under type_.

    Returns True if at least one callback was called.
    """
    fired = False
    plain_submit = self.__threadpool.submit
    with self.__callbacks:
        crud_submit = self.__crud_threadpool.submit if type_ in _CB_CRUD_TYPES else plain_submit
        for func, serialised_if_crud in self.__callbacks[type_]:
            fired = True
            # allow CRUD callbacks to not be serialised if requested
            submit = crud_submit if serialised_if_crud else plain_submit
            submit(func, *args, **kwargs)
    return fired
Returns True if at least one callback was called
8.841715
8.063457
1.096517
def __validate_decode_msg(self, message):  # noqa (complexity) pylint: disable=too-many-return-statements,too-many-branches
    """Decode wrapper, check hash & seqnum, decode body.

    Returns (body, seqnum) or None if validation / unpacking failed.
    """
    try:
        if not _CONTENT_TYPE_PATTERN.match(message.content_type):
            logger.debug('Message with unexpected content type %s from container, ignoring',
                         message.content_type)
            return None
    except AttributeError:
        logger.debug('Message without content type from container, ignoring')
        return None
    # Decode & check message wrapper
    try:
        body = ubjloadb(message.body)
    except:
        logger.warning('Failed to decode message wrapper, ignoring', exc_info=DEBUG_ENABLED)
        return None
    if not self.__valid_msg_wrapper(body):
        logger.warning('Invalid message wrapper, ignoring')
        return None
    # currently only warn although maybe this should be an error
    if self.__cnt_seqnum != -1 and not self.__valid_seqnum(body[W_SEQ], self.__cnt_seqnum):
        logger.warning('Unexpected seqnum from container: %d (last seen: %d)', body[W_SEQ], self.__cnt_seqnum)
    self.__cnt_seqnum = body[W_SEQ]
    # Check message hash
    if not self.__check_hash(body):
        logger.warning('Message has invalid hash, ignoring')
        return None
    # Decompress inner message
    try:
        msg = COMPRESSORS[body[W_COMPRESSION]].decompress(body[W_MESSAGE])
    except KeyError:
        logger.warning('Received message with unknown compression: %s', body[W_COMPRESSION])
        return None
    except OversizeException as ex:
        logger.warning('Uncompressed message exceeds %d bytes, ignoring', ex.size, exc_info=DEBUG_ENABLED)
        return None
    except:
        logger.warning('Decompression failed, ignoring message', exc_info=DEBUG_ENABLED)
        return None
    # Decode inner message
    try:
        msg = ubjloadb(msg, object_pairs_hook=OrderedDict)
    except:
        logger.warning('Failed to decode message, ignoring', exc_info=DEBUG_ENABLED)
        return None
    if self.__valid_msg_body(msg):
        return (msg, body[W_SEQ])
    logger.warning('Message with invalid body, ignoring: %s', msg)
    return None
Decodes wrapper, check hash & seq, decodes body. Returns body or None, if validation / unpack failed
3.188741
3.150391
1.012173
def __dispatch_msg(self, message):
    """Verify the signature and update RequestEvents / perform callbacks.

    Note: messages with an invalid wrapper, invalid hash, invalid sequence
    number or unexpected clientRef are sent to the debug_bad callback.
    """
    decoded = self.__validate_decode_msg(message)
    if not decoded:
        self.__fire_callback(_CB_DEBUG_BAD, message.body, message.content_type)
        return
    msg, seqnum = decoded
    if DEBUG_ENABLED:
        logger.debug(decode_rcvd_msg('decode_rcvd_msg', msg, seqnum))
    self.__fire_callback(_CB_DEBUG_RCVD, msg)
    # no reference, or set by client (not container)
    if msg[M_TYPE] not in _RSP_CONTAINER_REF:
        if msg[M_CLIENTREF]:
            # solicited
            if not self.__handle_known_solicited(msg):
                logger.debug('Ignoring response for unknown request %s of type %s',
                             msg[M_CLIENTREF], msg[M_TYPE])
        else:
            # unsolicited
            self.__perform_unsolicited_callbacks(msg)
    # unsolicited but can have a reference set by the container
    elif msg[M_TYPE] == E_CONTROLREQ:
        self.__handle_controlreq(msg[M_PAYLOAD], msg[M_CLIENTREF])
    else:
        logger.error('Unhandled unsolicited message of type %s', msg[M_TYPE])
Verify the signature and update RequestEvents / perform callbacks Note messages with an invalid wrapper, invalid hash, invalid sequence number or unexpected clientRef will be sent to debug_bad callback.
6.24143
5.712713
1.092551
def __handle_known_solicited(self, msg):
    """Return True if the message has been handled as a solicited response."""
    with self.__requests:
        try:
            req = self.__requests[msg[M_CLIENTREF]]
        except KeyError:
            return False
        if self.__handle_low_seq_resend(msg, req):
            return True
        perform_cb = finish = False
        if msg[M_TYPE] not in _RSP_NO_REF:
            self.__update_existing(msg, req)
            # Finalise request if applicable (not marked as finished here so
            # the callback below can be performed first)
            if msg[M_TYPE] in _RSP_TYPE_FINISH:
                finish = True
                # Exception - DUPLICATED also should produce a callback
                perform_cb = (msg[M_TYPE] == E_DUPLICATED)
            elif msg[M_TYPE] not in _RSP_TYPE_ONGOING:
                perform_cb = True
        else:
            logger.warning('Reference unexpected for request %s of type %s', msg[M_CLIENTREF], msg[M_TYPE])
    # outside lock to avoid deadlock if callbacks try to perform request-related functions
    if perform_cb:
        self.__perform_unsolicited_callbacks(msg)
    # mark request as finished
    if finish:
        req.success = msg[M_TYPE] in _RSP_TYPE_SUCCESS
        req.payload = msg[M_PAYLOAD]
        self.__clear_references(req)
        # Serialise completion of CRUD requests (together with CREATED,
        # DELETED, etc. messages)
        if req.is_crud:
            self.__crud_threadpool.submit(req._set)
        else:
            req._set()
    return True
returns True if message has been handled as a solicited response
6.994942
6.807994
1.02746
def __clear_references(self, request, remove_request=True):
    """Remove any internal references to the given request."""
    # remove the request itself
    if remove_request:
        with self.__requests:
            self.__requests.pop(request.id_)
    # remove request type specific references on failure
    if not request.success:
        with self.__pending_subs:
            self.__pending_subs.pop(request.id_, None)
        with self.__pending_controls:
            self.__pending_controls.pop(request.id_, None)
Remove any internal references to the given request
3.884982
3.863529
1.005553
def __update_existing(self, msg, req):
    """Propagate changes based on message type, performing additional actions
    when solicited messages arrive.

    MUST be called within self.__requests lock.
    """
    req._messages.append(msg)
    payload = msg[M_PAYLOAD]
    if msg[M_TYPE] in _RSP_TYPE_CREATION:
        if payload[P_RESOURCE] == R_SUB:
            # Add callback for feeddata
            with self.__pending_subs:
                if msg[M_CLIENTREF] in self.__pending_subs:
                    callback = self.__pending_subs.pop(msg[M_CLIENTREF])
                    if payload[P_POINT_TYPE] == R_FEED:
                        self.__callbacks[_CB_FEED][payload[P_POINT_ID]] = callback
                    else:
                        logger.warning('Subscription intended to feed is actually control: %s',
                                       payload[P_POINT_ID])
        elif payload[P_RESOURCE] == R_CONTROL:
            with self.__pending_controls:
                if msg[M_CLIENTREF] in self.__pending_controls:
                    # callbacks by thing
                    by_point = self.__callbacks[_CB_CONTROL].setdefault(payload[P_ENTITY_LID], {})
                    # callback by thing and point
                    by_point[payload[P_LID]] = self.__pending_controls.pop(msg[M_CLIENTREF])
    elif msg[M_TYPE] == E_RECENTDATA:
        samples = [dict(zip(('data', 'mime', 'time'), self.__decode_data_time(sample)))
                   for sample in payload[P_SAMPLES]]
        self.__fire_callback(_CB_RECENT_DATA, {'c': msg[M_CLIENTREF], 'samples': samples})
Propagate changes based on type of message. MUST be called within self.__requests lock. Performs additional actions when solicited messages arrive.
4.39055
4.257754
1.031189
def __handle_low_seq_resend(self, msg, req):
    """Special error case - low sequence number: update the sequence number and
    resend if applicable.

    Returns True if a resend was scheduled, False otherwise.
    MUST be called within self.__requests lock.
    """
    if not (msg[M_TYPE] == E_FAILED and msg[M_PAYLOAD][P_CODE] == E_FAILED_CODE_LOWSEQNUM):
        return False
    with self.__seqnum_lock:
        self.__seqnum = int(msg[M_PAYLOAD][P_MESSAGE])
    # return value (indicating shutdown) not useful here since this runs in
    # the receiver thread
    self.__retry_enqueue(PreparedMessage(req._inner_msg_out, req.id_))
    return True
special error case - low sequence number (update sequence number & resend if applicable). Returns True if a resend was scheduled, False otherwise. MUST be called within self.__requests lock.
11.882723
11.277291
1.053686
def __decode_data_time(self, payload):
    """Extract time and decode data (based on mime type) from payload.

    Applies to E_FEEDDATA and E_RECENTDATA. Returns (data, mime, time); falls
    back to naive local UTC time when the timestamp cannot be parsed.
    """
    data, mime = self.__bytes_to_share_data(payload)
    raw_time = payload.get(P_TIME)
    try:
        time = datetime.strptime(raw_time, self.__share_time_fmt)
    except (ValueError, TypeError):
        logger.warning('Share payload from container has invalid timestamp (%s), will use naive local time',
                       raw_time)
        time = datetime.utcnow()
    return data, mime, time
Extract time and decode payload (based on mime type) from payload. Applies to E_FEEDDATA and E_RECENTDATA. Returns tuple of data, mime, time.
6.317207
5.031795
1.255458
def __perform_unsolicited_callbacks(self, msg):
    """Run callbacks for which a client reference is either optional or does
    not apply at all."""
    type_ = msg[M_TYPE]
    payload = msg[M_PAYLOAD]
    if type_ in _RSP_PAYLOAD_CB_MAPPING:
        # responses which might be unsolicited (e.g. created or deleted)
        self.__fire_callback(_RSP_PAYLOAD_CB_MAPPING[type_], msg)
    elif type_ == E_FEEDDATA:
        # feed data callbacks
        self.__simulate_feeddata(payload[P_FEED_ID], *self.__decode_data_time(payload))
    elif type_ == E_SUBSCRIBED:
        # unsolicited subscriber message
        self.__fire_callback(_CB_SUBSCRIPTION, payload)
    else:
        logger.error('Unexpected message type for unsolicited callback %s', type_)
Callbacks for which a client reference is either optional or does not apply at all
5.257532
5.190916
1.012833
def roles(self, roles_list):
    '''
    Set the roles for the current launch.

    Full list of roles can be found here:
    http://www.imsglobal.org/LTI/v1p1/ltiIMGv1p1.html#_Toc319560479

    LIS roles include:
    * Student
    * Faculty
    * Member
    * Learner
    * Instructor
    * Mentor
    * Staff
    * Alumni
    * ProspectiveStudent
    * Guest
    * Other
    * Administrator
    * Observer
    * None
    '''
    if roles_list and isinstance(roles_list, list):
        # list() returns a copy; the previous `[].extend(roles_list)` returned
        # None (list.extend mutates in place), so roles was never actually set
        self.roles = list(roles_list)
    elif roles_list and isinstance(roles_list, basestring):
        # ','-delimited string: split and normalise to lower case
        self.roles = [role.lower() for role in roles_list.split(',')]
Set the roles for the current launch. Full list of roles can be found here: http://www.imsglobal.org/LTI/v1p1/ltiIMGv1p1.html#_Toc319560479 LIS roles include: * Student * Faculty * Member * Learner * Instructor * Mentor * Staff * Alumni * ProspectiveStudent * Guest * Other * Administrator * Observer * None
4.870506
1.537882
3.167021
def process_params(self, params):
    '''
    Populates the launch data from a dictionary. Only cares about keys in the
    LAUNCH_DATA_PARAMETERS list, or that start with 'custom_' or 'ext_'.
    '''
    for key, val in params.items():
        if key in LAUNCH_DATA_PARAMETERS and val != 'None':
            if key == 'roles':
                if isinstance(val, list):
                    # If it's already a list, no need to parse
                    self.roles = list(val)
                else:
                    # If it's a ',' delimited string, split
                    self.roles = val.split(',')
            else:
                setattr(self, key, touni(val))
        # startswith (not substring containment) per the documented contract -
        # previously a key such as 'x_custom_y' would also have matched
        elif key.startswith('custom_'):
            self.custom_params[key] = touni(val)
        elif key.startswith('ext_'):
            self.ext_params[key] = touni(val)
Populates the launch data from a dictionary. Only cares about keys in the LAUNCH_DATA_PARAMETERS list, or that start with 'custom_' or 'ext_'.
3.852528
2.203814
1.748118
def to_params(self):
    '''
    Create a new dictionary with all launch data. Custom / Extension keys
    will be included.
    '''
    params = {}
    for key in LAUNCH_DATA_PARAMETERS:
        if hasattr(self, key):
            params[key] = getattr(self, key)
    params.update(dict(self.custom_params))
    params.update(dict(self.ext_params))
    return params
Create a new dictionary with all launch data. Custom / Extension keys will be included. Roles are set as a ',' separated string.
4.021961
1.818328
2.2119
def write_longstr(self, s):
    """Write a string up to 2**32 bytes long after encoding.

    If passed a unicode string, encode it as UTF-8 first.
    """
    self._flushbits()
    encoded = s.encode('utf-8') if isinstance(s, text_t) else s
    self.write_long(len(encoded))
    self.out.write(encoded)
Write a string up to 2**32 bytes long after encoding. If passed a unicode string, encode as UTF-8.
4.269728
4.025367
1.060705
def run(self, background=False):
    """Run `on_startup`, `main` and `on_shutdown`.

    Blocks until finished, unless background is set (daemon thread).
    """
    if self.__bgthread:
        raise Exception('run has already been called (since last stop)')
    self.__shutdown.clear()
    if not background:
        self.__run()
        return
    worker = Thread(target=self.__run, name=('bg_' + self.__client.agent_id))
    worker.daemon = True
    self.__bgthread = worker
    worker.start()
Runs `on_startup`, `main` and `on_shutdown`, blocking until finished, unless background is set.
4.503252
4.292454
1.049109
def stop(self, timeout=None):
    """Request the device to stop running, waiting at most the given timeout
    in seconds (fractional).

    Has no effect if `run()` was not called with background=True set.
    Returns True if successfully stopped (or already not running).
    """
    self.__shutdown.set()
    if not self.__bgthread:
        return True
    logger.debug('Stopping bgthread')
    self.__bgthread.join(timeout)
    still_running = self.__bgthread.is_alive()
    if still_running:
        logger.warning('bgthread did not finish within timeout')
    self.__bgthread = None
    return not still_running
Requests device to stop running, waiting at most the given timeout in seconds (fractional). Has no effect if `run()` was not called with background=True set. Returns True if successfully stopped (or already not running).
3.408651
3.50317
0.973019
def add_message_to_queue(self, payload, **attributes):
    """Add a payload (a dict) to the queue, with any optional message
    attributes passed as keyword arguments (string values)."""
    attrs = dict((name, {"StringValue": value}) for name, value in attributes.items())
    return self.queue.send_message(MessageBody=json.dumps(payload),
                                   MessageAttributes=attrs)
Given a payload (a dict) and any optional message attributes (also a dict), add it to the queue.
3.745029
3.949419
0.948248
def receive_messages_from_queue(self, wait_time=15, num_messages=1):
    """Fetch up to num_messages from the queue, long-polling for at most
    wait_time seconds. Returns whatever the underlying queue returns
    (presumably a list of messages, possibly empty - verify against caller)."""
    return self.queue.receive_messages(MaxNumberOfMessages=num_messages,
                                       WaitTimeSeconds=wait_time)
Returns the first (according to FIFO) element in the queue; if none, then returns None.
3.590703
3.640358
0.98636
def save_json(self, jsonfn=None, pretty=True):
    """Write a .json file with the units tree.

    jsonfn - 'path/file.name'; defaults to 'units.json' in the current directory.
    pretty - use sorted keys and indentation for human readability.
    Returns True once the file has been written.
    """
    if jsonfn is None:
        jsonfn = os.path.join(os.getcwd(), 'units.json')
    if pretty:
        dump = json.dumps(self.units, sort_keys=True, indent=4)
    else:
        dump = json.dumps(self.units, sort_keys=False, indent=0)
    with open(jsonfn, 'w') as handle:
        handle.write(dump)
        return True
Write a .json file with the units tree jsonfn='path/file.name' default os.getcwd() + 'units.json' pretty=True use JSON dumps pretty print for human readability
2.268931
2.004253
1.132059
metrics_cache = cls._get_metrics_cache() metrics_cache.setdefault(name, 0) metrics_cache.set(name, value)
def accumulate_metric(cls, name, value)
Accumulate a custom metric (name and value) in the metrics cache.
4.639085
3.406935
1.361659
if not newrelic: return metrics_cache = cls._get_metrics_cache() try: newrelic.agent.add_custom_parameter('user_id', request.user.id) except AttributeError: pass for key, value in metrics_cache.data.items(): newrelic.agent.add_custom_parameter(key, value)
def _batch_report(cls, request)
Report the collected custom metrics to New Relic.
3.357552
2.873152
1.168595
if self._is_enabled(): self._cache.set(self.guid_key, six.text_type(uuid4())) log_prefix = self._log_prefix(u"Before", request) self._cache.set(self.memory_data_key, self._memory_data(log_prefix))
def process_request(self, request)
Store memory data to log later.
6.916451
5.8904
1.17419
if self._is_enabled(): log_prefix = self._log_prefix(u"After", request) new_memory_data = self._memory_data(log_prefix) log_prefix = self._log_prefix(u"Diff", request) cached_memory_data_response = self._cache.get_cached_response(self.memory_data_key) old_memory_data = cached_memory_data_response.get_value_or_default(None) self._log_diff_memory_data(log_prefix, new_memory_data, old_memory_data) return response
def process_response(self, request, response)
Logs memory data after processing response.
3.864618
3.444658
1.121916
# After a celery task runs, the request cache is cleared. So if celery # tasks are running synchronously (CELERY_ALWAYS _EAGER), "guid_key" # will no longer be in the request cache when process_response executes. cached_guid_response = self._cache.get_cached_response(self.guid_key) cached_guid = cached_guid_response.get_value_or_default(u"without_guid") return u"{} request '{} {} {}'".format(prefix, request.method, request.path, cached_guid)
def _log_prefix(self, prefix, request)
Returns a formatted prefix for logging for the given request.
8.923123
8.912797
1.001159
machine_data = psutil.virtual_memory() process = psutil.Process() process_data = { 'memory_info': process.get_memory_info(), 'ext_memory_info': process.get_ext_memory_info(), 'memory_percent': process.get_memory_percent(), 'cpu_percent': process.get_cpu_percent(), } log.info(u"%s Machine memory usage: %s; Process memory usage: %s", log_prefix, machine_data, process_data) return { 'machine_data': machine_data, 'process_data': process_data, }
def _memory_data(self, log_prefix)
Returns a dict with information for current memory utilization. Uses log_prefix in log statements.
2.443714
2.43445
1.003805
def _vmem_used(memory_data): return memory_data['machine_data'].used def _process_mem_percent(memory_data): return memory_data['process_data']['memory_percent'] def _process_rss(memory_data): return memory_data['process_data']['memory_info'].rss def _process_vms(memory_data): return memory_data['process_data']['memory_info'].vms if new_memory_data and old_memory_data: log.info( u"%s Diff Vmem used: %s, Diff percent memory: %s, Diff rss: %s, Diff vms: %s", prefix, _vmem_used(new_memory_data) - _vmem_used(old_memory_data), _process_mem_percent(new_memory_data) - _process_mem_percent(old_memory_data), _process_rss(new_memory_data) - _process_rss(old_memory_data), _process_vms(new_memory_data) - _process_vms(old_memory_data), )
def _log_diff_memory_data(self, prefix, new_memory_data, old_memory_data)
Computes and logs the difference in memory utilization between the given old and new memory data.
2.100367
2.082159
1.008744
assert namespace if namespace in self._data: return self._data[namespace] new_data = {} self._data[namespace] = new_data return new_data
def data(self, namespace)
Gets the thread.local data (dict) for a given namespace. Args: namespace (string): The namespace, or key, of the data dict. Returns: (dict)
2.993376
4.022962
0.744073
cached_value = self.data.get(key, _CACHE_MISS) is_found = cached_value is not _CACHE_MISS return CachedResponse(is_found, key, cached_value)
def get_cached_response(self, key)
Retrieves a CachedResponse for the provided key. Args: key (string) Returns: A CachedResponse with is_found status and value.
4.632606
4.501351
1.029159
request_cached_response = DEFAULT_REQUEST_CACHE.get_cached_response(key) if not request_cached_response.is_found: django_cached_response = cls._get_cached_response_from_django_cache(key) cls._set_request_cache_if_django_cache_hit(key, django_cached_response) return django_cached_response return request_cached_response
def get_cached_response(cls, key)
Retrieves a CachedResponse for the provided key. Args: key (string) Returns: A CachedResponse with is_found status and value.
3.507108
3.514291
0.997956
DEFAULT_REQUEST_CACHE.set(key, value) django_cache.set(key, value, django_cache_timeout)
def set_all_tiers(key, value, django_cache_timeout=DEFAULT_TIMEOUT)
Caches the value for the provided key in both the request cache and the django cache. Args: key (string) value (object) django_cache_timeout (int): (Optional) Timeout used to determine if and for how long to cache in the django cache. A timeout of 0 will skip the django cache. If timeout is provided, use that timeout for the key; otherwise use the default cache timeout.
4.577779
5.012031
0.913358
if TieredCache._should_force_django_cache_miss(): return CachedResponse(is_found=False, key=key, value=None) cached_value = django_cache.get(key, _CACHE_MISS) is_found = cached_value is not _CACHE_MISS return CachedResponse(is_found, key, cached_value)
def _get_cached_response_from_django_cache(key)
Retrieves a CachedResponse for the given key from the django cache. If the request was set to force cache misses, then this will always return a cache miss response. Args: key (string) Returns: A CachedResponse with is_found status and value.
4.454817
3.519011
1.265929
if django_cached_response.is_found: DEFAULT_REQUEST_CACHE.set(key, django_cached_response.value)
def _set_request_cache_if_django_cache_hit(key, django_cached_response)
Sets the value in the request cache if the django cached response was a hit. Args: key (string) django_cached_response (CachedResponse)
5.830141
4.825851
1.208106
if not (request.user and request.user.is_active and request.user.is_staff): force_cache_miss = False else: force_cache_miss = request.GET.get(FORCE_CACHE_MISS_PARAM, 'false').lower() == 'true' DEFAULT_REQUEST_CACHE.set(SHOULD_FORCE_CACHE_MISS_KEY, force_cache_miss)
def _get_and_set_force_cache_miss(request)
Gets value for request query parameter FORCE_CACHE_MISS and sets it in the default request cache. This functionality is only available for staff. Example: http://clobert.com/api/v1/resource?force_cache_miss=true
2.672639
2.644689
1.010568
cached_response = DEFAULT_REQUEST_CACHE.get_cached_response(SHOULD_FORCE_CACHE_MISS_KEY) return False if not cached_response.is_found else cached_response.value
def _should_force_django_cache_miss(cls)
Returns True if the tiered cache should force a cache miss for the django cache, and False otherwise.
7.223532
6.15121
1.174327
if not name: name = 'Argument' obj = ensure_unicode(obj, name=name) if no_whitespace: if _PATTERN_WHITESPACE.match(obj): raise ValueError('%s cannot contain whitespace' % name) elif no_leading_trailing_whitespace and _PATTERN_LEAD_TRAIL_WHITESPACE.match(obj): raise ValueError('%s contains leading/trailing whitespace' % name) if (min_len and len(obj) < min_len) or (max_len and len(obj) > max_len): raise ValueError('%s too short/long (%d/%d)' % (name, min_len, max_len)) if whole_word: if not _PATTERN_WORD.match(obj): raise ValueError('%s can only contain alphanumeric (unicode) characters, numbers and the underscore' % name) # whole words cannot contain newline so additional check not required elif no_newline and '\n' in obj: raise ValueError('%s cannot contain line breaks' % name) return obj
def check_convert_string(obj, name=None, no_leading_trailing_whitespace=True, no_whitespace=False, no_newline=True, whole_word=False, min_len=1, max_len=0)
Ensures the provided object can be interpreted as a unicode string, optionally with additional restrictions imposed. By default this means a non-zero length string which does not begin or end in whitespace.
2.699342
2.638885
1.02291
if isinstance(guid, string_types): return ensure_unicode(UUID(guid).hex) elif guid is None and allow_none: return None else: raise ValueError('guid must be a string')
def guid_check_convert(guid, allow_none=False)
Take a GUID in the form of hex string "32" or "8-4-4-4-12". Returns hex string "32" or raises ValueError: badly formed hexadecimal UUID string
3.280398
3.429115
0.956631
# single string check comes first since string is also a Sequence if isinstance(tags, string_types): return [cls.__tag_check_convert(tags)] elif isinstance(tags, Sequence): if not tags: raise ValueError("Tag list is empty") return [cls.__tag_check_convert(tag) for tag in tags] else: raise ValueError("tags must be a single string or list of sequence of strings")
def tags_check_convert(cls, tags)
Accept one tag as string or multiple tags in list of strings. Returns list (with tags in unicode form) or raises ValueError
4.220772
4.185796
1.008356
bits = urlparse(url) return ((bits.scheme == "http" or bits.scheme == "https") and _PATTERN_URL_PART.match(bits.netloc) and _PATTERN_URL_PART.match(bits.path))
def __valid_url(cls, url)
Expects input to already be a valid string
3.771353
3.621417
1.041403
if not (isinstance(lat, number_types) and -90 <= lat <= 90): raise ValueError("Latitude: '{latitude}' invalid".format(latitude=lat)) if not (isinstance(lon, number_types) and -180 <= lon <= 180): raise ValueError("Longitude: '{longitude}' invalid".format(longitude=lon))
def location_check(lat, lon)
For use by Core client wrappers
2.246662
2.122536
1.05848
if not (isinstance(location, Mapping) and set(location.keys()) == _LOCATION_SEARCH_ARGS): raise ValueError('Search location should be mapping with keys: %s' % _LOCATION_SEARCH_ARGS) cls.location_check(location['lat'], location['long']) radius = location['radius'] if not (isinstance(radius, number_types) and 0 < radius <= 20038): # half circumference raise ValueError("Radius: '{radius}' is invalid".format(radius=radius))
def search_location_check(cls, location)
Core.Client.request_search location parameter should be a dictionary that contains lat, lon and radius floats
4.558443
4.423537
1.030497
text = cls.check_convert_string(text, name='text', no_leading_trailing_whitespace=False) if len(text) > VALIDATION_META_SEARCH_TEXT: raise ValueError("Search text can contain at most %d characters" % VALIDATION_META_SEARCH_TEXT) text = ' '.join(_PATTERN_WORDS.findall(text)) if not text: raise ValueError('Search text must contain at least one non-whitespace term (word)') return text
def __search_text_check_convert(cls, text)
Converts and keeps only words in text deemed to be valid
5.419502
5.453957
0.993683
if func is None: if not allow_none: raise ValueError('callable cannot be None') elif not arg_checker(func, *[arg_value for _ in range(arg_count)]): raise ValueError('callable %s invalid (for %d arguments)' % (func, arg_count))
def callable_check(func, arg_count=1, arg_value=None, allow_none=False)
Check whether func is callable, with the given number of positional arguments. Returns True if check succeeded, False otherwise.
3.93034
4.276619
0.91903
return self.__list(R_FEED, limit=limit, offset=offset)['feeds']
def list_feeds(self, limit=500, offset=0)
List `all` the feeds on this Thing. Returns QAPI list function payload Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `limit` (optional) (integer) Return this many Point details `offset` (optional) (integer) Return Point details starting at this offset
11.499392
21.499832
0.53486
return self.__list(R_CONTROL, limit=limit, offset=offset)['controls']
def list_controls(self, limit=500, offset=0)
List `all` the controls on this Thing. Returns QAPI list function payload Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `limit` (optional) (integer) Return this many Point details `offset` (optional) (integer) Return Point details starting at this offset
11.560927
22.471239
0.514477
logger.info("set_public(public=%s) [lid=%s]", public, self.__lid) evt = self._client._request_entity_meta_setpublic(self.__lid, public) self._client._wait_and_except_if_failed(evt)
def set_public(self, public=True)
Sets your Thing to be public to all. If `public=True`. This means the tags, label and description of your Thing are now searchable by anybody, along with its location and the units of any values on any Points. If `public=False` the metadata of your Thing is no longer searchable. Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `public` (optional) (boolean) Whether (or not) to allow your Thing's metadata to be searched by anybody
8.444867
9.338118
0.904344
logger.info("rename(new_lid=\"%s\") [lid=%s]", new_lid, self.__lid) evt = self._client._request_entity_rename(self.__lid, new_lid) self._client._wait_and_except_if_failed(evt) self.__lid = new_lid self._client._notify_thing_lid_change(self.__lid, new_lid)
def rename(self, new_lid)
Rename the Thing. `ADVANCED USERS ONLY` This can be confusing. You are changing the local id of a Thing to `new_lid`. If you create another Thing using the "old_lid", the system will oblige, but it will be a completely _new_ Thing. Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `new_lid` (required) (string) the new local identifier of your Thing
4.818519
5.006933
0.962369
logger.info("reassign(new_epid=\"%s\") [lid=%s]", new_epid, self.__lid) evt = self._client._request_entity_reassign(self.__lid, new_epid) self._client._wait_and_except_if_failed(evt)
def reassign(self, new_epid)
Reassign the Thing from one agent to another. `ADVANCED USERS ONLY` This will lead to any local instances of a Thing being rendered useless. They won't be able to receive control requests, feed data or to share any feeds as they won't be in this agent. Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `new_epid` (required) (string) the new agent id to which your Thing should be assigned. If None, current agent will be chosen. If False, existing agent will be unassigned.
6.20123
6.467276
0.958863
if isinstance(tags, str): tags = [tags] evt = self._client._request_entity_tag_update(self.__lid, tags, delete=False) self._client._wait_and_except_if_failed(evt)
def create_tag(self, tags)
Create tags for a Thing in the language you specify. Tags can only contain alphanumeric (unicode) characters and the underscore. Tags will be stored lower-cased. Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `tags` (mandatory) (list) - the list of tags you want to add to your Thing, e.g. `["garden", "soil"]`
8.820239
9.891139
0.891731
if isinstance(tags, str): tags = [tags] evt = self._client._request_entity_tag_update(self.__lid, tags, delete=True) self._client._wait_and_except_if_failed(evt)
def delete_tag(self, tags)
Delete tags for a Thing in the language you specify. Case will be ignored and any tags matching lower-cased will be deleted. Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `tags` (mandatory) (list) - the list of tags you want to delete from your Thing, e.g. `["garden", "soil"]`
8.416232
9.316254
0.903392
evt = self._client._request_entity_tag_list(self.__lid, limit=limit, offset=offset) self._client._wait_and_except_if_failed(evt) return evt.payload['tags']
def list_tag(self, limit=500, offset=0)
List `all` the tags for this Thing Returns lists of tags, as below #!python [ "mytag1", "mytag2", "ein_name", "nochein_name" ] - OR... Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `limit` (optional) (integer) Return at most this many tags `offset` (optional) (integer) Return tags starting at this offset
9.919152
10.901125
0.90992
rdf = self.get_meta_rdf(fmt='n3') return ThingMeta(self, rdf, self._client.default_lang, fmt='n3')
def get_meta(self)
Get the metadata object for this Thing Returns a [ThingMeta](ThingMeta.m.html#IoticAgent.IOT.ThingMeta.ThingMeta) object
14.356752
9.169774
1.56566
evt = self._client._request_entity_meta_get(self.__lid, fmt=fmt) self._client._wait_and_except_if_failed(evt) return evt.payload['meta']
def get_meta_rdf(self, fmt='n3')
Get the metadata for this Thing in rdf fmt Advanced users who want to manipulate the RDF for this Thing directly without the [ThingMeta](ThingMeta.m.html#IoticAgent.IOT.ThingMeta.ThingMeta) helper object Returns the RDF in the format you specify. - OR - Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `fmt` (optional) (string) The format of RDF you want returned. Valid formats are: "xml", "n3", "turtle"
14.009337
14.699487
0.953049
evt = self._client._request_entity_meta_set(self.__lid, rdf, fmt=fmt) self._client._wait_and_except_if_failed(evt)
def set_meta_rdf(self, rdf, fmt='n3')
Set the metadata for this Thing in RDF fmt Advanced users who want to manipulate the RDF for this Thing directly without the [ThingMeta](ThingMeta.m.html#IoticAgent.IOT.ThingMeta.ThingMeta) helper object Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `fmt` (optional) (string) The format of RDF you have sent. Valid formats are: "xml", "n3", "turtle"
11.914351
13.203488
0.902364
with self.__new_feeds: try: return self.__new_feeds.pop(pid) except KeyError as ex: raise_from(KeyError('Feed %s not know as new' % pid), ex)
def get_feed(self, pid)
Get the details of a newly created feed. This only applies to asynchronous creation of feeds and the new feed instance can only be retrieved once. `NOTE` - Destructive Read. Once you've called get_feed once, any further calls will raise a `KeyError` Returns a [Feed](Point.m.html#IoticAgent.IOT.Point.Feed) object, which corresponds to the cached entry for this local feed id `pid` (required) (string) Point id - local identifier of your feed. Raises `KeyError` if the feed has not been newly created (or has already been retrieved by a previous call)
6.289534
6.44592
0.975739
with self.__new_controls: try: return self.__new_controls.pop(pid) except KeyError as ex: raise_from(KeyError('Control %s not know as new' % pid), ex)
def get_control(self, pid)
Get the details of a newly created control. This only applies to asynchronous creation of controls and the new control instance can only be retrieved once. `NOTE` - Destructive Read. Once you've called get_control once, any further calls will raise a `KeyError` Returns a [Control](Point.m.html#IoticAgent.IOT.Point.Control) object, which corresponds to the cached entry for this local control id `pid` (required) (string) local identifier of your control. Raises `KeyError` if the control has not been newly created (or has already been retrieved by a previous call)
6.178385
6.881028
0.897887
logger.info("create_feed(pid=\"%s\") [lid=%s]", pid, self.__lid) return self.__create_point(R_FEED, pid, save_recent=save_recent)
def create_feed(self, pid, save_recent=0)
Create a new Feed for this Thing with a local point id (pid). Returns a new [Feed](Point.m.html#IoticAgent.IOT.Point.Feed) object, or the existing one, if the Feed already exists Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `pid` (required) (string) local id of your Feed `save_recent` (optional) (int) how many shares to store for later retrieval. If not supported by container, this argument will be ignored. A value of zero disables this feature whilst a negative value requests the maximum sample store amount. See also [Feed.set_recent_config](./Point.m.html#IoticAgent.IOT.Point.Feed.set_recent_config).
8.338972
7.40182
1.126611
logger.info("create_control(pid=\"%s\", control_cb=%s) [lid=%s]", pid, callback, self.__lid) if callback_parsed: callback = self._client._get_parsed_control_callback(callback_parsed, callback) return self.__create_point(R_CONTROL, pid, control_cb=callback)
def create_control(self, pid, callback, callback_parsed=None)
Create a control for this Thing with a local point id (pid) and a control request feedback Returns a new [Control](Point.m.html#IoticAgent.IOT.Point.Control) object or the existing one if the Control already exists Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `pid` (required) (string) local id of your Control `callback` (required) (function reference) callback function to invoke on receipt of a control request. The callback receives a single dict argument, with keys of: #!python 'data' # (decoded or raw bytes) 'mime' # (None, unless payload was not decoded and has a mime type) 'subId' # (the global id of the associated subscription) 'entityLid' # (local id of the Thing to which the control belongs) 'lid' # (local id of control) 'confirm' # (whether a confirmation is expected) 'requestId' # (required for sending confirmation) `callback_parsed` (optional) (function reference) callback function to invoke on receipt of control data. This is equivalent to `callback` except the dict includes the `parsed` key which holds the set of values in a [PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject) instance. If both `callback_parsed` and `callback` have been specified, the former takes precedence and `callback` is only called if the point data could not be parsed according to its current value description. `NOTE`: `callback_parsed` can only be used if `auto_encode_decode` is enabled for the client instance.
6.523992
6.610949
0.986846
logger.info("delete_feed(pid=\"%s\") [lid=%s]", pid, self.__lid) return self.__delete_point(R_FEED, pid)
def delete_feed(self, pid)
Delete a feed, identified by its local id. Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `pid` (required) (string) local identifier of your feed you want to delete
10.100542
11.960959
0.844459
logger.info("delete_control(pid=\"%s\") [lid=%s]", pid, self.__lid) return self.__delete_point(R_CONTROL, pid)
def delete_control(self, pid)
Delete a control, identified by its local id. Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `pid` (required) (string) local identifier of your control you want to delete
10.01972
11.895393
0.842319
new_subs = self.__new_subs with new_subs: # don't allow multiple subscription requests to overwrite internal reference if key in new_subs: raise ValueError('subscription for given args pending: %s' % str(key)) new_subs[key] = None try: yield except: # don't preserve reference if request creation failed with new_subs: new_subs.pop(key, None) raise
def __sub_add_reference(self, key)
Used by __sub_make_request to save reference for pending sub request
7.369452
6.323061
1.165488
if not req.success: try: self.__new_subs.pop(key) except KeyError: logger.warning('No sub ref %s', key)
def __sub_del_reference(self, req, key)
Blindly clear reference to pending subscription on failure.
7.847003
5.74376
1.366179
# global if isinstance(gpid, string_types): gpid = uuid_to_hex(gpid) ref = (foc, gpid) with self.__sub_add_reference(ref): req = self._client._request_sub_create(self.__lid, foc, gpid, callback=callback) # local elif isinstance(gpid, Sequence) and len(gpid) == 2: ref = (foc, tuple(gpid)) with self.__sub_add_reference(ref): req = self._client._request_sub_create_local(self.__lid, foc, *gpid, callback=callback) else: raise ValueError('gpid must be string or two-element tuple') req._run_on_completion(self.__sub_del_reference, ref) return req
def __sub_make_request(self, foc, gpid, callback)
Make right subscription request depending on whether local or global - used by __sub*
3.06877
2.933814
1.046
if callback_parsed: callback = self._client._get_parsed_feed_callback(callback_parsed, callback) return self.__sub(R_FEED, gpid, callback=callback)
def follow(self, gpid, callback=None, callback_parsed=None)
Create a subscription (i.e. follow) a Feed/Point with a global point id (gpid) and a feed data callback Returns a new [RemoteFeed](RemotePoint.m.html#IoticAgent.IOT.RemotePoint.RemoteFeed) object or the existing one if the subscription already exists - OR - Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `gpid` (required) (uuid) global id of the Point you want to follow `-OR-` `gpid` (required) (lid,pid) tuple of `(thing_localid, point_localid)` for local subscription `callback` (optional) (function reference) callback function to invoke on receipt of feed data. The callback receives a single dict argument, with keys of: #!python 'data' # (decoded or raw bytes) 'mime' # (None, unless payload was not decoded and has a mime type) 'pid' # (the global id of the feed from which the data originates) 'time' # (datetime representing UTC timestamp of share) `callback_parsed` (optional) (function reference) callback function to invoke on receipt of feed data. This is equivalent to `callback` except the dict includes the `parsed` key which holds the set of values in a [PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject) instance. If both `callback_parsed` and `callback` have been specified, the former takes precedence and `callback` is only called if the point data could not be parsed according to its current value description. `NOTE`: `callback_parsed` can only be used if `auto_encode_decode` is enabled for the client instance.
7.092255
8.945583
0.792822
evt = self._client._request_sub_list(self.__lid, limit=limit, offset=offset) self._client._wait_and_except_if_failed(evt) return evt.payload['subs']
def list_connections(self, limit=500, offset=0)
List Points to which this Thing is subscribed. I.e. list all the Points this Thing is following and controls it's attached to Returns subscription list e.g. #!python { "<Subscription GUID 1>": { "id": "<Control GUID>", "entityId": "<Control's Thing GUID>", "type": 3 # R_CONTROL from IoticAgent.Core.Const }, "<Subscription GUID 2>": { "id": "<Feed GUID>", "entityId": "<Feed's Thing GUID>", "type": 2 # R_FEED from IoticAgent.Core.Const } Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure Note: For Things following a Point see [list_followers](./Point.m.html#IoticAgent.IOT.Point.Point.list_followers)
11.868688
10.951196
1.08378
if payload[P_RESOURCE] in _POINT_TYPE_TO_CLASS: store = self.__new_feeds if payload[P_RESOURCE] == R_FEED else self.__new_controls cls = _POINT_TYPE_TO_CLASS[payload[P_RESOURCE]] with store: store[payload[P_LID]] = cls(self._client, payload[P_ENTITY_LID], payload[P_LID], payload[P_ID]) logger.debug('Added %s: %s (for %s)', foc_to_str(payload[P_RESOURCE]), payload[P_LID], payload[P_ENTITY_LID]) elif payload[P_RESOURCE] == R_SUB: # local if P_POINT_ENTITY_LID in payload: key = (payload[P_POINT_TYPE], (payload[P_POINT_ENTITY_LID], payload[P_POINT_LID])) # global else: key = (payload[P_POINT_TYPE], payload[P_POINT_ID]) new_subs = self.__new_subs with new_subs: if key in new_subs: cls = RemoteFeed if payload[P_POINT_TYPE] == R_FEED else RemoteControl new_subs[key] = cls(self._client, payload[P_ID], payload[P_POINT_ID], payload[P_ENTITY_LID]) else: logger.warning('Ignoring subscription creation for unexpected %s: %s', foc_to_str(payload[P_POINT_TYPE]), key[1]) else: logger.error('Resource creation of type %d unhandled', payload[P_RESOURCE])
def _cb_created(self, payload, duplicated)
Indirect callback (via Client) for point & subscription creation responses
3.221356
3.087753
1.043269
method_sig, args, content = self.connection._wait_method( self.channel_id, allowed_methods) return self.dispatch_method(method_sig, args, content)
def wait(self, allowed_methods=None)
Wait for a method that matches our allowed_methods parameter (the default value of None means match any method), and dispatch to it.
8.191685
6.765438
1.210814
def get_recent(self, count):
    """Get the last instance(s) of feeddata from the feed. Useful if the
    remote Thing doesn't publish very often.

    Returns an iterable of dicts (in chronologically ascending order)
    containing 'data', 'mime' and 'time' keys (as produced by the request's
    recent-data callback).

    `count` (mandatory) (integer) how many recent instances to retrieve.
    High values might be floored to a maximum as defined by the container.

    Note: feed data is yielded as soon as it arrives, rather than when the
    whole request completes.
    """
    queue = Queue()
    # Each arriving sample is pushed onto the queue by the async callback.
    evt = self.get_recent_async(count, queue.put)
    timeout_time = monotonic() + self._client.sync_timeout
    while True:
        try:
            # Short poll so we notice request completion/timeout promptly.
            yield queue.get(True, .1)
        except Empty:
            if evt.is_set() or monotonic() >= timeout_time:
                break
    # Surface any failure recorded on the request once draining is done.
    self._client._except_if_failed(evt)
5.036066
5.467054
0.921166
def get_recent_async(self, count, callback):
    """Similar to `get_recent` except instead of returning an iterable,
    passes each dict to the given function.

    `count` (mandatory) (integer) how many recent instances to retrieve.
    `callback` (mandatory) (function) called with each recent-data dict;
               must accept a single argument.

    Returns the request object.
    """
    # Validate inputs up front so failures surface before the request is sent.
    validate_nonnegative_int(count, 'count')
    Validation.callable_check(callback, allow_none=True)
    evt = self._client._request_sub_recent(self.subid, count=count)
    # Attach the per-sample callback to the outstanding request.
    self._client._add_recent_cb_for(evt, callback)
    return evt
9.714679
11.789731
0.823995
def simulate(self, data, mime=None):
    """Simulate the arrival of feeddata into the feed. Useful if the remote
    Thing doesn't publish very often.

    `data` (mandatory) the data to use to simulate the arrival of remote
           feed data
    `mime` (optional) (string) the mime type of the data
    """
    # Delegate straight to the client, which performs the local injection.
    self._client.simulate_feeddata(self.__pointid, data, mime)
28.278938
15.686567
1.802749
def ask(self, data, mime=None):
    """Request a remote control to do something. Ask is "fire-and-forget":
    no notification of the success or otherwise of the action at the far
    end is received.

    Raises IOTException containing the error if the infrastructure detects
    a problem; raises LinkException on a communications problem.

    `data` (mandatory) the data to share
    `mime` (optional) (string) the mime type of the data
    """
    evt = self.ask_async(data, mime=mime)
    # Block until the request completes, raising on failure.
    self._client._wait_and_except_if_failed(evt)
12.081558
16.357786
0.738581
def tell(self, data, timeout=10, mime=None):
    """Order a remote control to do something. Tell is confirmed: a
    notification of the success or otherwise of the action at the far end
    is received.

    Returns True on success, otherwise a reason string: "timeout",
    "unreachable" or "failed".

    Raises IOTException containing the error if the infrastructure detects
    a problem; raises LinkException on a communications problem.

    `data` (mandatory) the data to share
    `timeout` (optional) (int) seconds before the tell request times out
    `mime` (optional) (string) the mime type of the data
    """
    evt = self.tell_async(data, timeout=timeout, mime=mime)
    # No point in waiting longer than supplied timeout (as opposed to waiting for sync timeout)
    try:
        self._client._wait_and_except_if_failed(evt, timeout=timeout)
    except IOTSyncTimeout:
        return 'timeout'
    return True if evt.payload['success'] else evt.payload['reason']
9.408003
8.617722
1.091704
def tell_async(self, data, timeout=10, mime=None):
    """Asynchronous version of `tell`.

    Note: the request's payload contains both the success and reason keys;
    they are not separated out as in the synchronous version.

    `data` (mandatory) the data to share
    `timeout` (optional) (int) seconds before the tell request times out
    `mime` (optional) (string) the mime type of the data

    Returns the request object.
    """
    logger.info("tell(timeout=%s) [subid=%s]", timeout, self.subid)
    # Only auto-convert rich point-data objects when no explicit mime type
    # overrides the interpretation of `data`.
    if mime is None and isinstance(data, PointDataObject):
        data = data.to_dict()
    return self._client._request_sub_tell(self.subid, data, timeout, mime=mime)
6.146558
6.847578
0.897625