_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q15000
pack_mac
train
def pack_mac(macstr):
    """Converts a mac address given in colon delimited notation to a six
    byte string in network byte order.

    >>> pack_mac("30:31:32:33:34:35") == b'012345'
    True
    >>> pack_mac("bad")
    Traceback (most recent call last):
    ...
    ValueError: given mac addresses has an invalid number of colons

    @type macstr: str
    @rtype: bytes
    @raises ValueError: for badly formatted mac addresses
    """
    # bugfix: `basestring` only exists on Python 2 and raises NameError on
    # Python 3; `str` is correct on both.
    if not isinstance(macstr, str):
        raise ValueError("given mac addresses is not a string")
    parts = macstr.split(":")
    if len(parts) != 6:
        raise ValueError("given mac addresses has an invalid number of colons")
    octets = [int(part, 16) for part in parts]  # raises ValueError on bad hex
    # keep the documented ValueError contract instead of leaking struct.error
    if not all(0 <= octet < 256 for octet in octets):
        raise ValueError("given mac addresses contains an octet out of range")
    return struct.pack("!6B", *octets)
python
{ "resource": "" }
q15001
unpack_mac
train
def unpack_mac(sixbytes):
    """Converts a mac address given in a six byte string in network byte
    order to a string in colon delimited notation.

    >>> unpack_mac(b"012345")
    '30:31:32:33:34:35'
    >>> unpack_mac(b"bad")
    Traceback (most recent call last):
    ...
    ValueError: given buffer is not exactly six bytes long

    @type sixbytes: bytes
    @rtype: str
    @raises ValueError: for bad input
    """
    if not isinstance(sixbytes, bytes):
        raise ValueError("given buffer is not a string")
    if len(sixbytes) != 6:
        raise ValueError("given buffer is not exactly six bytes long")
    # bytearray() yields ints on both Python 2 and 3, removing the need for
    # the bytes_to_int_seq compat helper; "%02x" zero-pads to two digits
    # exactly like the old "%2.2x" spelling.
    return ":".join("%02x" % octet for octet in bytearray(sixbytes))
python
{ "resource": "" }
q15002
OmapiStartupMessage.validate
train
def validate(self):
    """Ensure the peer's startup values match this implementation.

    @raises OmapiError: on protocol version or header size mismatch
    """
    if self.protocol_version != self.implemented_protocol_version:
        raise OmapiError("protocol mismatch")
    if self.header_size != self.implemented_header_size:
        raise OmapiError("header size mismatch")
python
{ "resource": "" }
q15003
OmapiStartupMessage.serialize
train
def serialize(self, outbuffer):
    """Write this OmapiStartupMessage to the given outbuffer.

    @type outbuffer: OutBuffer
    """
    # protocol version first, then header size, each as a network int32
    for value in (self.protocol_version, self.header_size):
        outbuffer.add_net32int(value)
python
{ "resource": "" }
q15004
OmapiMessage.sign
train
def sign(self, authenticator):
    """Sign this OMAPI message in place.

    @type authenticator: OmapiAuthenticatorBase
    """
    self.authid = authenticator.authid
    # pre-fill the signature with authlen zero bytes so the serialized
    # form used for signing already has its final length
    self.signature = b"\0" * authenticator.authlen
    payload = self.as_string(forsigning=True)
    self.signature = authenticator.sign(payload)
    assert len(self.signature) == authenticator.authlen
python
{ "resource": "" }
q15005
OmapiMessage.verify
train
def verify(self, authenticators):
    """Verify this OMAPI message against the known authenticators.

    An unknown authid means the message cannot be verified and counts
    as a failure.

    @type authenticators: {int: OmapiAuthenticatorBase}
    @rtype: bool
    """
    try:
        authenticator = authenticators[self.authid]
    except KeyError:
        return False
    expected = authenticator.sign(self.as_string(forsigning=True))
    return expected == self.signature
python
{ "resource": "" }
q15006
OmapiMessage.open
train
def open(cls, typename):
    """Build an OMAPI open message for the given object type.

    @type typename: bytes
    @rtype: OmapiMessage
    """
    return cls(
        opcode=OMAPI_OP_OPEN,
        message=[(b"type", typename)],
        tid=-1,
    )
python
{ "resource": "" }
q15007
InBuffer.parse_startup_message
train
def parse_startup_message(self):
    """results in an OmapiStartupMessage

    >>> d = b"\\0\\0\\0\\x64\\0\\0\\0\\x18"
    >>> next(InBuffer(d).parse_startup_message()).validate()
    """
    # read two network int32 values (protocol version, header size) ...
    two_ints = parse_chain(self.parse_net32int,
                           lambda _: self.parse_net32int())
    # ... and wrap them in an OmapiStartupMessage
    return parse_map(lambda args: OmapiStartupMessage(*args), two_ints)
python
{ "resource": "" }
q15008
InBuffer.parse_message
train
def parse_message(self):
    """results in an OmapiMessage"""
    stages = (
        self.parse_net32int,                            # authid
        lambda *_: self.parse_net32int(),               # authlen
        lambda *_: self.parse_net32int(),               # opcode
        lambda *_: self.parse_net32int(),               # handle
        lambda *_: self.parse_net32int(),               # tid
        lambda *_: self.parse_net32int(),               # rid
        lambda *_: self.parse_bindict(),                # message
        lambda *_: self.parse_bindict(),                # object
        lambda *args: self.parse_fixedbuffer(args[1]),  # signature (authlen long)
    )
    parser = parse_chain(*stages)

    def build(args):
        # drop authlen (args[1]); OmapiMessage does not take it
        return OmapiMessage(*(args[0:1] + args[2:]))

    return parse_map(build, parser)
python
{ "resource": "" }
q15009
TCPClientTransport.fill_inbuffer
train
def fill_inbuffer(self):
    """Read bytes from the connection and hand them to the protocol.

    @raises OmapiError:
    @raises socket.error:
    """
    if not self.connection:
        raise OmapiError("not connected")
    try:
        chunk = self.connection.recv(2048)
    except socket.error:
        self.close()
        raise
    if not chunk:
        # empty read means the remote side closed the connection
        self.close()
        raise OmapiError("connection closed")
    try:
        self.protocol.data_received(chunk)
    except OmapiSizeLimitError:
        self.close()
        raise
python
{ "resource": "" }
q15010
TCPClientTransport.write
train
def write(self, data):
    """Send all of data to the connection, closing it on failure.

    @type data: bytes
    @raises socket.error:
    """
    try:
        self.connection.sendall(data)
    except socket.error:
        # a failed send leaves the stream in an undefined state
        self.close()
        raise
python
{ "resource": "" }
q15011
OmapiProtocol.send_message
train
def send_message(self, message, sign=True):
    """Send the given message to the connection.

    @type message: OmapiMessage
    @param sign: whether the message needs to be signed
    @raises OmapiError:
    @raises socket.error:
    """
    if sign:
        # sign with the default authenticator before serializing
        message.sign(self.authenticators[self.defauth])
    logger.debug("sending %s", LazyStr(message.dump_oneline))
    self.transport.write(message.as_string())
python
{ "resource": "" }
q15012
Omapi.receive_message
train
def receive_message(self):
    """Read the next message from the connection.

    @rtype: OmapiMessage
    @raises OmapiError:
    @raises socket.error:
    """
    # keep pulling from the transport until a complete message is queued
    while not self.recv_message_queue:
        self.transport.fill_inbuffer()
    message = self.recv_message_queue.pop(0)
    assert message is not None
    if message.verify(self.protocol.authenticators):
        return message
    self.close()
    raise OmapiError("bad omapi message signature")
python
{ "resource": "" }
q15013
Omapi.receive_response
train
def receive_response(self, message, insecure=False):
    """Read the response for the given message.

    @type message: OmapiMessage
    @type insecure: bool
    @param insecure: avoid an OmapiError about a wrong authenticator
    @rtype: OmapiMessage
    @raises OmapiError:
    @raises socket.error:
    """
    response = self.receive_message()
    if not response.is_response(message):
        raise OmapiError("received message is not the desired response")
    # signature was already verified by receive_message; here we only
    # check which authenticator produced it
    if not insecure and response.authid != self.protocol.defauth:
        raise OmapiError("received message is signed with wrong authenticator")
    return response
python
{ "resource": "" }
q15014
Omapi.send_message
train
def send_message(self, message, sign=True):
    """Sends the given message to the connection.

    @type message: OmapiMessage
    @type sign: bool
    @param sign: whether the message needs to be signed
    @raises OmapiError:
    @raises socket.error:
    """
    # fail early with a clear error if the connection is gone
    self.check_connected()
    self.protocol.send_message(message, sign)
python
{ "resource": "" }
q15015
Omapi.lookup_ip_host
train
def lookup_ip_host(self, mac):
    """Lookup a host object with given mac address and return its ip.

    @type mac: str
    @raises ValueError:
    @raises OmapiError:
    @raises OmapiErrorNotFound: if no host object with the given mac
            could be found
    @raises OmapiErrorAttributeNotFound: if the host was found but
            lacks an ip
    @raises socket.error:
    """
    res = self.lookup_by_host(mac=mac)
    if "ip-address" not in res:
        raise OmapiErrorAttributeNotFound()
    return res["ip-address"]
python
{ "resource": "" }
q15016
Omapi.lookup_ip
train
def lookup_ip(self, mac):
    """Look for a lease object with given mac address and return the
    assigned ip address.

    @type mac: str
    @rtype: str or None
    @raises ValueError:
    @raises OmapiError:
    @raises OmapiErrorNotFound: if no lease object with the given mac
            could be found
    @raises OmapiErrorAttributeNotFound: if the lease was found but
            lacks an ip
    @raises socket.error:
    """
    res = self.lookup_by_lease(mac=mac)
    if "ip-address" not in res:
        raise OmapiErrorAttributeNotFound()
    return res["ip-address"]
python
{ "resource": "" }
q15017
Omapi.lookup_mac
train
def lookup_mac(self, ip):
    """Look up a lease object with given ip address and return the
    associated mac address.

    @type ip: str
    @rtype: str or None
    @raises ValueError:
    @raises OmapiError:
    @raises OmapiErrorNotFound: if no lease object with the given ip
            could be found
    @raises OmapiErrorAttributeNotFound: if the lease was found but
            lacks a mac
    @raises socket.error:
    """
    res = self.lookup_by_lease(ip=ip)
    if "hardware-address" not in res:
        raise OmapiErrorAttributeNotFound()
    return res["hardware-address"]
python
{ "resource": "" }
q15018
Omapi.lookup_host
train
def lookup_host(self, name):
    """Look for a host object with given name and return its name,
    mac and ip address.

    @type name: str
    @rtype: dict or None
    @raises ValueError:
    @raises OmapiError:
    @raises OmapiErrorNotFound: if no host object with the given name
            could be found
    @raises OmapiErrorAttributeNotFound: if the host was found but
            lacks ip, mac or name
    @raises socket.error:
    """
    res = self.lookup_by_host(name=name)
    for key in ("ip-address", "hardware-address", "name"):
        if key not in res:
            raise OmapiErrorAttributeNotFound()
    return dict(ip=res["ip-address"], mac=res["hardware-address"],
                hostname=res["name"].decode('utf-8'))
python
{ "resource": "" }
q15019
Omapi.lookup_host_host
train
def lookup_host_host(self, mac):
    """Look for a host object with given mac address and return its
    name, mac and ip address.

    @type mac: str
    @rtype: dict or None
    @raises ValueError:
    @raises OmapiError:
    @raises OmapiErrorNotFound: if no host object with the given mac
            address could be found
    @raises OmapiErrorAttributeNotFound: if the host was found but
            lacks ip, mac or name
    @raises socket.error:
    """
    res = self.lookup_by_host(mac=mac)
    for key in ("ip-address", "hardware-address", "name"):
        if key not in res:
            raise OmapiErrorAttributeNotFound()
    return dict(ip=res["ip-address"], mac=res["hardware-address"],
                name=res["name"].decode('utf-8'))
python
{ "resource": "" }
q15020
Omapi.lookup_hostname
train
def lookup_hostname(self, ip):
    """Look up a lease object with given ip address and return the
    associated client hostname.

    @type ip: str
    @rtype: str or None
    @raises ValueError:
    @raises OmapiError:
    @raises OmapiErrorNotFound: if no lease object with the given ip
            address could be found
    @raises OmapiErrorAttributeNotFound: if the lease was found but
            lacks a hostname
    @raises socket.error:
    """
    res = self.lookup_by_lease(ip=ip)
    try:
        hostname = res["client-hostname"]
    except KeyError:
        raise OmapiErrorAttributeNotFound()
    return hostname.decode('utf-8')
python
{ "resource": "" }
q15021
Omapi.__lookup
train
def __lookup(self, ltype, **kwargs):
    """Generic Lookup function

    @type ltype: str
    @type rvalues: list
    @type ip: str
    @type mac: str
    @type name: str
    @rtype: dict or str (if len(rvalues) == 1) or None
    @raises ValueError:
    @raises OmapiError:
    @raises OmapiErrorNotFound: if no host object with the given name
            could be found or the object lacks an ip address or mac
    @raises socket.error:
    """
    ltype_utf = ltype.encode("utf-8")
    assert ltype_utf in [b"host", b"lease"]
    msg = OmapiMessage.open(ltype_utf)
    # build the query object from the keyword arguments; known keys get
    # packed into their OMAPI wire representation
    for k in kwargs:
        if k == "raw":
            # "raw" is a flag for us, not a query attribute
            continue
        _k = k.replace("_", "-")
        if _k in ["ip", "ip-address"]:
            msg.obj.append((b"ip-address", pack_ip(kwargs[k])))
        elif _k in ["mac", "hardware-address"]:
            msg.obj.append((b"hardware-address", pack_mac(kwargs[k])))
            # hardware-type 1 is ethernet
            msg.obj.append((b"hardware-type", struct.pack("!I", 1)))
        elif _k == "name":
            msg.obj.append((b"name", kwargs[k].encode('utf-8')))
        else:
            # unknown keys are passed through as utf-8 encoded strings
            msg.obj.append((str(k).encode(), kwargs[k].encode('utf-8')))
    response = self.query_server(msg)
    if response.opcode != OMAPI_OP_UPDATE:
        raise OmapiErrorNotFound()
    if "raw" in kwargs and kwargs["raw"]:
        # caller wants the undecoded wire values
        return dict(response.obj)
    # decode known attributes back into python values
    res = dict()
    for k, v in dict(response.obj).items():
        _k = k.decode('utf-8')
        try:
            if _k == "ip-address":
                v = unpack_ip(v)
            elif _k in ["hardware-address"]:
                v = unpack_mac(v)
            elif _k in ["starts", "ends", "tstp", "tsfp", "atsfp", "cltt",
                        "subnet", "pool", "state", "hardware-type"]:
                v = struct.unpack(">I", v)[0]
            elif _k in ["flags"]:
                v = struct.unpack(">I", v)[0]
        except struct.error:
            # leave the raw value if it does not match the expected size
            pass
        res[_k] = v
    return res
python
{ "resource": "" }
q15022
Omapi.add_host
train
def add_host(self, ip, mac):
    """Create a host object with given ip address and mac address.

    @type ip: str
    @type mac: str
    @raises ValueError:
    @raises OmapiError:
    @raises socket.error:
    """
    msg = OmapiMessage.open(b"host")
    msg.message.extend([
        (b"create", struct.pack("!I", 1)),
        (b"exclusive", struct.pack("!I", 1)),
    ])
    msg.obj.extend([
        (b"hardware-address", pack_mac(mac)),
        (b"hardware-type", struct.pack("!I", 1)),  # 1 == ethernet
        (b"ip-address", pack_ip(ip)),
    ])
    response = self.query_server(msg)
    if response.opcode != OMAPI_OP_UPDATE:
        raise OmapiError("add failed")
python
{ "resource": "" }
q15023
Omapi.add_host_supersede_name
train
def add_host_supersede_name(self, ip, mac, name):  # pylint:disable=E0213
    """Add a host with a fixed-address and override its hostname with
    the given name.

    @type self: Omapi
    @type ip: str
    @type mac: str
    @type name: str
    @raises ValueError:
    @raises OmapiError:
    @raises socket.error:
    """
    msg = OmapiMessage.open(b"host")
    msg.message.extend([
        (b"create", struct.pack("!I", 1)),
        (b"exclusive", struct.pack("!I", 1)),
    ])
    statement = 'supersede host-name "{0}";'.format(name)
    msg.obj.extend([
        (b"hardware-address", pack_mac(mac)),
        (b"hardware-type", struct.pack("!I", 1)),  # 1 == ethernet
        (b"ip-address", pack_ip(ip)),
        (b"name", name.encode('utf-8')),
        (b"statements", statement.encode('utf-8')),
    ])
    response = self.query_server(msg)
    if response.opcode != OMAPI_OP_UPDATE:
        raise OmapiError("add failed")
python
{ "resource": "" }
q15024
Omapi.add_host_supersede
train
def add_host_supersede(self, ip, mac, name, hostname=None, router=None, domain=None):  # pylint:disable=too-many-arguments
    """Create a host object with given ip, mac, name, hostname, router
    and domain. hostname, router and domain are optional arguments.

    @type ip: str
    @type mac: str
    @type name: str
    @type hostname: str
    @type router: str
    @type domain: str
    @raises OmapiError:
    @raises socket.error:
    """
    stmts = []
    msg = OmapiMessage.open(b"host")
    msg.message.append((b"create", struct.pack("!I", 1)))
    # bugfix: encode the name to bytes like the other add_* helpers do;
    # OMAPI values on the wire must be bytes, not str
    msg.obj.append((b"name", name.encode('utf-8')))
    msg.obj.append((b"hardware-address", pack_mac(mac)))
    msg.obj.append((b"hardware-type", struct.pack("!I", 1)))  # 1 == ethernet
    msg.obj.append((b"ip-address", pack_ip(ip)))
    if hostname:
        stmts.append('supersede host-name "{0}";\n '.format(hostname))
    if router:
        stmts.append('supersede routers {0};\n '.format(router))
    if domain:
        stmts.append('supersede domain-name "{0}";'.format(domain))
    if stmts:
        msg.obj.append((b"statements", "".join(stmts).encode("utf-8")))
    response = self.query_server(msg)
    if response.opcode != OMAPI_OP_UPDATE:
        raise OmapiError("add failed")
python
{ "resource": "" }
q15025
Omapi.del_host
train
def del_host(self, mac):
    """Delete a host object with given mac address.

    @type mac: str
    @raises ValueError:
    @raises OmapiError:
    @raises OmapiErrorNotFound: if no host object with the given mac
            address could be found
    @raises socket.error:
    """
    # first open the host object to obtain a server-side handle
    msg = OmapiMessage.open(b"host")
    msg.obj.extend([
        (b"hardware-address", pack_mac(mac)),
        (b"hardware-type", struct.pack("!I", 1)),
    ])
    response = self.query_server(msg)
    if response.opcode != OMAPI_OP_UPDATE:
        raise OmapiErrorNotFound()
    if response.handle == 0:
        raise OmapiError("received invalid handle from server")
    # then delete via the handle
    response = self.query_server(OmapiMessage.delete(response.handle))
    if response.opcode != OMAPI_OP_STATUS:
        raise OmapiError("delete failed")
python
{ "resource": "" }
q15026
Omapi.add_group
train
def add_group(self, groupname, statements):
    """Adds a group.

    @type groupname: bytes
    @type statements: str
    @raises OmapiError:
    @raises socket.error:
    """
    msg = OmapiMessage.open(b"group")
    # bugfix: OMAPI keys must be bytes (b"..."), matching every other
    # method in this class; str keys break message serialization on py3
    msg.message.append((b"create", struct.pack("!I", 1)))
    msg.obj.append((b"name", groupname))
    # NOTE(review): statements is documented as str while sibling helpers
    # encode values to utf-8 — confirm callers pass bytes or add encoding
    msg.obj.append((b"statements", statements))
    response = self.query_server(msg)
    if response.opcode != OMAPI_OP_UPDATE:
        raise OmapiError("add group failed")
python
{ "resource": "" }
q15027
Omapi.add_host_with_group
train
def add_host_with_group(self, ip, mac, groupname):
    """Adds a host with given ip and mac in a group named groupname.

    @type ip: str
    @type mac: str
    @type groupname: str
    @raises ValueError:
    @raises OmapiError:
    @raises socket.error:
    """
    msg = OmapiMessage.open(b"host")
    # bugfix: OMAPI keys must be bytes (b"..."), matching every other
    # method in this class; str keys break message serialization on py3
    msg.message.append((b"create", struct.pack("!I", 1)))
    msg.message.append((b"exclusive", struct.pack("!I", 1)))
    msg.obj.append((b"hardware-address", pack_mac(mac)))
    msg.obj.append((b"hardware-type", struct.pack("!I", 1)))
    msg.obj.append((b"ip-address", pack_ip(ip)))
    # NOTE(review): groupname is documented as str while sibling helpers
    # encode values to utf-8 — confirm callers pass bytes or add encoding
    msg.obj.append((b"group", groupname))
    response = self.query_server(msg)
    if response.opcode != OMAPI_OP_UPDATE:
        raise OmapiError("add failed")
python
{ "resource": "" }
q15028
Omapi.change_group
train
def change_group(self, name, group):
    """Change the group of a host given the name of the host.

    @type name: str
    @type group: str
    @raises OmapiError:
    @raises socket.error:
    """
    # open the host object by name to obtain a handle
    open_msg = OmapiMessage.open(b"host")
    open_msg.update_object(dict(name=name))
    open_resp = self.query_server(open_msg)
    if open_resp.opcode != OMAPI_OP_UPDATE:
        raise OmapiError("opening host %s failed" % name)
    # then update the group attribute through that handle
    update_msg = OmapiMessage.update(open_resp.handle)
    update_msg.update_object(dict(group=group))
    update_resp = self.query_server(update_msg)
    if update_resp.opcode != OMAPI_OP_UPDATE:
        raise OmapiError("changing group of host %s to %s failed" % (name, group))
python
{ "resource": "" }
q15029
Resource._query
train
def _query(cls, **kwargs):
    """Generic query implementation that is used by the resources."""
    from sevenbridges.models.link import Link
    from sevenbridges.meta.collection import Collection

    api = kwargs.pop('api', cls._API)
    url = kwargs.pop('url')
    extra = {'resource': cls.__name__, 'query': kwargs}
    logger.info('Querying {} resource'.format(cls), extra=extra)
    response = api.get(url=url, params=kwargs)
    data = response.json()
    # NOTE(review): header values are strings — confirm Collection accepts
    # a str total or convert at the boundary
    total = response.headers['x-total-matching-query']
    return Collection(
        resource=cls,
        href=data['href'],
        total=total,
        items=[cls(api=api, **item) for item in data['items']],
        links=[Link(**link) for link in data['links']],
        api=api,
    )
python
{ "resource": "" }
q15030
Resource.delete
train
def delete(self):
    """Deletes the resource on the server."""
    if 'delete' not in self._URL:
        raise SbgError('Resource can not be deleted!')
    extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id},
    }
    logger.info("Deleting {} resource.".format(self), extra=extra)
    self._api.delete(url=self._URL['delete'].format(id=self.id))
python
{ "resource": "" }
q15031
Resource.reload
train
def reload(self):
    """
    Refreshes the resource with the data from the server.
    """
    try:
        # prefer the canonical href if the server supplied one
        if hasattr(self, 'href'):
            data = self._api.get(self.href, append_base=False).json()
            resource = self.__class__(api=self._api, **data)
        # otherwise fall back to the resource's 'get' URL template
        elif hasattr(self, 'id') and hasattr(self, '_URL') and \
                'get' in self._URL:
            data = self._api.get(
                self._URL['get'].format(id=self.id)).json()
            resource = self.__class__(api=self._api, **data)
        else:
            raise SbgError('Resource can not be refreshed!')
        query = {'id': self.id} if hasattr(self, 'id') else {}
        extra = {'resource': self.__class__.__name__, 'query': query}
        logger.info('Reloading {} resource.'.format(self), extra=extra)
    except Exception:
        # NOTE(review): this broad except hides the original failure
        # (network errors, bad payloads) behind a generic message —
        # consider chaining or narrowing it
        raise SbgError('Resource can not be refreshed!')
    # adopt the freshly fetched state
    self._data = resource._data
    self._dirty = resource._dirty
    self._old = copy.deepcopy(self._data.data)
    return self
python
{ "resource": "" }
q15032
RecordConverter.convert
train
def convert(cls, record):
    """
    Converts a single dictionary or list of dictionaries into a
    converted list of dictionaries.
    """
    records = record if isinstance(record, list) else [record]
    return [cls._convert(r) for r in records]
python
{ "resource": "" }
q15033
RecordConverter._convert_internal
train
def _convert_internal(cls, record): """ Converts a single dictionary into converted dictionary or list of dictionaries into converted list of dictionaries. Used while passing dictionaries to another converter. """ if isinstance(record, list): return [cls._convert(r) for r in record] else: return cls._convert(record)
python
{ "resource": "" }
q15034
RecordConverter._convert
train
def _convert(cls, record):
    """
    Core method of the converter. Converts a single dictionary into
    another dictionary.
    """
    if not record:
        return {}
    converted_dict = {}
    # each entry of cls.conversion is a tuple:
    # (key, [converted_key], [conversion_method], [nested converter])
    for field in cls.conversion:
        key = field[0]
        # output key defaults to the input key
        if len(field) >= 2 and field[1]:
            converted_key = field[1]
        else:
            converted_key = key
        # value transformer defaults to the class-wide default
        if len(field) >= 3 and field[2]:
            conversion_method = field[2]
        else:
            conversion_method = cls.default_conversion_method
        # optional nested converter for structured values
        if len(field) >= 4:
            converter = field[3]
        else:
            converter = None
        try:
            value = conversion_method(record[key])
        except KeyError:
            # missing input keys are simply skipped
            continue
        if converter:
            value = converter._convert_internal(value)
        if converted_key is APPEND:
            # APPEND sentinel: merge the converted value(s) into the
            # result instead of storing under a key
            if isinstance(value, list):
                for v in value:
                    converted_dict.update(v)
            else:
                converted_dict.update(value)
        else:
            converted_dict[converted_key] = value
    return converted_dict
python
{ "resource": "" }
q15035
create_room
train
def create_room(room):
    """Creates a MUC room on the XMPP server."""
    # rooms hosted on a custom (external) server are not managed by us
    if room.custom_server:
        return

    def _create_room(xmpp):
        # runs once the XMPP session is established
        muc = xmpp.plugin['xep_0045']
        # the bot must join the room before it can configure it
        muc.joinMUC(room.jid, xmpp.requested_jid.user)
        muc.configureRoom(room.jid, _set_form_values(xmpp, room))
        current_plugin.logger.info('Creating room %s', room.jid)

    _execute_xmpp(_create_room)
python
{ "resource": "" }
q15036
update_room
train
def update_room(room):
    """Updates a MUC room on the XMPP server."""
    # rooms hosted on a custom (external) server are not managed by us
    if room.custom_server:
        return

    def _update_room(xmpp):
        # runs once the XMPP session is established
        muc = xmpp.plugin['xep_0045']
        muc.joinMUC(room.jid, xmpp.requested_jid.user)
        # start from the room's current config so unrelated settings survive
        muc.configureRoom(room.jid, _set_form_values(xmpp, room, muc.getRoomConfig(room.jid)))
        current_plugin.logger.info('Updating room %s', room.jid)

    _execute_xmpp(_update_room)
python
{ "resource": "" }
q15037
delete_room
train
def delete_room(room, reason=''):
    """Deletes a MUC room from the XMPP server."""
    # rooms hosted on a custom (external) server are not managed by us
    if room.custom_server:
        return

    def _delete_room(xmpp):
        muc = xmpp.plugin['xep_0045']
        muc.destroy(room.jid, reason=reason)
        current_plugin.logger.info('Deleting room %s', room.jid)

    _execute_xmpp(_delete_room)
    # also discard any chat logs kept for the room
    delete_logs(room)
python
{ "resource": "" }
q15038
get_room_config
train
def get_room_config(jid):
    """Retrieves basic data of a MUC room from the XMPP server.

    :return: dict containing name, description and password of the room,
             or ``None`` if the room could not be found
    """
    # our keys -> MUC room configuration form field names
    mapping = {
        'name': 'muc#roomconfig_roomname',
        'description': 'muc#roomconfig_roomdesc',
        'password': 'muc#roomconfig_roomsecret'
    }

    def _get_room_config(xmpp):
        muc = xmpp.plugin['xep_0045']
        try:
            form = muc.getRoomConfig(jid)
        except ValueError:  # probably the room doesn't exist
            return None
        fields = form.values['fields']
        # compat fix: `items()` instead of the Python-2-only `iteritems()`
        # so this works on both Python 2 and 3
        return {key: fields[muc_key].values['value'] for key, muc_key in mapping.items()}

    return _execute_xmpp(_get_room_config)
python
{ "resource": "" }
q15039
room_exists
train
def room_exists(jid):
    """Checks if a MUC room exists on the server."""
    def _room_exists(xmpp):
        disco = xmpp.plugin['xep_0030']
        try:
            disco.get_info(jid)
        except IqError as exc:
            # a missing item means the room does not exist; anything
            # else is a real error
            if exc.condition == 'item-not-found':
                return False
            raise
        return True

    return _execute_xmpp(_room_exists)
python
{ "resource": "" }
q15040
sanitize_jid
train
def sanitize_jid(s):
    """Generates a valid JID node identifier from a string"""
    # asciify + lowercase, collapse whitespace to dashes, strip anything
    # that is not allowed in a JID node, then trim and cap the length
    node = unicode_to_ascii(s).lower()
    node = INVALID_JID_CHARS.sub('', WHITESPACE.sub('-', node))
    return node.strip()[:256]
python
{ "resource": "" }
q15041
generate_jid
train
def generate_jid(name, append_date=None):
    """Generates a valid JID based on the room name.

    :param append_date: appends the given date to the JID
    """
    base = sanitize_jid(name)
    if not append_date:
        return base
    return '{}-{}'.format(base, append_date.strftime('%Y-%m-%d'))
python
{ "resource": "" }
q15042
_execute_xmpp
train
def _execute_xmpp(connected_callback):
    """Connects to the XMPP server and executes custom code

    :param connected_callback: function to execute after connecting
    :return: return value of the callback
    """
    from indico_chat.plugin import ChatPlugin
    check_config()
    jid = ChatPlugin.settings.get('bot_jid')
    password = ChatPlugin.settings.get('bot_password')
    # allow a bare node in the settings; qualify with the configured server
    if '@' not in jid:
        jid = '{}@{}'.format(jid, ChatPlugin.settings.get('server'))
    result = [None, None]  # result, exception
    app = current_app._get_current_object()  # callback runs in another thread

    def _session_start(event):
        # executed by the XMPP client thread once the session is up
        try:
            with app.app_context():
                result[0] = connected_callback(xmpp)
        except Exception as e:
            # stash the exception so the calling thread can re-raise it
            result[1] = e
            if isinstance(e, IqError):
                current_plugin.logger.exception('XMPP callback failed: %s', e.condition)
            else:
                current_plugin.logger.exception('XMPP callback failed')
        finally:
            # always tear the session down once the callback is done
            xmpp.disconnect(wait=0)

    xmpp = ClientXMPP(jid, password)
    xmpp.register_plugin('xep_0045')  # MUC
    xmpp.register_plugin('xep_0004')  # data forms
    xmpp.register_plugin('xep_0030')  # service discovery
    xmpp.add_event_handler('session_start', _session_start)
    try:
        xmpp.connect()
    except Exception:
        current_plugin.logger.exception('XMPP connection failed')
        xmpp.disconnect()
        raise
    try:
        # blocks until the session is torn down by _session_start
        xmpp.process(threaded=False)
    finally:
        xmpp.disconnect(wait=0)
    # propagate any exception raised inside the callback thread
    if result[1] is not None:
        raise result[1]
    return result[0]
python
{ "resource": "" }
q15043
retrieve_logs
train
def retrieve_logs(room, start_date=None, end_date=None):
    """Retrieves chat logs

    :param room: the `Chatroom`
    :param start_date: the earliest date to get logs for
    :param end_date: the latest date to get logs for
    :return: logs in html format
    """
    from indico_chat.plugin import ChatPlugin
    base_url = ChatPlugin.settings.get('log_url')
    if not base_url or room.custom_server:
        return None
    params = {'cr': room.jid}
    for key, value in (('sdate', start_date), ('edate', end_date)):
        if value:
            params[key] = value.strftime('%Y-%m-%d')
    try:
        response = requests.get(base_url, params=params)
    except RequestException:
        current_plugin.logger.exception('Could not retrieve logs for %s', room.jid)
        return None
    # the log service reports failures as JSON instead of HTML
    if response.headers.get('content-type') == 'application/json':
        current_plugin.logger.warning('Could not retrieve logs for %s: %s', room.jid,
                                      response.json().get('error'))
        return None
    return response.text
python
{ "resource": "" }
q15044
delete_logs
train
def delete_logs(room):
    """Deletes chat logs"""
    from indico_chat.plugin import ChatPlugin
    base_url = ChatPlugin.settings.get('log_url')
    if not base_url or room.custom_server:
        return
    try:
        data = requests.get(posixpath.join(base_url, 'delete'),
                            params={'cr': room.jid}).json()
    except (RequestException, ValueError):
        # ValueError covers a non-JSON response body
        current_plugin.logger.exception('Could not delete logs for %s', room.jid)
        return
    if not data.get('success'):
        current_plugin.logger.warning('Could not delete logs for %s: %s',
                                      room.jid, data.get('error'))
python
{ "resource": "" }
q15045
Project.create_task
train
def create_task(self, name, app, revision=None, batch_input=None, batch_by=None,
                inputs=None, description=None, run=False, disable_batch=False,
                interruptible=True, execution_settings=None):
    """Creates a task for this project.

    :param name: Task name.
    :param app: CWL app identifier.
    :param revision: CWL app revision.
    :param batch_input: Batch input.
    :param batch_by: Batch criteria.
    :param inputs: Input map.
    :param description: Task description.
    :param run: True if you want to run a task upon creation.
    :param disable_batch: True if you want to disable batching.
    :param interruptible: True if you want to use interruptible instances.
    :param execution_settings: Execution settings for the task.
    :return: Task object.
    """
    # pure delegation: the task service does the work, we only pin the project
    task_kwargs = dict(
        name=name, project=self, app=app, revision=revision,
        batch_input=batch_input, batch_by=batch_by, inputs=inputs,
        description=description, run=run, disable_batch=disable_batch,
        interruptible=interruptible, execution_settings=execution_settings,
    )
    return self._api.tasks.create(**task_kwargs)
python
{ "resource": "" }
q15046
validate_business
train
def validate_business(form, field):
    """Validates a PayPal business string.

    It can either be an email address or a paypal business account ID.
    """
    value = field.data
    looks_like_id = re.match(r'^[a-zA-Z0-9]{13}$', value) is not None
    if not looks_like_id and not is_valid_mail(value, multi=False):
        raise ValidationError(_('Invalid email address / paypal ID'))
python
{ "resource": "" }
q15047
DPartedFile.submit
train
def submit(self):
    """
    Partitions the file into chunks and submits them in groups of
    at most four for download on the api download pool.
    """
    futures = []
    # keep at most 4 parts in flight at a time
    while self.submitted < 4 and not self.done():
        part = self.parts.pop(0)
        future = self.pool.submit(
            _download_part, self.file_path, self.session, self.url,
            self.retry, self.timeout, *part
        )
        futures.append(future)
        self.submitted += 1
        self.total_submitted += 1
    return futures
python
{ "resource": "" }
q15048
DPartedFile.get_parts
train
def get_parts(self):
    """
    Partitions the file and saves the part information in memory.
    Each part is an inclusive [start, end] byte range.
    """
    size = PartSize.DOWNLOAD_MINIMUM_PART_SIZE
    return [[i * size, (i + 1) * size - 1] for i in range(self.total)]
python
{ "resource": "" }
q15049
LiveSyncBackendBase.run
train
def run(self):
    """Runs the livesync export"""
    if self.uploader is None:  # pragma: no cover
        raise NotImplementedError
    records = self.fetch_records()
    uploader_instance = self.uploader(self)
    LiveSyncPlugin.logger.info('Uploading %d records', len(records))
    uploader_instance.run(records)
    # remember this run so the next export only picks up newer changes
    self.update_last_run()
python
{ "resource": "" }
q15050
LiveSyncBackendBase.run_initial_export
train
def run_initial_export(self, events):
    """Runs the initial export.

    This process is expected to take a very long time.

    :param events: iterable of all events in this indico instance
    """
    if self.uploader is None:  # pragma: no cover
        raise NotImplementedError
    self.uploader(self).run_initial(events)
python
{ "resource": "" }
q15051
check_config
train
def check_config(quiet=False):
    """Checks if all required config options are set

    :param quiet: if True, return the result as a bool, otherwise raise
                  `IndicoError` if any setting is missing
    """
    from indico_chat.plugin import ChatPlugin
    settings = ChatPlugin.settings.get_all()
    required = ('server', 'muc_server', 'bot_jid', 'bot_password')
    configured = all(settings[key] for key in required)
    if not configured and not quiet:
        raise IndicoError(_('Chat plugin is not configured properly'))
    return configured
python
{ "resource": "" }
q15052
is_chat_admin
train
def is_chat_admin(user):
    """Checks if a user is a chat admin"""
    from indico_chat.plugin import ChatPlugin
    # chat admins are the members of the plugin's 'admins' ACL setting
    return ChatPlugin.settings.acls.contains_user('admins', user)
python
{ "resource": "" }
q15053
inplace_reload
train
def inplace_reload(method):
    """
    Executes the wrapped function and reloads the object with data
    returned from the server.
    """
    # noinspection PyProtectedMember
    def wrapped(obj, *args, **kwargs):
        # inplace defaults to True when omitted or explicitly None
        in_place = kwargs.get('inplace') in (True, None)
        api_object = method(obj, *args, **kwargs)
        if not api_object:
            return obj
        if not in_place:
            return api_object
        # copy the fresh server state onto the original object
        obj._data = api_object._data
        obj._dirty = api_object._dirty
        obj._data.fetched = False
        return obj
    return wrapped
python
{ "resource": "" }
q15054
retry_on_excs
train
def retry_on_excs(excs, retry_count=3, delay=5):
    """Retry decorator used to retry callables on specific exceptions.

    :param excs: Exceptions tuple.
    :param retry_count: Retry count.
    :param delay: Delay in seconds between retries.
    :return: Wrapped function object.
    """
    def wrapper(f):
        @functools.wraps(f)
        def deco(*args, **kwargs):
            for _ in range(retry_count):
                try:
                    return f(*args, **kwargs)
                except excs:
                    if logger:
                        # bugfix: the original format string had no
                        # placeholder, silently dropping the function name
                        logger.warning(
                            '{} raised, retrying ...'.format(f.__name__),
                            exc_info=True
                        )
                    time.sleep(delay)
            # all retries exhausted; log and make one final attempt so
            # the real exception propagates to the caller
            if logger:  # bugfix: this path did not guard logger before
                logger.error(
                    '{} failed after {} retries'.format(f.__name__, retry_count)
                )
            return f(*args, **kwargs)
        return deco
    return wrapper
python
{ "resource": "" }
q15055
retry
train
def retry(retry_count):
    """
    Retry decorator used during file upload and download.

    Retries the wrapped callable with exponential backoff (2**attempt
    seconds) and raises ``SbgError`` once all attempts are exhausted.
    """
    def func(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            for attempt in range(retry_count):
                try:
                    return f(*args, **kwargs)
                except Exception:
                    time.sleep(2 ** attempt)
            # The loop never breaks, so the original for/else always ran;
            # raising after the loop is equivalent.
            raise SbgError('{}: failed to complete: {}'.format(
                threading.current_thread().getName(), f.__name__)
            )
        return wrapper
    return func
python
{ "resource": "" }
q15056
check_for_error
train
def check_for_error(func):
    """
    Decorator that executes the wrapped HTTP call and inspects the response
    object for specific errors, translating error statuses into the
    matching SbgError subclass.

    Returns the response for 200-203 statuses, None for 204 (No Content),
    and raises a typed error populated from the server's JSON body
    otherwise.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            response = func(*args, **kwargs)
            status_code = response.status_code
            # 200-203 carry a payload worth returning to the caller.
            if status_code in range(200, 204):
                return response
            # 204 No Content: nothing to return.
            if status_code == 204:
                return
            data = response.json()
            # Map the HTTP status to a specific exception class; anything
            # unexpected falls back to the generic SbgError.
            e = {
                400: BadRequest,
                401: Unauthorized,
                403: Forbidden,
                404: NotFound,
                405: MethodNotAllowed,
                408: RequestTimeout,
                409: Conflict,
                429: TooManyRequests,
                500: ServerError,
                503: ServiceUnavailable,
            }.get(status_code, SbgError)()
            # Copy any server-provided error details onto the exception.
            if 'message' in data:
                e.message = data['message']
            if 'code' in data:
                e.code = data['code']
            if 'status' in data:
                e.status = data['status']
            if 'more_info' in data:
                e.more_info = data['more_info']
            raise e
        except requests.RequestException as e:
            raise SbgError(message=six.text_type(e))
        except JSONDecodeError:
            # The body of an error response was not valid JSON.
            message = (
                'Service might be unavailable. Can also occur by providing '
                'too many query parameters.'
            )
            raise_from(
                ServiceUnavailable(message=six.text_type(message)), None
            )
        except ValueError as e:
            raise SbgError(message=six.text_type(e))
    return wrapper
python
{ "resource": "" }
q15057
ReportBase.get
train
def get(cls, *args, **kwargs):
    """Return a serializable Report object, served from cache when enabled."""
    from indico_piwik.plugin import PiwikPlugin
    use_cache = PiwikPlugin.settings.get('cache_enabled')
    if not use_cache:
        return cls(*args, **kwargs).to_serializable()
    cache = GenericCache('Piwik.Report')
    cache_key = u'{}-{}-{}'.format(cls.__name__, args, kwargs)
    report = cache.get(cache_key)
    if not report:
        report = cls(*args, **kwargs)
        cache.set(cache_key, report, PiwikPlugin.settings.get('cache_ttl'))
    return report.to_serializable()
python
{ "resource": "" }
q15058
ReportBase._init_date_range
train
def _init_date_range(self, start_date=None, end_date=None): """Set date range defaults if no dates are passed""" self.end_date = end_date self.start_date = start_date if self.end_date is None: today = now_utc().date() end_date = self.event.end_dt.date() self.end_date = end_date if end_date < today else today if self.start_date is None: self.start_date = self.end_date - timedelta(days=ReportBase.default_report_interval)
python
{ "resource": "" }
q15059
ReportGeneral._build_report
train
def _build_report(self):
    """Build the report by running each Piwik metric query."""
    queries = {
        'visits': PiwikQueryReportEventMetricVisits(**self.params),
        'unique_visits': PiwikQueryReportEventMetricUniqueVisits(**self.params),
        'visit_duration': PiwikQueryReportEventMetricVisitDuration(**self.params),
        'referrers': PiwikQueryReportEventMetricReferrers(**self.params),
        'peak': PiwikQueryReportEventMetricPeakDateAndVisitors(**self.params),
    }
    self.metrics = {name: query.get_result()
                    for name, query in queries.iteritems()}
    self._fetch_contribution_info()
python
{ "resource": "" }
q15060
ReportGeneral._fetch_contribution_info
train
def _fetch_contribution_info(self):
    """Build the list of information entries for contributions of the event.

    Populates ``self.contributions`` with a mapping from
    ``'<event_id>t<contribution_id>'`` keys to ``'<title> (<start time>)'``
    display strings. Contributions without a start date are skipped, and
    the legacy contribution id is used when a legacy mapping exists.
    """
    self.contributions = {}
    # Eager-load the relationships read below to avoid per-row queries.
    query = (Contribution.query
             .with_parent(self.event)
             .options(joinedload('legacy_mapping'),
                      joinedload('timetable_entry').lazyload('*')))
    for contribution in query:
        # Unscheduled contributions have no start time to display.
        if not contribution.start_dt:
            continue
        cid = (contribution.legacy_mapping.legacy_contribution_id
               if contribution.legacy_mapping
               else contribution.id)
        key = '{}t{}'.format(contribution.event_id, cid)
        self.contributions[key] = u'{} ({})'.format(
            contribution.title,
            to_unicode(format_time(contribution.start_dt)))
python
{ "resource": "" }
q15061
DeconzDevice.remove_callback
train
def remove_callback(self, callback):
    """Deregister a previously registered callback; no-op if absent."""
    try:
        self._async_callbacks.remove(callback)
    except ValueError:
        # Callback was never registered (or already removed).
        pass
python
{ "resource": "" }
q15062
DeconzDevice.update_attr
train
def update_attr(self, attr):
    """Apply *attr* values to this device's shadow attributes.

    ``None`` values are ignored. Returns the list of keys whose stored
    value actually changed.
    """
    changed = []
    for key, value in attr.items():
        if value is None:
            continue
        private_name = "_{0}".format(key)
        if getattr(self, private_name, None) != value:
            changed.append(key)
        setattr(self, private_name, value)
        _LOGGER.debug('%s: update %s with %s', self.name, key, value)
    return changed
python
{ "resource": "" }
q15063
DeconzGroup.async_set_state
train
async def async_set_state(self, data):
    """Send a state change to the light group.

    Example payload::

        {"on": true, "bri": 180, "hue": 43680, "sat": 255,
         "transitiontime": 10}

    The local group state is updated as well, since deCONZ does not push
    group state changes over the websocket.
    """
    action_field = self.deconz_id + '/action'
    await self._async_set_state_callback(action_field, data)
    self.async_update({'state': data})
python
{ "resource": "" }
q15064
DeconzGroup.async_add_scenes
train
def async_add_scenes(self, scenes, async_set_state_callback):
    """Add scenes belonging to group.

    :param scenes: iterable of raw scene dicts from the gateway; each must
        have an 'id' key.
    :param async_set_state_callback: coroutine each DeconzScene uses to
        send state changes to the gateway.
    """
    # NOTE(review): this rebinds self._scenes to ONLY the scenes that were
    # not already present, discarding previously stored entries — confirm
    # this is called at most once per group, or that dropping old entries
    # is intended.
    self._scenes = {
        scene['id']: DeconzScene(self, scene, async_set_state_callback)
        for scene in scenes
        if scene['id'] not in self._scenes
    }
python
{ "resource": "" }
q15065
DeconzGroup.update_color_state
train
def update_color_state(self, light):
    """Mirror the color-related state of *light* onto this group."""
    if light.xy:
        x, y = light.xy
    else:
        # No xy coordinates available on the light.
        x = y = None
    self.async_update({
        'state': {
            'bri': light.brightness,
            'hue': light.hue,
            'sat': light.sat,
            'ct': light.ct,
            'x': x,
            'y': y,
            'colormode': light.colormode,
        },
    })
python
{ "resource": "" }
q15066
DeconzScene.async_set_state
train
async def async_set_state(self, data):
    """Ask the gateway to recall this scene for its group."""
    recall_field = self._deconz_id + '/recall'
    await self._async_set_state_callback(recall_field, data)
python
{ "resource": "" }
q15067
DeconzLightBase.async_update
train
def async_update(self, event):
    """Handle a light event.

    Applies any 'state' changes from the event, then lets the base class
    signal that the light has an updated state.
    """
    state_changes = event.get('state', {})
    self.update_attr(state_changes)
    super().async_update(event)
python
{ "resource": "" }
q15068
File.upload
train
def upload(cls, path, project=None, parent=None, file_name=None,
           overwrite=False, retry=5, timeout=10,
           part_size=PartSize.UPLOAD_MINIMUM_PART_SIZE, wait=True,
           api=None):
    """
    Upload a local file using multipart upload.

    Returns an upload handle. When ``wait`` is True the call blocks until
    the upload completes; otherwise the caller starts it via the handle.
    Exactly one of ``project``/``parent`` must identify the destination.

    :param path: File path on local disc.
    :param project: Project identifier.
    :param parent: Parent folder identifier.
    :param file_name: Optional file name.
    :param overwrite: If true will overwrite the file on the server.
    :param retry: Number of retries if error occurs during upload.
    :param timeout: Timeout for http requests.
    :param part_size: Part size in bytes.
    :param wait: If true will wait for upload to complete.
    :param api: Api instance.
    """
    api = api or cls._API
    extra = {
        'resource': cls.__name__,
        'query': {
            'path': path,
            'project': project,
            'file_name': file_name,
            'overwrite': overwrite,
            'retry': retry,
            'timeout': timeout,
            'part_size': part_size,
            'wait': wait,
        },
    }
    logger.info('Uploading file', extra=extra)
    # Destination validation: one and only one of project/parent.
    if not project and not parent:
        raise SbgError('A project or parent identifier is required.')
    if project and parent:
        raise SbgError(
            'Project and parent identifiers are mutually exclusive.'
        )
    if project:
        project = Transform.to_project(project)
    if parent:
        parent = Transform.to_file(parent)
    upload = Upload(
        file_path=path,
        project=project,
        parent=parent,
        file_name=file_name,
        overwrite=overwrite,
        retry_count=retry,
        timeout=timeout,
        part_size=part_size,
        api=api
    )
    # Both branches of the original returned the handle; only the blocking
    # behaviour differs.
    if wait:
        upload.start()
        upload.wait()
    return upload
python
{ "resource": "" }
q15069
File.reload
train
def reload(self):
    """
    Refreshes the file with the data from the server.

    Tries the resource's ``href`` first and falls back to fetching by id;
    raises ``SbgError`` if both attempts fail.
    """
    try:
        data = self._api.get(self.href, append_base=False).json()
        resource = File(api=self._api, **data)
    except Exception:
        # Fallback: href may be missing or stale; fetch by id instead.
        try:
            data = self._api.get(
                self._URL['get'].format(id=self.id)).json()
            resource = File(api=self._api, **data)
        except Exception:
            raise SbgError('Resource can not be refreshed!')
    # Adopt the freshly fetched state.
    self._data = resource._data
    self._dirty = resource._dirty
    self._old = copy.deepcopy(self._data.data)
    # If file.metadata = value was executed
    # file object will have attribute _method='PUT', which tells us
    # to force overwrite of metadata on the server. This is metadata
    # specific. Once we reload the resource we delete the attribute
    # _method from the instance.
    try:
        delattr(self, '_method')
    except AttributeError:
        pass
python
{ "resource": "" }
q15070
File.content
train
def content(self, path=None, overwrite=True, encoding='utf-8'):
    """
    Download the file and return its textual content.

    Not suitable for very large files, since the whole content is read
    into memory. When *path* is omitted a temporary file is used.

    :param path: Download destination; a temp file is used if omitted.
    :param overwrite: Overwrite the local file if it already exists.
    :param encoding: Text encoding used to read the file, UTF-8 by default.
    :return: File content.
    """
    def _download_and_read(target):
        # Download first, then read the file back with the requested
        # encoding.
        self.download(wait=True, path=target, overwrite=overwrite)
        with io.open(target, 'r', encoding=encoding) as fp:
            return fp.read()

    if path:
        return _download_and_read(path)
    with tempfile.NamedTemporaryFile() as tmpfile:
        return _download_and_read(tmpfile.name)
python
{ "resource": "" }
q15071
get_json_from_remote_server
train
def get_json_from_remote_server(func, **kwargs):
    """
    Safely call *func* on the remote Piwik server and decode its JSON
    response, logging and swallowing failures so callers always get data.
    """
    raw = func(**kwargs)
    if raw is None:
        # If the request failed we already logged in in PiwikRequest;
        # no need to get into the exception handler below.
        return {}
    try:
        payload = json.loads(raw)
        if isinstance(payload, dict) and payload.get('result') == 'error':
            current_plugin.logger.error('The Piwik server responded with an error: %s', payload['message'])
            return {}
        return payload
    except Exception:
        current_plugin.logger.exception('Unable to load JSON from source %s', raw)
        return {}
python
{ "resource": "" }
q15072
reduce_json
train
def reduce_json(data):
    """Sum the values of a JSON object, coercing each value to int.

    Fixes two edge cases of the reduce-based original: a single-entry dict
    now yields an int (reduce returned the lone value uncoerced, e.g. the
    string '5'), and an empty dict yields 0 instead of raising TypeError.

    :param data: dict whose values are ints or int-parsable strings.
    :return: Integer sum of all values.
    """
    return sum(int(value) for value in data.values())
python
{ "resource": "" }
q15073
stringify_seconds
train
def stringify_seconds(seconds=0):
    """
    Convert a duration in seconds to a human-readable "HHh MMm SSs" string.

    Uses divmod-based integer arithmetic instead of the original '/'
    divisions, which produce floats under Python 3. Negative input yields
    "0h 0m 0s", matching the original's `seconds > 0` guard.

    :param seconds: Duration in seconds (coerced to int).
    :return: Formatted string, e.g. "1h 2m 3s".
    """
    seconds = max(int(seconds), 0)
    hours, remainder = divmod(seconds, 3600)
    minutes, secs = divmod(remainder, 60)
    return "%dh %dm %ds" % (hours, minutes, secs)
python
{ "resource": "" }
q15074
create_sensor
train
def create_sensor(sensor_id, sensor, async_set_state_callback):
    """Instantiate the sensor class matching the deCONZ sensor type.

    Returns None for unsupported types. Thermostats additionally receive
    the state-change callback.
    """
    # Ordered dispatch table, preserving the original check order.
    dispatch = (
        (CONSUMPTION, Consumption),
        (CARBONMONOXIDE, CarbonMonoxide),
        (DAYLIGHT, Daylight),
        (FIRE, Fire),
        (GENERICFLAG, GenericFlag),
        (GENERICSTATUS, GenericStatus),
        (HUMIDITY, Humidity),
        (LIGHTLEVEL, LightLevel),
        (OPENCLOSE, OpenClose),
        (POWER, Power),
        (PRESENCE, Presence),
        (PRESSURE, Pressure),
        (SWITCH, Switch),
        (TEMPERATURE, Temperature),
        (THERMOSTAT, Thermostat),
        (VIBRATION, Vibration),
        (WATER, Water),
    )
    sensor_type = sensor['type']
    for type_names, sensor_cls in dispatch:
        if sensor_type in type_names:
            if sensor_cls is Thermostat:
                # Thermostats can push state back to the gateway.
                return Thermostat(sensor_id, sensor,
                                  async_set_state_callback)
            return sensor_cls(sensor_id, sensor)
    return None
python
{ "resource": "" }
q15075
supported_sensor
train
def supported_sensor(sensor):
    """Check if sensor is supported by pydeconz.

    :param sensor: Raw sensor dict; 'type' and 'name' keys are read.
    :return: True if supported, False otherwise. The original implicitly
        returned None on the unsupported path; an explicit bool is
        returned now (backward compatible for truthiness checks).
    """
    if sensor['type'] in DECONZ_BINARY_SENSOR + DECONZ_SENSOR + OTHER_SENSOR:
        return True
    _LOGGER.info('Unsupported sensor type %s (%s)',
                 sensor['type'], sensor['name'])
    return False
python
{ "resource": "" }
q15076
DeconzSensor.async_update
train
def async_update(self, event, reason=None):
    """Handle a new event for this sensor.

    Applies 'state' and 'config' changes from the event and records in
    *reason* which sections were present and which attributes changed,
    before signalling subscribers via the base class.

    :param event: Event dict; may contain 'state' and/or 'config'.
    :param reason: Optional dict to fill with change info. Fix: the
        original used a mutable default ``reason={}``, so change info
        leaked between calls sharing the default dict; a fresh dict is
        now created per call when the argument is omitted.
    """
    if reason is None:
        reason = {}
    reason['attr'] = []
    for data in ['state', 'config']:
        changed_attr = self.update_attr(event.get(data, {}))
        reason[data] = data in event
        reason['attr'] += changed_attr
    super().async_update(event, reason)
python
{ "resource": "" }
q15077
Daylight.status
train
def status(self):
    """Return the daylight phase as a descriptive string.

    Unknown status codes map to "unknown".
    """
    phases = {
        100: "nadir",
        110: "night_end",
        120: "nautical_dawn",
        130: "dawn",
        140: "sunrise_start",
        150: "sunrise_end",
        160: "golden_hour_1",
        170: "solar_noon",
        180: "golden_hour_2",
        190: "sunset_start",
        200: "sunset_end",
        210: "dusk",
        220: "nautical_dusk",
        230: "night_start",
    }
    return phases.get(self._status, "unknown")
python
{ "resource": "" }
q15078
Thermostat.async_set_config
train
async def async_set_config(self, data):
    """Send a config change to the thermostat.

    Example payload::

        {"mode": "auto", "heatsetpoint": 180}
    """
    config_field = self.deconz_id + '/config'
    await self._async_set_state_callback(config_field, data)
python
{ "resource": "" }
q15079
_metric_value
train
def _metric_value(value_str, metric_type): """ Return a Python-typed metric value from a metric value string. """ if metric_type in (int, float): try: return metric_type(value_str) except ValueError: raise ValueError("Invalid {} metric value: {!r}". format(metric_type.__class__.__name__, value_str)) elif metric_type is six.text_type: # In Python 3, decode('unicode_escape) requires bytes, so we need # to encode to bytes. This also works in Python 2. return value_str.strip('"').encode('utf-8').decode('unicode_escape') else: assert metric_type is bool lower_str = value_str.lower() if lower_str == 'true': return True elif lower_str == 'false': return False else: raise ValueError("Invalid boolean metric value: {!r}". format(value_str))
python
{ "resource": "" }
q15080
_metric_unit_from_name
train
def _metric_unit_from_name(metric_name):
    """
    Infer a human-readable unit string from a metric name.

    Returns `None` when no pattern in `_PATTERN_UNIT_LIST` matches.
    """
    for pattern, unit in _PATTERN_UNIT_LIST:
        if pattern.match(metric_name):
            return unit
    return None
python
{ "resource": "" }
q15081
MetricsContext._setup_metric_group_definitions
train
def _setup_metric_group_definitions(self): """ Return the dict of MetricGroupDefinition objects for this metrics context, by processing its 'metric-group-infos' property. """ # Dictionary of MetricGroupDefinition objects, by metric group name metric_group_definitions = dict() for mg_info in self.properties['metric-group-infos']: mg_name = mg_info['group-name'] mg_def = MetricGroupDefinition( name=mg_name, resource_class=_resource_class_from_group(mg_name), metric_definitions=dict()) for i, m_info in enumerate(mg_info['metric-infos']): m_name = m_info['metric-name'] m_def = MetricDefinition( index=i, name=m_name, type=_metric_type(m_info['metric-type']), unit=_metric_unit_from_name(m_name)) mg_def.metric_definitions[m_name] = m_def metric_group_definitions[mg_name] = mg_def return metric_group_definitions
python
{ "resource": "" }
q15082
MetricsResponse._setup_metric_group_values
train
def _setup_metric_group_values(self):
    """
    Return the list of MetricGroupValues objects for this metrics
    response, by processing its metrics response string.

    The lines in the metrics response string are::

        MetricsResponse: MetricsGroup{0,*}
                         <emptyline>  a third empty line at the end
        MetricsGroup:    MetricsGroupName
                         ObjectValues{0,*}
                         <emptyline>  a second empty line after each MG
        ObjectValues:    ObjectURI
                         Timestamp
                         ValueRow{1,*}
                         <emptyline>  a first empty line after this blk
    """
    mg_defs = self._metrics_context.metric_group_definitions
    metric_group_name = None
    resource_uri = None
    dt_timestamp = None
    object_values = None
    metric_group_values = list()
    # Line-by-line state machine. States:
    #   0 = expecting a metrics group name (or leading/trailing blanks)
    #   1 = expecting an object URI (or end of this metrics group)
    #   2 = expecting the timestamp line
    #   3 = expecting value rows (or end of this object's values)
    state = 0
    for mr_line in self._metrics_response_str.splitlines():
        if state == 0:
            if object_values is not None:
                # Store the result from the previous metric group
                mgv = MetricGroupValues(metric_group_name, object_values)
                metric_group_values.append(mgv)
                object_values = None
            if mr_line == '':
                # Skip initial (or trailing) empty lines
                pass
            else:
                # Process the next metrics group
                metric_group_name = mr_line.strip('"')  # No " or \ inside
                assert metric_group_name in mg_defs
                m_defs = mg_defs[metric_group_name].metric_definitions
                object_values = list()
                state = 1
        elif state == 1:
            if mr_line == '':
                # There are no (or no more) ObjectValues items in this
                # metrics group
                state = 0
            else:
                # There are ObjectValues items
                resource_uri = mr_line.strip('"')  # No " or \ inside
                state = 2
        elif state == 2:
            # Process the timestamp
            assert mr_line != ''
            try:
                dt_timestamp = datetime_from_timestamp(int(mr_line))
            except ValueError:
                # Sometimes, the returned epoch timestamp values are way
                # too large, e.g. 3651584404810066 (which would translate
                # to the year 115791 A.D.). Python datetime supports
                # up to the year 9999. We circumvent this issue by
                # simply using the current date&time.
                # TODO: Remove the circumvention for too large timestamps.
                dt_timestamp = datetime.now(pytz.utc)
            state = 3
        elif state == 3:
            if mr_line != '':
                # Process the metric values in the ValueRow line
                str_values = mr_line.split(',')
                metrics = dict()
                for m_name in m_defs:
                    m_def = m_defs[m_name]
                    m_type = m_def.type
                    m_value_str = str_values[m_def.index]
                    m_value = _metric_value(m_value_str, m_type)
                    metrics[m_name] = m_value
                ov = MetricObjectValues(
                    self._client, mg_defs[metric_group_name], resource_uri,
                    dt_timestamp, metrics)
                object_values.append(ov)
                # stay in this state, for more ValueRow lines
            else:
                # On the empty line after the last ValueRow line
                state = 1
    return metric_group_values
python
{ "resource": "" }
q15083
IdPool._expand
train
def _expand(self): """ Expand the free pool, if possible. If out of capacity w.r.t. the defined ID value range, ValueError is raised. """ assert not self._free # free pool is empty expand_end = self._expand_start + self._expand_len if expand_end > self._range_end: # This happens if the size of the value range is not a multiple # of the expansion chunk size. expand_end = self._range_end if self._expand_start == expand_end: raise ValueError("Out of capacity in ID pool") self._free = set(range(self._expand_start, expand_end)) self._expand_start = expand_end
python
{ "resource": "" }
q15084
IdPool.alloc
train
def alloc(self):
    """
    Allocate an ID value and return it.

    Raises:
      ValueError: Out of capacity in ID pool.
    """
    if not self._free:
        # Lazily grow the pool; raises ValueError when exhausted.
        self._expand()
    allocated = self._free.pop()
    self._used.add(allocated)
    return allocated
python
{ "resource": "" }
q15085
NicManager.create
train
def create(self, properties):
    """
    Create and configure a NIC in this Partition.

    The NIC must be backed by an adapter port (OSA, RoCE, or Hipersockets
    adapter). For OSA and Hipersockets adapters, set the
    "virtual-switch-uri" property to the URI of the virtual switch
    associated with the backing adapter port (see
    :class:`~zhmcclient.VirtualSwitch`). For RoCE adapters, set the
    "network-adapter-port-uri" property to the URI of the backing adapter
    port directly.

    Authorization requirements:

    * Object-access permission to this Partition.
    * Object-access permission to the backing Adapter for the new NIC.
    * Task permission to the "Partition Details" task.

    Parameters:

      properties (dict): Initial property values, as defined in section
        'Request body contents' of section 'Create NIC' in the
        :term:`HMC API` book.

    Returns:

      Nic: The resource object for the new NIC, with its 'element-uri'
      property set as returned by the HMC and the input properties set.

    Raises:

      :exc:`~zhmcclient.HTTPError`
      :exc:`~zhmcclient.ParseError`
      :exc:`~zhmcclient.AuthError`
      :exc:`~zhmcclient.ConnectionError`
    """
    result = self.session.post(self.partition.uri + '/nics',
                               body=properties)
    # There should not be overlaps, but just in case there are, the
    # HMC-returned properties overwrite the input properties.
    nic_props = copy.deepcopy(properties)
    nic_props.update(result)
    nic_name = nic_props.get(self._name_prop, None)
    nic_uri = nic_props[self._uri_prop]
    new_nic = Nic(self, nic_uri, nic_name, nic_props)
    self._name_uri_cache.update(nic_name, nic_uri)
    return new_nic
python
{ "resource": "" }
q15086
Nic.delete
train
def delete(self):
    """
    Delete this NIC on the HMC and drop it from the name-URI cache.

    Authorization requirements:

    * Object-access permission to the Partition containing this NIC.
    * Task permission to the "Partition Details" task.

    Raises:

      :exc:`~zhmcclient.HTTPError`
      :exc:`~zhmcclient.ParseError`
      :exc:`~zhmcclient.AuthError`
      :exc:`~zhmcclient.ConnectionError`
    """
    self.manager.session.delete(self._uri)
    nic_name = self.properties.get(self.manager._name_prop, None)
    self.manager._name_uri_cache.delete(nic_name)
python
{ "resource": "" }
q15087
Nic.update_properties
train
def update_properties(self, properties):
    """
    Update writeable properties of this NIC on the HMC and locally.

    Authorization requirements:

    * Object-access permission to the Partition containing this NIC.
    * Object-access permission to the backing Adapter for this NIC.
    * Task permission to the "Partition Details" task.

    Parameters:

      properties (dict): New values for the properties to be updated;
        properties not to be updated are omitted. Allowable properties
        are those with qualifier (w) in section 'Data model - NIC
        Element Object' in the :term:`HMC API` book.

    Raises:

      :exc:`~zhmcclient.HTTPError`
      :exc:`~zhmcclient.ParseError`
      :exc:`~zhmcclient.AuthError`
      :exc:`~zhmcclient.ConnectionError`
    """
    self.manager.session.post(self.uri, body=properties)
    renaming = self.manager._name_prop in properties
    if renaming:
        # Drop the cache entry under the old name before it changes.
        self.manager._name_uri_cache.delete(self.name)
    self.properties.update(copy.deepcopy(properties))
    if renaming:
        # Register the cache entry under the new name.
        self.manager._name_uri_cache.update(self.name, self.uri)
python
{ "resource": "" }
q15088
_NameUriCache.get
train
def get(self, name):
    """
    Return the resource URI for *name*.

    On a cache miss the cache is refreshed from the HMC with all
    resources of the owning manager; if the name is still unknown after
    that, ``NotFound`` is raised.
    """
    self.auto_invalidate()
    try:
        return self._uris[name]
    except KeyError:
        pass
    # Miss: re-populate from the HMC and try exactly once more.
    self.refresh()
    try:
        return self._uris[name]
    except KeyError:
        raise NotFound({self._manager._name_prop: name}, self._manager)
python
{ "resource": "" }
q15089
_NameUriCache.auto_invalidate
train
def auto_invalidate(self):
    """
    Invalidate the cache once its time-to-live has elapsed.
    """
    expires_at = self._invalidated + timedelta(seconds=self._timetolive)
    if datetime.now() > expires_at:
        self.invalidate()
python
{ "resource": "" }
q15090
_NameUriCache.refresh
train
def refresh(self):
    """
    Re-populate the Name-URI cache with a full resource listing from the
    HMC, after invalidating the current content.
    """
    self.invalidate()
    # If list() does not include names, request full properties instead.
    need_full = not self._manager._list_has_name
    resources = self._manager.list(full_properties=need_full)
    self.update_from(resources)
python
{ "resource": "" }
q15091
_NameUriCache.update_from
train
def update_from(self, res_list):
    """
    Merge cache entries from the provided resource list.

    Each resource's name/URI pair is handed to update(); other existing
    cache entries remain unchanged.
    """
    name_prop = self._manager._name_prop
    uri_prop = self._manager._uri_prop
    for res in res_list:
        # Read the properties dict directly, in order to make sure we
        # don't drive additional HMC interactions.
        self.update(res.properties.get(name_prop, None),
                    res.properties.get(uri_prop, None))
python
{ "resource": "" }
q15092
BaseManager._divide_filter_args
train
def _divide_filter_args(self, filter_args): """ Divide the filter arguments into filter query parameters for filtering on the server side, and the remaining client-side filters. Parameters: filter_args (dict): Filter arguments that narrow the list of returned resources to those that match the specified filter arguments. For details, see :ref:`Filtering`. `None` causes no filtering to happen, i.e. all resources are returned. Returns: : tuple (query_parms_str, client_filter_args) """ query_parms = [] # query parameter strings client_filter_args = {} if filter_args is not None: for prop_name in filter_args: prop_match = filter_args[prop_name] if prop_name in self._query_props: self._append_query_parms(query_parms, prop_name, prop_match) else: client_filter_args[prop_name] = prop_match query_parms_str = '&'.join(query_parms) if query_parms_str: query_parms_str = '?{}'.format(query_parms_str) return query_parms_str, client_filter_args
python
{ "resource": "" }
q15093
BaseManager._matches_filters
train
def _matches_filters(self, obj, filter_args): """ Return a boolean indicating whether a resource object matches a set of filter arguments. This is used for client-side filtering. Depending on the properties specified in the filter arguments, this method retrieves the resource properties from the HMC. Parameters: obj (BaseResource): Resource object. filter_args (dict): Filter arguments. For details, see :ref:`Filtering`. `None` causes the resource to always match. Returns: bool: Boolean indicating whether the resource object matches the filter arguments. """ if filter_args is not None: for prop_name in filter_args: prop_match = filter_args[prop_name] if not self._matches_prop(obj, prop_name, prop_match): return False return True
python
{ "resource": "" }
q15094
BaseManager._matches_prop
train
def _matches_prop(self, obj, prop_name, prop_match):
    """
    Return a boolean indicating whether a resource object matches with
    a single property against a property match value.

    This is used for client-side filtering. Depending on the specified
    property, this method retrieves the resource properties from the HMC.

    Parameters:

      obj (BaseResource): Resource object.

      prop_name (string): Name of the property to match.

      prop_match: Property match value that is used to match the actual
        value of the specified property against, as follows:

        - If the match value is a list or tuple, this method is invoked
          recursively to find whether one or more match values in the
          list match.

        - Else if the property is of string type, its value is matched
          by interpreting the match value as a regular expression.

        - Else the property value is matched by exact value comparison
          with the match value.

    Returns:

      bool: Boolean indicating whether the resource object matches w.r.t.
        the specified property and the match value.
    """
    if isinstance(prop_match, (list, tuple)):
        # List items are logically ORed, so one matching item suffices.
        for pm in prop_match:
            if self._matches_prop(obj, prop_name, pm):
                return True
    else:
        # Some lists of resources do not have all properties, for example
        # Hipersocket adapters do not have a "card-location" property.
        # If a filter property does not exist on a resource, the resource
        # does not match.
        try:
            prop_value = obj.get_property(prop_name)
        except KeyError:
            return False
        if isinstance(prop_value, six.string_types):
            # HMC resource property is Enum String or (non-enum) String,
            # and is both matched by regexp matching. Ideally, regexp
            # matching should only be done for non-enum strings, but
            # distinguishing them is not possible given that the client
            # has no knowledge about the properties.

            # The regexp matching implemented in the HMC requires begin and
            # end of the string value to match, even if the '^' for begin
            # and '$' for end are not specified in the pattern. The code
            # here is consistent with that: We add end matching to the
            # pattern, and begin matching is done by re.match()
            # automatically.
            re_match = prop_match + '$'
            m = re.match(re_match, prop_value)
            if m:
                return True
        else:
            if prop_value == prop_match:
                return True
    # Falls through here when no list item matched, the regexp did not
    # match, or the exact comparison failed.
    return False
python
{ "resource": "" }
q15095
BaseManager.resource_object
train
def resource_object(self, uri_or_oid, props=None):
    """
    Return a minimalistic Python resource object for this resource class,
    scoped to this manager (internal helper, not normally called by
    users).

    The returned object automatically gets its object-id/uri properties,
    'parent' and 'class' set; *props* may supply additional properties.

    Parameters:

      uri_or_oid (string): `object-uri` or `object-id` of the resource.

      props (dict): Property values in addition to the minimal list of
        properties that are set automatically.

    Returns:

      Subclass of :class:`~zhmcclient.BaseResource`: A Python resource
      object for this resource class.
    """
    if uri_or_oid.startswith('/api/'):
        # A full object-uri was given; derive the oid from its last
        # path segment.
        assert uri_or_oid[-1] != '/'
        uri = uri_or_oid
        oid = uri.split('/')[-1]
    else:
        # A bare object-id was given; compose the uri from the base uri.
        assert '/' not in uri_or_oid
        oid = uri_or_oid
        uri = '{}/{}'.format(self._base_uri, oid)
    res_props = {
        self._oid_prop: oid,
        'parent': self.parent.uri if self.parent is not None else None,
        'class': self.class_name,
    }
    name = None
    if props:
        res_props.update(props)
        name = props.get(self._name_prop, None)
    return self.resource_class(self, uri, name, res_props)
python
{ "resource": "" }
q15096
FakedBaseResource.add_resources
train
def add_resources(self, resources):
    """
    Add faked child resources to this resource, from the provided
    resource definitions.

    Duplicate resource names in the same scope are not permitted.

    This method is typically used to load the faked HMC with an
    initial resource state, but it can be invoked multiple times and
    also on faked child resources (e.g. on a faked CPC).

    The resource dictionary describes an alternating tree of resource
    managers and resources: each key names the manager attribute for a
    kind of child resource (e.g. ``cpcs`` for the CPCs of an HMC,
    named after the corresponding manager attribute such as
    :attr:`zhmcclient_mock.FakedCpc.adapters`), and maps to a list of
    child resource definitions. Each child resource definition is a
    dict with a ``properties`` key and, optionally, further manager
    keys for its own child resources.

    Certain resource properties are auto-generated when not specified,
    e.g. the object-id or element-id properties and the corresponding
    uri properties; for details, see the ``add()`` methods of the
    various faked resource managers (e.g.
    :meth:`zhmcclient_mock.FakedCpcManager.add`).

    Parameters:

      resources (dict): Resource definitions of the faked child
        resources to be added, as described above. Example for
        targeting a faked HMC, adding a CPC with one adapter::

            resources = {
                'cpcs': [  # name of manager attribute
                    {
                        'properties': {'name': 'cpc_1'},
                        'adapters': [  # name of manager attribute
                            {
                                'properties': {
                                    'object-id': '12',
                                    'name': 'ad_1',
                                },
                                'ports': [
                                    {'properties': {'name': 'port_1'}},
                                ],
                            },
                        ],
                    },
                ],
            }

    Raises:

      :exc:`zhmcclient_mock.InputError`: Some issue with the input
        resources.
    """
    # Each top-level key is the manager attribute name for one kind of
    # child resource; its value is the list of child definitions.
    for child_attr, child_defs in resources.items():
        self._process_child_list(self, child_attr, child_defs)
python
{ "resource": "" }
q15097
FakedBaseManager.add
train
def add(self, properties):
    """
    Add a faked resource to this manager.

    The new resource is also registered with the faked HMC, enabling
    URI-based lookup.

    Parameters:

      properties (dict): Resource properties. If the URI property
        (e.g. 'object-uri') or the object ID property (e.g.
        'object-id') are not specified, they will be auto-generated.

    Returns:

      FakedBaseResource: The faked resource object.
    """
    new_resource = self.resource_class(self, properties)
    # Register by oid for manager-level lookup ...
    self._resources[new_resource.oid] = new_resource
    # ... and by uri for HMC-wide lookup.
    self._hmc.all_resources[new_resource.uri] = new_resource
    return new_resource
python
{ "resource": "" }
q15098
FakedBaseManager.remove
train
def remove(self, oid):
    """
    Remove a faked resource from this manager, and deregister it from
    the faked HMC's URI-based lookup.

    Parameters:

      oid (string): The object ID of the resource (e.g. value of the
        'object-id' property).
    """
    # pop() raises KeyError for an unknown oid, like the original
    # subscript access would.
    removed = self._resources.pop(oid)
    del self._hmc.all_resources[removed.uri]
python
{ "resource": "" }
q15099
FakedBaseManager.list
train
def list(self, filter_args=None):
    """
    List the faked resources of this manager, applying optional
    client-side filtering.

    Parameters:

      filter_args (dict): Filter arguments. `None` causes no
        filtering to happen. See
        :meth:`~zhmcclient.BaseManager.list()` for details.

    Returns:

      list of FakedBaseResource: The faked resource objects of this
      manager that match the filter arguments.
    """
    return [resource for resource in self._resources.values()
            if self._matches_filters(resource, filter_args)]
python
{ "resource": "" }