id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
227,400
nfcpy/nfcpy
src/nfc/clf/acr122.py
Device.listen_tta
def listen_tta(self, target, timeout): """Listen as Type A Target is not supported.""" info = "{device} does not support listen as Type A Target" raise nfc.clf.UnsupportedTargetError(info.format(device=self))
python
def listen_tta(self, target, timeout): """Listen as Type A Target is not supported.""" info = "{device} does not support listen as Type A Target" raise nfc.clf.UnsupportedTargetError(info.format(device=self))
[ "def", "listen_tta", "(", "self", ",", "target", ",", "timeout", ")", ":", "info", "=", "\"{device} does not support listen as Type A Target\"", "raise", "nfc", ".", "clf", ".", "UnsupportedTargetError", "(", "info", ".", "format", "(", "device", "=", "self", ")", ")" ]
Listen as Type A Target is not supported.
[ "Listen", "as", "Type", "A", "Target", "is", "not", "supported", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/clf/acr122.py#L109-L112
227,401
nfcpy/nfcpy
src/nfc/clf/acr122.py
Chipset.command
def command(self, cmd_code, cmd_data, timeout): """Send a host command and return the chip response. """ log.log(logging.DEBUG-1, self.CMD[cmd_code]+" "+hexlify(cmd_data)) frame = bytearray([0xD4, cmd_code]) + bytearray(cmd_data) frame = bytearray([0xFF, 0x00, 0x00, 0x00, len(frame)]) + frame frame = self.ccid_xfr_block(frame, timeout) if not frame or len(frame) < 4: log.error("insufficient data for decoding chip response") raise IOError(errno.EIO, os.strerror(errno.EIO)) if not (frame[0] == 0xD5 and frame[1] == cmd_code + 1): log.error("received invalid chip response") raise IOError(errno.EIO, os.strerror(errno.EIO)) if not (frame[-2] == 0x90 and frame[-1] == 0x00): log.error("received pseudo apdu with error status") raise IOError(errno.EIO, os.strerror(errno.EIO)) return frame[2:-2]
python
def command(self, cmd_code, cmd_data, timeout): """Send a host command and return the chip response. """ log.log(logging.DEBUG-1, self.CMD[cmd_code]+" "+hexlify(cmd_data)) frame = bytearray([0xD4, cmd_code]) + bytearray(cmd_data) frame = bytearray([0xFF, 0x00, 0x00, 0x00, len(frame)]) + frame frame = self.ccid_xfr_block(frame, timeout) if not frame or len(frame) < 4: log.error("insufficient data for decoding chip response") raise IOError(errno.EIO, os.strerror(errno.EIO)) if not (frame[0] == 0xD5 and frame[1] == cmd_code + 1): log.error("received invalid chip response") raise IOError(errno.EIO, os.strerror(errno.EIO)) if not (frame[-2] == 0x90 and frame[-1] == 0x00): log.error("received pseudo apdu with error status") raise IOError(errno.EIO, os.strerror(errno.EIO)) return frame[2:-2]
[ "def", "command", "(", "self", ",", "cmd_code", ",", "cmd_data", ",", "timeout", ")", ":", "log", ".", "log", "(", "logging", ".", "DEBUG", "-", "1", ",", "self", ".", "CMD", "[", "cmd_code", "]", "+", "\" \"", "+", "hexlify", "(", "cmd_data", ")", ")", "frame", "=", "bytearray", "(", "[", "0xD4", ",", "cmd_code", "]", ")", "+", "bytearray", "(", "cmd_data", ")", "frame", "=", "bytearray", "(", "[", "0xFF", ",", "0x00", ",", "0x00", ",", "0x00", ",", "len", "(", "frame", ")", "]", ")", "+", "frame", "frame", "=", "self", ".", "ccid_xfr_block", "(", "frame", ",", "timeout", ")", "if", "not", "frame", "or", "len", "(", "frame", ")", "<", "4", ":", "log", ".", "error", "(", "\"insufficient data for decoding chip response\"", ")", "raise", "IOError", "(", "errno", ".", "EIO", ",", "os", ".", "strerror", "(", "errno", ".", "EIO", ")", ")", "if", "not", "(", "frame", "[", "0", "]", "==", "0xD5", "and", "frame", "[", "1", "]", "==", "cmd_code", "+", "1", ")", ":", "log", ".", "error", "(", "\"received invalid chip response\"", ")", "raise", "IOError", "(", "errno", ".", "EIO", ",", "os", ".", "strerror", "(", "errno", ".", "EIO", ")", ")", "if", "not", "(", "frame", "[", "-", "2", "]", "==", "0x90", "and", "frame", "[", "-", "1", "]", "==", "0x00", ")", ":", "log", ".", "error", "(", "\"received pseudo apdu with error status\"", ")", "raise", "IOError", "(", "errno", ".", "EIO", ",", "os", ".", "strerror", "(", "errno", ".", "EIO", ")", ")", "return", "frame", "[", "2", ":", "-", "2", "]" ]
Send a host command and return the chip response.
[ "Send", "a", "host", "command", "and", "return", "the", "chip", "response", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/clf/acr122.py#L223-L242
227,402
nfcpy/nfcpy
src/nfc/tag/tt4.py
Type4Tag.format
def format(self, version=None, wipe=None): """Erase the NDEF message on a Type 4 Tag. The :meth:`format` method writes the length of the NDEF message on a Type 4 Tag to zero, thus the tag will appear to be empty. If the *wipe* argument is set to some integer then :meth:`format` will also overwrite all user data with that integer (mod 256). Despite it's name, the :meth:`format` method can not format a blank tag to make it NDEF compatible; this requires proprietary information from the manufacturer. """ return super(Type4Tag, self).format(version, wipe)
python
def format(self, version=None, wipe=None): """Erase the NDEF message on a Type 4 Tag. The :meth:`format` method writes the length of the NDEF message on a Type 4 Tag to zero, thus the tag will appear to be empty. If the *wipe* argument is set to some integer then :meth:`format` will also overwrite all user data with that integer (mod 256). Despite it's name, the :meth:`format` method can not format a blank tag to make it NDEF compatible; this requires proprietary information from the manufacturer. """ return super(Type4Tag, self).format(version, wipe)
[ "def", "format", "(", "self", ",", "version", "=", "None", ",", "wipe", "=", "None", ")", ":", "return", "super", "(", "Type4Tag", ",", "self", ")", ".", "format", "(", "version", ",", "wipe", ")" ]
Erase the NDEF message on a Type 4 Tag. The :meth:`format` method writes the length of the NDEF message on a Type 4 Tag to zero, thus the tag will appear to be empty. If the *wipe* argument is set to some integer then :meth:`format` will also overwrite all user data with that integer (mod 256). Despite it's name, the :meth:`format` method can not format a blank tag to make it NDEF compatible; this requires proprietary information from the manufacturer.
[ "Erase", "the", "NDEF", "message", "on", "a", "Type", "4", "Tag", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt4.py#L395-L409
227,403
nfcpy/nfcpy
src/nfc/tag/tt4.py
Type4Tag.transceive
def transceive(self, data, timeout=None): """Transmit arbitrary data and receive the response. This is a low level method to send arbitrary data to the tag. While it should almost always be better to use :meth:`send_apdu` this is the only way to force a specific timeout value (which is otherwise derived from the Tag's answer to select). The *timeout* value is expected as a float specifying the seconds to wait. """ log.debug(">> {0}".format(hexlify(data))) data = self._dep.exchange(data, timeout) log.debug("<< {0}".format(hexlify(data) if data else "None")) return data
python
def transceive(self, data, timeout=None): """Transmit arbitrary data and receive the response. This is a low level method to send arbitrary data to the tag. While it should almost always be better to use :meth:`send_apdu` this is the only way to force a specific timeout value (which is otherwise derived from the Tag's answer to select). The *timeout* value is expected as a float specifying the seconds to wait. """ log.debug(">> {0}".format(hexlify(data))) data = self._dep.exchange(data, timeout) log.debug("<< {0}".format(hexlify(data) if data else "None")) return data
[ "def", "transceive", "(", "self", ",", "data", ",", "timeout", "=", "None", ")", ":", "log", ".", "debug", "(", "\">> {0}\"", ".", "format", "(", "hexlify", "(", "data", ")", ")", ")", "data", "=", "self", ".", "_dep", ".", "exchange", "(", "data", ",", "timeout", ")", "log", ".", "debug", "(", "\"<< {0}\"", ".", "format", "(", "hexlify", "(", "data", ")", "if", "data", "else", "\"None\"", ")", ")", "return", "data" ]
Transmit arbitrary data and receive the response. This is a low level method to send arbitrary data to the tag. While it should almost always be better to use :meth:`send_apdu` this is the only way to force a specific timeout value (which is otherwise derived from the Tag's answer to select). The *timeout* value is expected as a float specifying the seconds to wait.
[ "Transmit", "arbitrary", "data", "and", "receive", "the", "response", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt4.py#L425-L439
227,404
nfcpy/nfcpy
src/nfc/tag/__init__.py
Tag.format
def format(self, version=None, wipe=None): """Format the tag to make it NDEF compatible or erase content. The :meth:`format` method is highly dependent on the tag type, product and present status, for example a tag that has been made read-only with lock bits can no longer be formatted or erased. :meth:`format` creates the management information defined by the NFC Forum to describes the NDEF data area on the tag, this is also called NDEF mapping. The mapping may differ between versions of the tag specifications, the mapping to apply can be specified with the *version* argument as an 8-bit integer composed of a major version number in the most significant 4 bit and the minor version number in the least significant 4 bit. If *version* is not specified then the highest possible mapping version is used. If formatting of the tag is possible, the default behavior of :meth:`format` is to update only the management information required to make the tag appear as NDEF compatible and empty, previously existing data could still be read. If existing data shall be overwritten, the *wipe* argument can be set to an 8-bit integer that will be written to all available bytes. The :meth:`format` method returns :const:`True` if formatting was successful, :const:`False` if it failed for some reason, or :const:`None` if the present tag can not be formatted either because the tag does not support formatting or it is not implemented in nfcpy. """ if hasattr(self, "_format"): args = "version={0!r}, wipe={1!r}" args = args.format(version, wipe) log.debug("format({0})".format(args)) status = self._format(version, wipe) if status is True: self._ndef = None return status else: log.debug("this tag can not be formatted with nfcpy") return None
python
def format(self, version=None, wipe=None): """Format the tag to make it NDEF compatible or erase content. The :meth:`format` method is highly dependent on the tag type, product and present status, for example a tag that has been made read-only with lock bits can no longer be formatted or erased. :meth:`format` creates the management information defined by the NFC Forum to describes the NDEF data area on the tag, this is also called NDEF mapping. The mapping may differ between versions of the tag specifications, the mapping to apply can be specified with the *version* argument as an 8-bit integer composed of a major version number in the most significant 4 bit and the minor version number in the least significant 4 bit. If *version* is not specified then the highest possible mapping version is used. If formatting of the tag is possible, the default behavior of :meth:`format` is to update only the management information required to make the tag appear as NDEF compatible and empty, previously existing data could still be read. If existing data shall be overwritten, the *wipe* argument can be set to an 8-bit integer that will be written to all available bytes. The :meth:`format` method returns :const:`True` if formatting was successful, :const:`False` if it failed for some reason, or :const:`None` if the present tag can not be formatted either because the tag does not support formatting or it is not implemented in nfcpy. """ if hasattr(self, "_format"): args = "version={0!r}, wipe={1!r}" args = args.format(version, wipe) log.debug("format({0})".format(args)) status = self._format(version, wipe) if status is True: self._ndef = None return status else: log.debug("this tag can not be formatted with nfcpy") return None
[ "def", "format", "(", "self", ",", "version", "=", "None", ",", "wipe", "=", "None", ")", ":", "if", "hasattr", "(", "self", ",", "\"_format\"", ")", ":", "args", "=", "\"version={0!r}, wipe={1!r}\"", "args", "=", "args", ".", "format", "(", "version", ",", "wipe", ")", "log", ".", "debug", "(", "\"format({0})\"", ".", "format", "(", "args", ")", ")", "status", "=", "self", ".", "_format", "(", "version", ",", "wipe", ")", "if", "status", "is", "True", ":", "self", ".", "_ndef", "=", "None", "return", "status", "else", ":", "log", ".", "debug", "(", "\"this tag can not be formatted with nfcpy\"", ")", "return", "None" ]
Format the tag to make it NDEF compatible or erase content. The :meth:`format` method is highly dependent on the tag type, product and present status, for example a tag that has been made read-only with lock bits can no longer be formatted or erased. :meth:`format` creates the management information defined by the NFC Forum to describes the NDEF data area on the tag, this is also called NDEF mapping. The mapping may differ between versions of the tag specifications, the mapping to apply can be specified with the *version* argument as an 8-bit integer composed of a major version number in the most significant 4 bit and the minor version number in the least significant 4 bit. If *version* is not specified then the highest possible mapping version is used. If formatting of the tag is possible, the default behavior of :meth:`format` is to update only the management information required to make the tag appear as NDEF compatible and empty, previously existing data could still be read. If existing data shall be overwritten, the *wipe* argument can be set to an 8-bit integer that will be written to all available bytes. The :meth:`format` method returns :const:`True` if formatting was successful, :const:`False` if it failed for some reason, or :const:`None` if the present tag can not be formatted either because the tag does not support formatting or it is not implemented in nfcpy.
[ "Format", "the", "tag", "to", "make", "it", "NDEF", "compatible", "or", "erase", "content", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/__init__.py#L302-L344
227,405
nfcpy/nfcpy
src/nfc/tag/__init__.py
Tag.protect
def protect(self, password=None, read_protect=False, protect_from=0): """Protect a tag against future write or read access. :meth:`protect` attempts to make a tag readonly for all readers if *password* is :const:`None`, writeable only after authentication if a *password* is provided, and readable only after authentication if a *password* is provided and the *read_protect* flag is set. The *password* must be a byte or character sequence that provides sufficient key material for the tag specific protect function (this is documented separately for the individual tag types). As a special case, if *password* is set to an empty string the :meth:`protect` method uses a default manufacturer value if such is known. The *protect_from* argument sets the first memory unit to be protected. Memory units are tag type specific, for a Type 1 or Type 2 Tag a memory unit is 4 byte, for a Type 3 Tag it is 16 byte, and for a Type 4 Tag it is the complete NDEF data area. Note that the effect of protecting a tag without password can normally not be reversed. The return value of :meth:`protect` is either :const:`True` or :const:`False` depending on whether the operation was successful or not, or :const:`None` if the tag does not support custom protection (or it is not implemented). """ if hasattr(self, "_protect"): args = "password={0!r}, read_protect={1!r}, protect_from={2!r}" args = args.format(password, read_protect, protect_from) log.debug("protect({0})".format(args)) status = self._protect(password, read_protect, protect_from) if status is True: self._ndef = None return status else: log.error("this tag can not be protected with nfcpy") return None
python
def protect(self, password=None, read_protect=False, protect_from=0): """Protect a tag against future write or read access. :meth:`protect` attempts to make a tag readonly for all readers if *password* is :const:`None`, writeable only after authentication if a *password* is provided, and readable only after authentication if a *password* is provided and the *read_protect* flag is set. The *password* must be a byte or character sequence that provides sufficient key material for the tag specific protect function (this is documented separately for the individual tag types). As a special case, if *password* is set to an empty string the :meth:`protect` method uses a default manufacturer value if such is known. The *protect_from* argument sets the first memory unit to be protected. Memory units are tag type specific, for a Type 1 or Type 2 Tag a memory unit is 4 byte, for a Type 3 Tag it is 16 byte, and for a Type 4 Tag it is the complete NDEF data area. Note that the effect of protecting a tag without password can normally not be reversed. The return value of :meth:`protect` is either :const:`True` or :const:`False` depending on whether the operation was successful or not, or :const:`None` if the tag does not support custom protection (or it is not implemented). """ if hasattr(self, "_protect"): args = "password={0!r}, read_protect={1!r}, protect_from={2!r}" args = args.format(password, read_protect, protect_from) log.debug("protect({0})".format(args)) status = self._protect(password, read_protect, protect_from) if status is True: self._ndef = None return status else: log.error("this tag can not be protected with nfcpy") return None
[ "def", "protect", "(", "self", ",", "password", "=", "None", ",", "read_protect", "=", "False", ",", "protect_from", "=", "0", ")", ":", "if", "hasattr", "(", "self", ",", "\"_protect\"", ")", ":", "args", "=", "\"password={0!r}, read_protect={1!r}, protect_from={2!r}\"", "args", "=", "args", ".", "format", "(", "password", ",", "read_protect", ",", "protect_from", ")", "log", ".", "debug", "(", "\"protect({0})\"", ".", "format", "(", "args", ")", ")", "status", "=", "self", ".", "_protect", "(", "password", ",", "read_protect", ",", "protect_from", ")", "if", "status", "is", "True", ":", "self", ".", "_ndef", "=", "None", "return", "status", "else", ":", "log", ".", "error", "(", "\"this tag can not be protected with nfcpy\"", ")", "return", "None" ]
Protect a tag against future write or read access. :meth:`protect` attempts to make a tag readonly for all readers if *password* is :const:`None`, writeable only after authentication if a *password* is provided, and readable only after authentication if a *password* is provided and the *read_protect* flag is set. The *password* must be a byte or character sequence that provides sufficient key material for the tag specific protect function (this is documented separately for the individual tag types). As a special case, if *password* is set to an empty string the :meth:`protect` method uses a default manufacturer value if such is known. The *protect_from* argument sets the first memory unit to be protected. Memory units are tag type specific, for a Type 1 or Type 2 Tag a memory unit is 4 byte, for a Type 3 Tag it is 16 byte, and for a Type 4 Tag it is the complete NDEF data area. Note that the effect of protecting a tag without password can normally not be reversed. The return value of :meth:`protect` is either :const:`True` or :const:`False` depending on whether the operation was successful or not, or :const:`None` if the tag does not support custom protection (or it is not implemented).
[ "Protect", "a", "tag", "against", "future", "write", "or", "read", "access", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/__init__.py#L346-L384
227,406
nfcpy/nfcpy
src/nfc/snep/client.py
SnepClient.get_records
def get_records(self, records=None, timeout=1.0): """Get NDEF message records from a SNEP Server. .. versionadded:: 0.13 The :class:`ndef.Record` list given by *records* is encoded as the request message octets input to :meth:`get_octets`. The return value is an :class:`ndef.Record` list decoded from the response message octets returned by :meth:`get_octets`. Same as:: import ndef send_octets = ndef.message_encoder(records) rcvd_octets = snep_client.get_octets(send_octets, timeout) records = list(ndef.message_decoder(rcvd_octets)) """ octets = b''.join(ndef.message_encoder(records)) if records else None octets = self.get_octets(octets, timeout) if octets and len(octets) >= 3: return list(ndef.message_decoder(octets))
python
def get_records(self, records=None, timeout=1.0): """Get NDEF message records from a SNEP Server. .. versionadded:: 0.13 The :class:`ndef.Record` list given by *records* is encoded as the request message octets input to :meth:`get_octets`. The return value is an :class:`ndef.Record` list decoded from the response message octets returned by :meth:`get_octets`. Same as:: import ndef send_octets = ndef.message_encoder(records) rcvd_octets = snep_client.get_octets(send_octets, timeout) records = list(ndef.message_decoder(rcvd_octets)) """ octets = b''.join(ndef.message_encoder(records)) if records else None octets = self.get_octets(octets, timeout) if octets and len(octets) >= 3: return list(ndef.message_decoder(octets))
[ "def", "get_records", "(", "self", ",", "records", "=", "None", ",", "timeout", "=", "1.0", ")", ":", "octets", "=", "b''", ".", "join", "(", "ndef", ".", "message_encoder", "(", "records", ")", ")", "if", "records", "else", "None", "octets", "=", "self", ".", "get_octets", "(", "octets", ",", "timeout", ")", "if", "octets", "and", "len", "(", "octets", ")", ">=", "3", ":", "return", "list", "(", "ndef", ".", "message_decoder", "(", "octets", ")", ")" ]
Get NDEF message records from a SNEP Server. .. versionadded:: 0.13 The :class:`ndef.Record` list given by *records* is encoded as the request message octets input to :meth:`get_octets`. The return value is an :class:`ndef.Record` list decoded from the response message octets returned by :meth:`get_octets`. Same as:: import ndef send_octets = ndef.message_encoder(records) rcvd_octets = snep_client.get_octets(send_octets, timeout) records = list(ndef.message_decoder(rcvd_octets))
[ "Get", "NDEF", "message", "records", "from", "a", "SNEP", "Server", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/snep/client.py#L156-L176
227,407
nfcpy/nfcpy
src/nfc/snep/client.py
SnepClient.get_octets
def get_octets(self, octets=None, timeout=1.0): """Get NDEF message octets from a SNEP Server. .. versionadded:: 0.13 If the client has not yet a data link connection with a SNEP Server, it temporarily connects to the default SNEP Server, sends the message octets, disconnects after the server response, and returns the received message octets. """ if octets is None: # Send NDEF Message with one empty Record. octets = b'\xd0\x00\x00' if not self.socket: try: self.connect('urn:nfc:sn:snep') except nfc.llcp.ConnectRefused: return None else: self.release_connection = True else: self.release_connection = False try: request = struct.pack('>BBLL', 0x10, 0x01, 4 + len(octets), self.acceptable_length) + octets if not send_request(self.socket, request, self.send_miu): return None response = recv_response( self.socket, self.acceptable_length, timeout) if response is not None: if response[1] != 0x81: raise SnepError(response[1]) return response[6:] finally: if self.release_connection: self.close()
python
def get_octets(self, octets=None, timeout=1.0): """Get NDEF message octets from a SNEP Server. .. versionadded:: 0.13 If the client has not yet a data link connection with a SNEP Server, it temporarily connects to the default SNEP Server, sends the message octets, disconnects after the server response, and returns the received message octets. """ if octets is None: # Send NDEF Message with one empty Record. octets = b'\xd0\x00\x00' if not self.socket: try: self.connect('urn:nfc:sn:snep') except nfc.llcp.ConnectRefused: return None else: self.release_connection = True else: self.release_connection = False try: request = struct.pack('>BBLL', 0x10, 0x01, 4 + len(octets), self.acceptable_length) + octets if not send_request(self.socket, request, self.send_miu): return None response = recv_response( self.socket, self.acceptable_length, timeout) if response is not None: if response[1] != 0x81: raise SnepError(response[1]) return response[6:] finally: if self.release_connection: self.close()
[ "def", "get_octets", "(", "self", ",", "octets", "=", "None", ",", "timeout", "=", "1.0", ")", ":", "if", "octets", "is", "None", ":", "# Send NDEF Message with one empty Record.", "octets", "=", "b'\\xd0\\x00\\x00'", "if", "not", "self", ".", "socket", ":", "try", ":", "self", ".", "connect", "(", "'urn:nfc:sn:snep'", ")", "except", "nfc", ".", "llcp", ".", "ConnectRefused", ":", "return", "None", "else", ":", "self", ".", "release_connection", "=", "True", "else", ":", "self", ".", "release_connection", "=", "False", "try", ":", "request", "=", "struct", ".", "pack", "(", "'>BBLL'", ",", "0x10", ",", "0x01", ",", "4", "+", "len", "(", "octets", ")", ",", "self", ".", "acceptable_length", ")", "+", "octets", "if", "not", "send_request", "(", "self", ".", "socket", ",", "request", ",", "self", ".", "send_miu", ")", ":", "return", "None", "response", "=", "recv_response", "(", "self", ".", "socket", ",", "self", ".", "acceptable_length", ",", "timeout", ")", "if", "response", "is", "not", "None", ":", "if", "response", "[", "1", "]", "!=", "0x81", ":", "raise", "SnepError", "(", "response", "[", "1", "]", ")", "return", "response", "[", "6", ":", "]", "finally", ":", "if", "self", ".", "release_connection", ":", "self", ".", "close", "(", ")" ]
Get NDEF message octets from a SNEP Server. .. versionadded:: 0.13 If the client has not yet a data link connection with a SNEP Server, it temporarily connects to the default SNEP Server, sends the message octets, disconnects after the server response, and returns the received message octets.
[ "Get", "NDEF", "message", "octets", "from", "a", "SNEP", "Server", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/snep/client.py#L178-L221
227,408
nfcpy/nfcpy
src/nfc/snep/client.py
SnepClient.put
def put(self, ndef_message, timeout=1.0): """Send an NDEF message to the server. Temporarily connects to the default SNEP server if the client is not yet connected. .. deprecated:: 0.13 Use :meth:`put_records` or :meth:`put_octets`. """ if not self.socket: try: self.connect('urn:nfc:sn:snep') except nfc.llcp.ConnectRefused: return False else: self.release_connection = True else: self.release_connection = False try: ndef_msgsize = struct.pack('>L', len(str(ndef_message))) snep_request = b'\x10\x02' + ndef_msgsize + str(ndef_message) if send_request(self.socket, snep_request, self.send_miu): response = recv_response(self.socket, 0, timeout) if response is not None: if response[1] != 0x81: raise SnepError(response[1]) return True return False finally: if self.release_connection: self.close()
python
def put(self, ndef_message, timeout=1.0): """Send an NDEF message to the server. Temporarily connects to the default SNEP server if the client is not yet connected. .. deprecated:: 0.13 Use :meth:`put_records` or :meth:`put_octets`. """ if not self.socket: try: self.connect('urn:nfc:sn:snep') except nfc.llcp.ConnectRefused: return False else: self.release_connection = True else: self.release_connection = False try: ndef_msgsize = struct.pack('>L', len(str(ndef_message))) snep_request = b'\x10\x02' + ndef_msgsize + str(ndef_message) if send_request(self.socket, snep_request, self.send_miu): response = recv_response(self.socket, 0, timeout) if response is not None: if response[1] != 0x81: raise SnepError(response[1]) return True return False finally: if self.release_connection: self.close()
[ "def", "put", "(", "self", ",", "ndef_message", ",", "timeout", "=", "1.0", ")", ":", "if", "not", "self", ".", "socket", ":", "try", ":", "self", ".", "connect", "(", "'urn:nfc:sn:snep'", ")", "except", "nfc", ".", "llcp", ".", "ConnectRefused", ":", "return", "False", "else", ":", "self", ".", "release_connection", "=", "True", "else", ":", "self", ".", "release_connection", "=", "False", "try", ":", "ndef_msgsize", "=", "struct", ".", "pack", "(", "'>L'", ",", "len", "(", "str", "(", "ndef_message", ")", ")", ")", "snep_request", "=", "b'\\x10\\x02'", "+", "ndef_msgsize", "+", "str", "(", "ndef_message", ")", "if", "send_request", "(", "self", ".", "socket", ",", "snep_request", ",", "self", ".", "send_miu", ")", ":", "response", "=", "recv_response", "(", "self", ".", "socket", ",", "0", ",", "timeout", ")", "if", "response", "is", "not", "None", ":", "if", "response", "[", "1", "]", "!=", "0x81", ":", "raise", "SnepError", "(", "response", "[", "1", "]", ")", "return", "True", "return", "False", "finally", ":", "if", "self", ".", "release_connection", ":", "self", ".", "close", "(", ")" ]
Send an NDEF message to the server. Temporarily connects to the default SNEP server if the client is not yet connected. .. deprecated:: 0.13 Use :meth:`put_records` or :meth:`put_octets`.
[ "Send", "an", "NDEF", "message", "to", "the", "server", ".", "Temporarily", "connects", "to", "the", "default", "SNEP", "server", "if", "the", "client", "is", "not", "yet", "connected", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/snep/client.py#L223-L252
227,409
nfcpy/nfcpy
src/nfc/snep/client.py
SnepClient.put_records
def put_records(self, records, timeout=1.0): """Send NDEF message records to a SNEP Server. .. versionadded:: 0.13 The :class:`ndef.Record` list given by *records* is encoded and then send via :meth:`put_octets`. Same as:: import ndef octets = ndef.message_encoder(records) snep_client.put_octets(octets, timeout) """ octets = b''.join(ndef.message_encoder(records)) return self.put_octets(octets, timeout)
python
def put_records(self, records, timeout=1.0): """Send NDEF message records to a SNEP Server. .. versionadded:: 0.13 The :class:`ndef.Record` list given by *records* is encoded and then send via :meth:`put_octets`. Same as:: import ndef octets = ndef.message_encoder(records) snep_client.put_octets(octets, timeout) """ octets = b''.join(ndef.message_encoder(records)) return self.put_octets(octets, timeout)
[ "def", "put_records", "(", "self", ",", "records", ",", "timeout", "=", "1.0", ")", ":", "octets", "=", "b''", ".", "join", "(", "ndef", ".", "message_encoder", "(", "records", ")", ")", "return", "self", ".", "put_octets", "(", "octets", ",", "timeout", ")" ]
Send NDEF message records to a SNEP Server. .. versionadded:: 0.13 The :class:`ndef.Record` list given by *records* is encoded and then send via :meth:`put_octets`. Same as:: import ndef octets = ndef.message_encoder(records) snep_client.put_octets(octets, timeout)
[ "Send", "NDEF", "message", "records", "to", "a", "SNEP", "Server", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/snep/client.py#L254-L268
227,410
nfcpy/nfcpy
src/nfc/clf/rcs380.py
Device.sense_ttb
def sense_ttb(self, target): """Sense for a Type B Target is supported for 106, 212 and 424 kbps. However, there may not be any target that understands the activation command in other than 106 kbps. """ log.debug("polling for NFC-B technology") if target.brty not in ("106B", "212B", "424B"): message = "unsupported bitrate {0}".format(target.brty) raise nfc.clf.UnsupportedTargetError(message) self.chipset.in_set_rf(target.brty) self.chipset.in_set_protocol(self.chipset.in_set_protocol_defaults) self.chipset.in_set_protocol(initial_guard_time=20, add_sof=1, check_sof=1, add_eof=1, check_eof=1) sensb_req = (target.sensb_req if target.sensb_req else bytearray.fromhex("050010")) log.debug("send SENSB_REQ " + hexlify(sensb_req)) try: sensb_res = self.chipset.in_comm_rf(sensb_req, 30) except CommunicationError as error: if error != "RECEIVE_TIMEOUT_ERROR": log.debug(error) return None if len(sensb_res) >= 12 and sensb_res[0] == 0x50: log.debug("rcvd SENSB_RES " + hexlify(sensb_res)) return nfc.clf.RemoteTarget(target.brty, sensb_res=sensb_res)
python
def sense_ttb(self, target): """Sense for a Type B Target is supported for 106, 212 and 424 kbps. However, there may not be any target that understands the activation command in other than 106 kbps. """ log.debug("polling for NFC-B technology") if target.brty not in ("106B", "212B", "424B"): message = "unsupported bitrate {0}".format(target.brty) raise nfc.clf.UnsupportedTargetError(message) self.chipset.in_set_rf(target.brty) self.chipset.in_set_protocol(self.chipset.in_set_protocol_defaults) self.chipset.in_set_protocol(initial_guard_time=20, add_sof=1, check_sof=1, add_eof=1, check_eof=1) sensb_req = (target.sensb_req if target.sensb_req else bytearray.fromhex("050010")) log.debug("send SENSB_REQ " + hexlify(sensb_req)) try: sensb_res = self.chipset.in_comm_rf(sensb_req, 30) except CommunicationError as error: if error != "RECEIVE_TIMEOUT_ERROR": log.debug(error) return None if len(sensb_res) >= 12 and sensb_res[0] == 0x50: log.debug("rcvd SENSB_RES " + hexlify(sensb_res)) return nfc.clf.RemoteTarget(target.brty, sensb_res=sensb_res)
[ "def", "sense_ttb", "(", "self", ",", "target", ")", ":", "log", ".", "debug", "(", "\"polling for NFC-B technology\"", ")", "if", "target", ".", "brty", "not", "in", "(", "\"106B\"", ",", "\"212B\"", ",", "\"424B\"", ")", ":", "message", "=", "\"unsupported bitrate {0}\"", ".", "format", "(", "target", ".", "brty", ")", "raise", "nfc", ".", "clf", ".", "UnsupportedTargetError", "(", "message", ")", "self", ".", "chipset", ".", "in_set_rf", "(", "target", ".", "brty", ")", "self", ".", "chipset", ".", "in_set_protocol", "(", "self", ".", "chipset", ".", "in_set_protocol_defaults", ")", "self", ".", "chipset", ".", "in_set_protocol", "(", "initial_guard_time", "=", "20", ",", "add_sof", "=", "1", ",", "check_sof", "=", "1", ",", "add_eof", "=", "1", ",", "check_eof", "=", "1", ")", "sensb_req", "=", "(", "target", ".", "sensb_req", "if", "target", ".", "sensb_req", "else", "bytearray", ".", "fromhex", "(", "\"050010\"", ")", ")", "log", ".", "debug", "(", "\"send SENSB_REQ \"", "+", "hexlify", "(", "sensb_req", ")", ")", "try", ":", "sensb_res", "=", "self", ".", "chipset", ".", "in_comm_rf", "(", "sensb_req", ",", "30", ")", "except", "CommunicationError", "as", "error", ":", "if", "error", "!=", "\"RECEIVE_TIMEOUT_ERROR\"", ":", "log", ".", "debug", "(", "error", ")", "return", "None", "if", "len", "(", "sensb_res", ")", ">=", "12", "and", "sensb_res", "[", "0", "]", "==", "0x50", ":", "log", ".", "debug", "(", "\"rcvd SENSB_RES \"", "+", "hexlify", "(", "sensb_res", ")", ")", "return", "nfc", ".", "clf", ".", "RemoteTarget", "(", "target", ".", "brty", ",", "sensb_res", "=", "sensb_res", ")" ]
Sense for a Type B Target is supported for 106, 212 and 424 kbps. However, there may not be any target that understands the activation command in other than 106 kbps.
[ "Sense", "for", "a", "Type", "B", "Target", "is", "supported", "for", "106", "212", "and", "424", "kbps", ".", "However", "there", "may", "not", "be", "any", "target", "that", "understands", "the", "activation", "command", "in", "other", "than", "106", "kbps", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/clf/rcs380.py#L452-L482
227,411
nfcpy/nfcpy
src/nfc/clf/rcs380.py
Device.sense_ttf
def sense_ttf(self, target): """Sense for a Type F Target is supported for 212 and 424 kbps. """ log.debug("polling for NFC-F technology") if target.brty not in ("212F", "424F"): message = "unsupported bitrate {0}".format(target.brty) raise nfc.clf.UnsupportedTargetError(message) self.chipset.in_set_rf(target.brty) self.chipset.in_set_protocol(self.chipset.in_set_protocol_defaults) self.chipset.in_set_protocol(initial_guard_time=24) sensf_req = (target.sensf_req if target.sensf_req else bytearray.fromhex("00FFFF0100")) log.debug("send SENSF_REQ " + hexlify(sensf_req)) try: frame = chr(len(sensf_req)+1) + sensf_req frame = self.chipset.in_comm_rf(frame, 10) except CommunicationError as error: if error != "RECEIVE_TIMEOUT_ERROR": log.debug(error) return None if len(frame) >= 18 and frame[0] == len(frame) and frame[1] == 1: log.debug("rcvd SENSF_RES " + hexlify(frame[1:])) return nfc.clf.RemoteTarget(target.brty, sensf_res=frame[1:])
python
def sense_ttf(self, target): """Sense for a Type F Target is supported for 212 and 424 kbps. """ log.debug("polling for NFC-F technology") if target.brty not in ("212F", "424F"): message = "unsupported bitrate {0}".format(target.brty) raise nfc.clf.UnsupportedTargetError(message) self.chipset.in_set_rf(target.brty) self.chipset.in_set_protocol(self.chipset.in_set_protocol_defaults) self.chipset.in_set_protocol(initial_guard_time=24) sensf_req = (target.sensf_req if target.sensf_req else bytearray.fromhex("00FFFF0100")) log.debug("send SENSF_REQ " + hexlify(sensf_req)) try: frame = chr(len(sensf_req)+1) + sensf_req frame = self.chipset.in_comm_rf(frame, 10) except CommunicationError as error: if error != "RECEIVE_TIMEOUT_ERROR": log.debug(error) return None if len(frame) >= 18 and frame[0] == len(frame) and frame[1] == 1: log.debug("rcvd SENSF_RES " + hexlify(frame[1:])) return nfc.clf.RemoteTarget(target.brty, sensf_res=frame[1:])
[ "def", "sense_ttf", "(", "self", ",", "target", ")", ":", "log", ".", "debug", "(", "\"polling for NFC-F technology\"", ")", "if", "target", ".", "brty", "not", "in", "(", "\"212F\"", ",", "\"424F\"", ")", ":", "message", "=", "\"unsupported bitrate {0}\"", ".", "format", "(", "target", ".", "brty", ")", "raise", "nfc", ".", "clf", ".", "UnsupportedTargetError", "(", "message", ")", "self", ".", "chipset", ".", "in_set_rf", "(", "target", ".", "brty", ")", "self", ".", "chipset", ".", "in_set_protocol", "(", "self", ".", "chipset", ".", "in_set_protocol_defaults", ")", "self", ".", "chipset", ".", "in_set_protocol", "(", "initial_guard_time", "=", "24", ")", "sensf_req", "=", "(", "target", ".", "sensf_req", "if", "target", ".", "sensf_req", "else", "bytearray", ".", "fromhex", "(", "\"00FFFF0100\"", ")", ")", "log", ".", "debug", "(", "\"send SENSF_REQ \"", "+", "hexlify", "(", "sensf_req", ")", ")", "try", ":", "frame", "=", "chr", "(", "len", "(", "sensf_req", ")", "+", "1", ")", "+", "sensf_req", "frame", "=", "self", ".", "chipset", ".", "in_comm_rf", "(", "frame", ",", "10", ")", "except", "CommunicationError", "as", "error", ":", "if", "error", "!=", "\"RECEIVE_TIMEOUT_ERROR\"", ":", "log", ".", "debug", "(", "error", ")", "return", "None", "if", "len", "(", "frame", ")", ">=", "18", "and", "frame", "[", "0", "]", "==", "len", "(", "frame", ")", "and", "frame", "[", "1", "]", "==", "1", ":", "log", ".", "debug", "(", "\"rcvd SENSF_RES \"", "+", "hexlify", "(", "frame", "[", "1", ":", "]", ")", ")", "return", "nfc", ".", "clf", ".", "RemoteTarget", "(", "target", ".", "brty", ",", "sensf_res", "=", "frame", "[", "1", ":", "]", ")" ]
Sense for a Type F Target is supported for 212 and 424 kbps.
[ "Sense", "for", "a", "Type", "F", "Target", "is", "supported", "for", "212", "and", "424", "kbps", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/clf/rcs380.py#L484-L512
227,412
nfcpy/nfcpy
src/nfc/clf/rcs380.py
Device.listen_ttf
def listen_ttf(self, target, timeout): """Listen as Type F Target is supported for either 212 or 424 kbps.""" if target.brty not in ('212F', '424F'): info = "unsupported target bitrate: %r" % target.brty raise nfc.clf.UnsupportedTargetError(info) if target.sensf_res is None: raise ValueError("sensf_res is required") if len(target.sensf_res) != 19: raise ValueError("sensf_res must be 19 byte") self.chipset.tg_set_rf(target.brty) self.chipset.tg_set_protocol(self.chipset.tg_set_protocol_defaults) self.chipset.tg_set_protocol(rf_off_error=False) recv_timeout = min(int(1000 * timeout), 0xFFFF) time_to_return = time.time() + timeout transmit_data = sensf_req = sensf_res = None while recv_timeout > 0: if transmit_data: log.debug("%s send %s", target.brty, hexlify(transmit_data)) log.debug("%s wait recv %d ms", target.brty, recv_timeout) try: data = self.chipset.tg_comm_rf(recv_timeout=recv_timeout, transmit_data=transmit_data) except CommunicationError as error: log.debug(error) continue finally: recv_timeout = int((time_to_return - time.time()) * 1E3) transmit_data = None assert target.brty == ('106A', '212F', '424F')[data[0]-11] log.debug("%s rcvd %s", target.brty, hexlify(buffer(data, 7))) if len(data) > 7 and len(data)-7 == data[7]: if sensf_req and data[9:17] == target.sensf_res[1:9]: self.chipset.tg_set_protocol(rf_off_error=True) target = nfc.clf.LocalTarget(target.brty) target.sensf_req = sensf_req target.sensf_res = sensf_res target.tt3_cmd = data[8:] return target if len(data) == 13 and data[7] == 6 and data[8] == 0: (sensf_req, sensf_res) = (data[8:], target.sensf_res[:]) if (((sensf_req[1] == 255 or sensf_req[1] == sensf_res[17]) and (sensf_req[2] == 255 or sensf_req[2] == sensf_res[18]))): transmit_data = sensf_res[0:17] if sensf_req[3] == 1: transmit_data += sensf_res[17:19] if sensf_req[3] == 2: transmit_data += b"\x00" transmit_data += chr(1 << (target.brty == "424F")) transmit_data = chr(len(transmit_data)+1) + transmit_data
python
def listen_ttf(self, target, timeout): """Listen as Type F Target is supported for either 212 or 424 kbps.""" if target.brty not in ('212F', '424F'): info = "unsupported target bitrate: %r" % target.brty raise nfc.clf.UnsupportedTargetError(info) if target.sensf_res is None: raise ValueError("sensf_res is required") if len(target.sensf_res) != 19: raise ValueError("sensf_res must be 19 byte") self.chipset.tg_set_rf(target.brty) self.chipset.tg_set_protocol(self.chipset.tg_set_protocol_defaults) self.chipset.tg_set_protocol(rf_off_error=False) recv_timeout = min(int(1000 * timeout), 0xFFFF) time_to_return = time.time() + timeout transmit_data = sensf_req = sensf_res = None while recv_timeout > 0: if transmit_data: log.debug("%s send %s", target.brty, hexlify(transmit_data)) log.debug("%s wait recv %d ms", target.brty, recv_timeout) try: data = self.chipset.tg_comm_rf(recv_timeout=recv_timeout, transmit_data=transmit_data) except CommunicationError as error: log.debug(error) continue finally: recv_timeout = int((time_to_return - time.time()) * 1E3) transmit_data = None assert target.brty == ('106A', '212F', '424F')[data[0]-11] log.debug("%s rcvd %s", target.brty, hexlify(buffer(data, 7))) if len(data) > 7 and len(data)-7 == data[7]: if sensf_req and data[9:17] == target.sensf_res[1:9]: self.chipset.tg_set_protocol(rf_off_error=True) target = nfc.clf.LocalTarget(target.brty) target.sensf_req = sensf_req target.sensf_res = sensf_res target.tt3_cmd = data[8:] return target if len(data) == 13 and data[7] == 6 and data[8] == 0: (sensf_req, sensf_res) = (data[8:], target.sensf_res[:]) if (((sensf_req[1] == 255 or sensf_req[1] == sensf_res[17]) and (sensf_req[2] == 255 or sensf_req[2] == sensf_res[18]))): transmit_data = sensf_res[0:17] if sensf_req[3] == 1: transmit_data += sensf_res[17:19] if sensf_req[3] == 2: transmit_data += b"\x00" transmit_data += chr(1 << (target.brty == "424F")) transmit_data = chr(len(transmit_data)+1) + transmit_data
[ "def", "listen_ttf", "(", "self", ",", "target", ",", "timeout", ")", ":", "if", "target", ".", "brty", "not", "in", "(", "'212F'", ",", "'424F'", ")", ":", "info", "=", "\"unsupported target bitrate: %r\"", "%", "target", ".", "brty", "raise", "nfc", ".", "clf", ".", "UnsupportedTargetError", "(", "info", ")", "if", "target", ".", "sensf_res", "is", "None", ":", "raise", "ValueError", "(", "\"sensf_res is required\"", ")", "if", "len", "(", "target", ".", "sensf_res", ")", "!=", "19", ":", "raise", "ValueError", "(", "\"sensf_res must be 19 byte\"", ")", "self", ".", "chipset", ".", "tg_set_rf", "(", "target", ".", "brty", ")", "self", ".", "chipset", ".", "tg_set_protocol", "(", "self", ".", "chipset", ".", "tg_set_protocol_defaults", ")", "self", ".", "chipset", ".", "tg_set_protocol", "(", "rf_off_error", "=", "False", ")", "recv_timeout", "=", "min", "(", "int", "(", "1000", "*", "timeout", ")", ",", "0xFFFF", ")", "time_to_return", "=", "time", ".", "time", "(", ")", "+", "timeout", "transmit_data", "=", "sensf_req", "=", "sensf_res", "=", "None", "while", "recv_timeout", ">", "0", ":", "if", "transmit_data", ":", "log", ".", "debug", "(", "\"%s send %s\"", ",", "target", ".", "brty", ",", "hexlify", "(", "transmit_data", ")", ")", "log", ".", "debug", "(", "\"%s wait recv %d ms\"", ",", "target", ".", "brty", ",", "recv_timeout", ")", "try", ":", "data", "=", "self", ".", "chipset", ".", "tg_comm_rf", "(", "recv_timeout", "=", "recv_timeout", ",", "transmit_data", "=", "transmit_data", ")", "except", "CommunicationError", "as", "error", ":", "log", ".", "debug", "(", "error", ")", "continue", "finally", ":", "recv_timeout", "=", "int", "(", "(", "time_to_return", "-", "time", ".", "time", "(", ")", ")", "*", "1E3", ")", "transmit_data", "=", "None", "assert", "target", ".", "brty", "==", "(", "'106A'", ",", "'212F'", ",", "'424F'", ")", "[", "data", "[", "0", "]", "-", "11", "]", "log", ".", "debug", "(", "\"%s rcvd %s\"", ",", "target", ".", "brty", ",", 
"hexlify", "(", "buffer", "(", "data", ",", "7", ")", ")", ")", "if", "len", "(", "data", ")", ">", "7", "and", "len", "(", "data", ")", "-", "7", "==", "data", "[", "7", "]", ":", "if", "sensf_req", "and", "data", "[", "9", ":", "17", "]", "==", "target", ".", "sensf_res", "[", "1", ":", "9", "]", ":", "self", ".", "chipset", ".", "tg_set_protocol", "(", "rf_off_error", "=", "True", ")", "target", "=", "nfc", ".", "clf", ".", "LocalTarget", "(", "target", ".", "brty", ")", "target", ".", "sensf_req", "=", "sensf_req", "target", ".", "sensf_res", "=", "sensf_res", "target", ".", "tt3_cmd", "=", "data", "[", "8", ":", "]", "return", "target", "if", "len", "(", "data", ")", "==", "13", "and", "data", "[", "7", "]", "==", "6", "and", "data", "[", "8", "]", "==", "0", ":", "(", "sensf_req", ",", "sensf_res", ")", "=", "(", "data", "[", "8", ":", "]", ",", "target", ".", "sensf_res", "[", ":", "]", ")", "if", "(", "(", "(", "sensf_req", "[", "1", "]", "==", "255", "or", "sensf_req", "[", "1", "]", "==", "sensf_res", "[", "17", "]", ")", "and", "(", "sensf_req", "[", "2", "]", "==", "255", "or", "sensf_req", "[", "2", "]", "==", "sensf_res", "[", "18", "]", ")", ")", ")", ":", "transmit_data", "=", "sensf_res", "[", "0", ":", "17", "]", "if", "sensf_req", "[", "3", "]", "==", "1", ":", "transmit_data", "+=", "sensf_res", "[", "17", ":", "19", "]", "if", "sensf_req", "[", "3", "]", "==", "2", ":", "transmit_data", "+=", "b\"\\x00\"", "transmit_data", "+=", "chr", "(", "1", "<<", "(", "target", ".", "brty", "==", "\"424F\"", ")", ")", "transmit_data", "=", "chr", "(", "len", "(", "transmit_data", ")", "+", "1", ")", "+", "transmit_data" ]
Listen as Type F Target is supported for either 212 or 424 kbps.
[ "Listen", "as", "Type", "F", "Target", "is", "supported", "for", "either", "212", "or", "424", "kbps", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/clf/rcs380.py#L671-L726
227,413
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaStandard.request_response
def request_response(self): """Verify that a card is still present and get its operating mode. The Request Response command returns the current operating state of the card. The operating state changes with the authentication process, a card is in Mode 0 after power-up or a Polling command, transitions to Mode 1 with Authentication1, to Mode 2 with Authentication2, and Mode 3 with any of the card issuance commands. The :meth:`request_response` method returns the mode as an integer. Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ a, b, e = self.pmm[3] & 7, self.pmm[3] >> 3 & 7, self.pmm[3] >> 6 timeout = 302E-6 * (b + 1 + a + 1) * 4**e data = self.send_cmd_recv_rsp(0x04, '', timeout, check_status=False) if len(data) != 1: log.debug("insufficient data received from tag") raise tt3.Type3TagCommandError(tt3.DATA_SIZE_ERROR) return data[0]
python
def request_response(self): """Verify that a card is still present and get its operating mode. The Request Response command returns the current operating state of the card. The operating state changes with the authentication process, a card is in Mode 0 after power-up or a Polling command, transitions to Mode 1 with Authentication1, to Mode 2 with Authentication2, and Mode 3 with any of the card issuance commands. The :meth:`request_response` method returns the mode as an integer. Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ a, b, e = self.pmm[3] & 7, self.pmm[3] >> 3 & 7, self.pmm[3] >> 6 timeout = 302E-6 * (b + 1 + a + 1) * 4**e data = self.send_cmd_recv_rsp(0x04, '', timeout, check_status=False) if len(data) != 1: log.debug("insufficient data received from tag") raise tt3.Type3TagCommandError(tt3.DATA_SIZE_ERROR) return data[0]
[ "def", "request_response", "(", "self", ")", ":", "a", ",", "b", ",", "e", "=", "self", ".", "pmm", "[", "3", "]", "&", "7", ",", "self", ".", "pmm", "[", "3", "]", ">>", "3", "&", "7", ",", "self", ".", "pmm", "[", "3", "]", ">>", "6", "timeout", "=", "302E-6", "*", "(", "b", "+", "1", "+", "a", "+", "1", ")", "*", "4", "**", "e", "data", "=", "self", ".", "send_cmd_recv_rsp", "(", "0x04", ",", "''", ",", "timeout", ",", "check_status", "=", "False", ")", "if", "len", "(", "data", ")", "!=", "1", ":", "log", ".", "debug", "(", "\"insufficient data received from tag\"", ")", "raise", "tt3", ".", "Type3TagCommandError", "(", "tt3", ".", "DATA_SIZE_ERROR", ")", "return", "data", "[", "0", "]" ]
Verify that a card is still present and get its operating mode. The Request Response command returns the current operating state of the card. The operating state changes with the authentication process, a card is in Mode 0 after power-up or a Polling command, transitions to Mode 1 with Authentication1, to Mode 2 with Authentication2, and Mode 3 with any of the card issuance commands. The :meth:`request_response` method returns the mode as an integer. Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Verify", "that", "a", "card", "is", "still", "present", "and", "get", "its", "operating", "mode", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L258-L279
227,414
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaStandard.search_service_code
def search_service_code(self, service_index): """Search for a service code that corresponds to an index. The Search Service Code command provides access to the iterable list of services and areas within the activated system. The *service_index* argument may be any value from 0 to 0xffff. As long as there is a service or area found for a given *service_index*, the information returned is a tuple with either one or two 16-bit integer elements. Two integers are returned for an area definition, the first is the area code and the second is the largest possible service index for the area. One integer, the service code, is returned for a service definition. The return value is :const:`None` if the *service_index* was not found. For example, to print all services and areas of the active system: :: for i in xrange(0x10000): area_or_service = tag.search_service_code(i) if area_or_service is None: break elif len(area_or_service) == 1: sc = area_or_service[0] print(nfc.tag.tt3.ServiceCode(sc >> 6, sc & 0x3f)) elif len(area_or_service) == 2: area_code, area_last = area_or_service print("Area {0:04x}--{0:04x}".format(area_code, area_last)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("search service code index {0}".format(service_index)) # The maximum response time is given by the value of PMM[3]. # Some cards (like RC-S860 with IC RC-S915) encode a value # that is too short, thus we use at lest 2 ms. a, e = self.pmm[3] & 7, self.pmm[3] >> 6 timeout = max(302E-6 * (a + 1) * 4**e, 0.002) data = pack("<H", service_index) data = self.send_cmd_recv_rsp(0x0A, data, timeout, check_status=False) if data != "\xFF\xFF": unpack_format = "<H" if len(data) == 2 else "<HH" return unpack(unpack_format, data)
python
def search_service_code(self, service_index): """Search for a service code that corresponds to an index. The Search Service Code command provides access to the iterable list of services and areas within the activated system. The *service_index* argument may be any value from 0 to 0xffff. As long as there is a service or area found for a given *service_index*, the information returned is a tuple with either one or two 16-bit integer elements. Two integers are returned for an area definition, the first is the area code and the second is the largest possible service index for the area. One integer, the service code, is returned for a service definition. The return value is :const:`None` if the *service_index* was not found. For example, to print all services and areas of the active system: :: for i in xrange(0x10000): area_or_service = tag.search_service_code(i) if area_or_service is None: break elif len(area_or_service) == 1: sc = area_or_service[0] print(nfc.tag.tt3.ServiceCode(sc >> 6, sc & 0x3f)) elif len(area_or_service) == 2: area_code, area_last = area_or_service print("Area {0:04x}--{0:04x}".format(area_code, area_last)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("search service code index {0}".format(service_index)) # The maximum response time is given by the value of PMM[3]. # Some cards (like RC-S860 with IC RC-S915) encode a value # that is too short, thus we use at lest 2 ms. a, e = self.pmm[3] & 7, self.pmm[3] >> 6 timeout = max(302E-6 * (a + 1) * 4**e, 0.002) data = pack("<H", service_index) data = self.send_cmd_recv_rsp(0x0A, data, timeout, check_status=False) if data != "\xFF\xFF": unpack_format = "<H" if len(data) == 2 else "<HH" return unpack(unpack_format, data)
[ "def", "search_service_code", "(", "self", ",", "service_index", ")", ":", "log", ".", "debug", "(", "\"search service code index {0}\"", ".", "format", "(", "service_index", ")", ")", "# The maximum response time is given by the value of PMM[3].", "# Some cards (like RC-S860 with IC RC-S915) encode a value", "# that is too short, thus we use at lest 2 ms.", "a", ",", "e", "=", "self", ".", "pmm", "[", "3", "]", "&", "7", ",", "self", ".", "pmm", "[", "3", "]", ">>", "6", "timeout", "=", "max", "(", "302E-6", "*", "(", "a", "+", "1", ")", "*", "4", "**", "e", ",", "0.002", ")", "data", "=", "pack", "(", "\"<H\"", ",", "service_index", ")", "data", "=", "self", ".", "send_cmd_recv_rsp", "(", "0x0A", ",", "data", ",", "timeout", ",", "check_status", "=", "False", ")", "if", "data", "!=", "\"\\xFF\\xFF\"", ":", "unpack_format", "=", "\"<H\"", "if", "len", "(", "data", ")", "==", "2", "else", "\"<HH\"", "return", "unpack", "(", "unpack_format", ",", "data", ")" ]
Search for a service code that corresponds to an index. The Search Service Code command provides access to the iterable list of services and areas within the activated system. The *service_index* argument may be any value from 0 to 0xffff. As long as there is a service or area found for a given *service_index*, the information returned is a tuple with either one or two 16-bit integer elements. Two integers are returned for an area definition, the first is the area code and the second is the largest possible service index for the area. One integer, the service code, is returned for a service definition. The return value is :const:`None` if the *service_index* was not found. For example, to print all services and areas of the active system: :: for i in xrange(0x10000): area_or_service = tag.search_service_code(i) if area_or_service is None: break elif len(area_or_service) == 1: sc = area_or_service[0] print(nfc.tag.tt3.ServiceCode(sc >> 6, sc & 0x3f)) elif len(area_or_service) == 2: area_code, area_last = area_or_service print("Area {0:04x}--{0:04x}".format(area_code, area_last)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Search", "for", "a", "service", "code", "that", "corresponds", "to", "an", "index", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L281-L323
227,415
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaStandard.request_system_code
def request_system_code(self): """Return all system codes that are registered in the card. A card has one or more system codes that correspond to logical partitions (systems). Each system has a system code that could be used in a polling command to activate that system. The system codes responded by the card are returned as a list of 16-bit integers. :: for system_code in tag.request_system_code(): print("System {0:04X}".format(system_code)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("request system code list") a, e = self.pmm[3] & 7, self.pmm[3] >> 6 timeout = max(302E-6 * (a + 1) * 4**e, 0.002) data = self.send_cmd_recv_rsp(0x0C, '', timeout, check_status=False) if len(data) != 1 + data[0] * 2: log.debug("insufficient data received from tag") raise tt3.Type3TagCommandError(tt3.DATA_SIZE_ERROR) return [unpack(">H", data[i:i+2])[0] for i in range(1, len(data), 2)]
python
def request_system_code(self): """Return all system codes that are registered in the card. A card has one or more system codes that correspond to logical partitions (systems). Each system has a system code that could be used in a polling command to activate that system. The system codes responded by the card are returned as a list of 16-bit integers. :: for system_code in tag.request_system_code(): print("System {0:04X}".format(system_code)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("request system code list") a, e = self.pmm[3] & 7, self.pmm[3] >> 6 timeout = max(302E-6 * (a + 1) * 4**e, 0.002) data = self.send_cmd_recv_rsp(0x0C, '', timeout, check_status=False) if len(data) != 1 + data[0] * 2: log.debug("insufficient data received from tag") raise tt3.Type3TagCommandError(tt3.DATA_SIZE_ERROR) return [unpack(">H", data[i:i+2])[0] for i in range(1, len(data), 2)]
[ "def", "request_system_code", "(", "self", ")", ":", "log", ".", "debug", "(", "\"request system code list\"", ")", "a", ",", "e", "=", "self", ".", "pmm", "[", "3", "]", "&", "7", ",", "self", ".", "pmm", "[", "3", "]", ">>", "6", "timeout", "=", "max", "(", "302E-6", "*", "(", "a", "+", "1", ")", "*", "4", "**", "e", ",", "0.002", ")", "data", "=", "self", ".", "send_cmd_recv_rsp", "(", "0x0C", ",", "''", ",", "timeout", ",", "check_status", "=", "False", ")", "if", "len", "(", "data", ")", "!=", "1", "+", "data", "[", "0", "]", "*", "2", ":", "log", ".", "debug", "(", "\"insufficient data received from tag\"", ")", "raise", "tt3", ".", "Type3TagCommandError", "(", "tt3", ".", "DATA_SIZE_ERROR", ")", "return", "[", "unpack", "(", "\">H\"", ",", "data", "[", "i", ":", "i", "+", "2", "]", ")", "[", "0", "]", "for", "i", "in", "range", "(", "1", ",", "len", "(", "data", ")", ",", "2", ")", "]" ]
Return all system codes that are registered in the card. A card has one or more system codes that correspond to logical partitions (systems). Each system has a system code that could be used in a polling command to activate that system. The system codes responded by the card are returned as a list of 16-bit integers. :: for system_code in tag.request_system_code(): print("System {0:04X}".format(system_code)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Return", "all", "system", "codes", "that", "are", "registered", "in", "the", "card", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L325-L347
227,416
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaLite.protect
def protect(self, password=None, read_protect=False, protect_from=0): """Protect a FeliCa Lite Tag. A FeliCa Lite Tag can be provisioned with a custom password (or the default manufacturer key if the password is an empty string or bytearray) to ensure that data retrieved by future read operations, after authentication, is genuine. Read protection is not supported. A non-empty *password* must provide at least 128 bit key material, in other words it must be a string or bytearray of length 16 or more. The memory unit for the value of *protect_from* is 16 byte, thus with ``protect_from=2`` bytes 0 to 31 are not protected. If *protect_from* is zero (the default value) and the Tag has valid NDEF management data, the NDEF RW Flag is set to read only. """ return super(FelicaLite, self).protect( password, read_protect, protect_from)
python
def protect(self, password=None, read_protect=False, protect_from=0): """Protect a FeliCa Lite Tag. A FeliCa Lite Tag can be provisioned with a custom password (or the default manufacturer key if the password is an empty string or bytearray) to ensure that data retrieved by future read operations, after authentication, is genuine. Read protection is not supported. A non-empty *password* must provide at least 128 bit key material, in other words it must be a string or bytearray of length 16 or more. The memory unit for the value of *protect_from* is 16 byte, thus with ``protect_from=2`` bytes 0 to 31 are not protected. If *protect_from* is zero (the default value) and the Tag has valid NDEF management data, the NDEF RW Flag is set to read only. """ return super(FelicaLite, self).protect( password, read_protect, protect_from)
[ "def", "protect", "(", "self", ",", "password", "=", "None", ",", "read_protect", "=", "False", ",", "protect_from", "=", "0", ")", ":", "return", "super", "(", "FelicaLite", ",", "self", ")", ".", "protect", "(", "password", ",", "read_protect", ",", "protect_from", ")" ]
Protect a FeliCa Lite Tag. A FeliCa Lite Tag can be provisioned with a custom password (or the default manufacturer key if the password is an empty string or bytearray) to ensure that data retrieved by future read operations, after authentication, is genuine. Read protection is not supported. A non-empty *password* must provide at least 128 bit key material, in other words it must be a string or bytearray of length 16 or more. The memory unit for the value of *protect_from* is 16 byte, thus with ``protect_from=2`` bytes 0 to 31 are not protected. If *protect_from* is zero (the default value) and the Tag has valid NDEF management data, the NDEF RW Flag is set to read only.
[ "Protect", "a", "FeliCa", "Lite", "Tag", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L492-L513
227,417
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaLite.format
def format(self, version=0x10, wipe=None): """Format a FeliCa Lite Tag for NDEF. """ return super(FelicaLite, self).format(version, wipe)
python
def format(self, version=0x10, wipe=None): """Format a FeliCa Lite Tag for NDEF. """ return super(FelicaLite, self).format(version, wipe)
[ "def", "format", "(", "self", ",", "version", "=", "0x10", ",", "wipe", "=", "None", ")", ":", "return", "super", "(", "FelicaLite", ",", "self", ")", ".", "format", "(", "version", ",", "wipe", ")" ]
Format a FeliCa Lite Tag for NDEF.
[ "Format", "a", "FeliCa", "Lite", "Tag", "for", "NDEF", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L626-L630
227,418
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaLite.read_without_mac
def read_without_mac(self, *blocks): """Read a number of data blocks without integrity check. This method accepts a variable number of integer arguments as the block numbers to read. The blocks are read with service code 0x000B (NDEF). Tag command errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("read {0} block(s) without mac".format(len(blocks))) service_list = [tt3.ServiceCode(0, 0b001011)] block_list = [tt3.BlockCode(n) for n in blocks] return self.read_without_encryption(service_list, block_list)
python
def read_without_mac(self, *blocks): """Read a number of data blocks without integrity check. This method accepts a variable number of integer arguments as the block numbers to read. The blocks are read with service code 0x000B (NDEF). Tag command errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("read {0} block(s) without mac".format(len(blocks))) service_list = [tt3.ServiceCode(0, 0b001011)] block_list = [tt3.BlockCode(n) for n in blocks] return self.read_without_encryption(service_list, block_list)
[ "def", "read_without_mac", "(", "self", ",", "*", "blocks", ")", ":", "log", ".", "debug", "(", "\"read {0} block(s) without mac\"", ".", "format", "(", "len", "(", "blocks", ")", ")", ")", "service_list", "=", "[", "tt3", ".", "ServiceCode", "(", "0", ",", "0b001011", ")", "]", "block_list", "=", "[", "tt3", ".", "BlockCode", "(", "n", ")", "for", "n", "in", "blocks", "]", "return", "self", ".", "read_without_encryption", "(", "service_list", ",", "block_list", ")" ]
Read a number of data blocks without integrity check. This method accepts a variable number of integer arguments as the block numbers to read. The blocks are read with service code 0x000B (NDEF). Tag command errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Read", "a", "number", "of", "data", "blocks", "without", "integrity", "check", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L680-L693
227,419
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaLite.read_with_mac
def read_with_mac(self, *blocks): """Read a number of data blocks with integrity check. This method accepts a variable number of integer arguments as the block numbers to read. The blocks are read with service code 0x000B (NDEF). Along with the requested block data the tag returns a message authentication code that is verified before data is returned. If verification fails the return value of :meth:`read_with_mac` is None. A :exc:`RuntimeError` exception is raised if the tag was not authenticated before calling this method. Tag command errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("read {0} block(s) with mac".format(len(blocks))) if self._sk is None or self._iv is None: raise RuntimeError("authentication required") service_list = [tt3.ServiceCode(0, 0b001011)] block_list = [tt3.BlockCode(n) for n in blocks] block_list.append(tt3.BlockCode(0x81)) data = self.read_without_encryption(service_list, block_list) data, mac = data[0:-16], data[-16:-8] if mac != self.generate_mac(data, self._sk, self._iv): log.warning("mac verification failed") else: return data
python
def read_with_mac(self, *blocks): """Read a number of data blocks with integrity check. This method accepts a variable number of integer arguments as the block numbers to read. The blocks are read with service code 0x000B (NDEF). Along with the requested block data the tag returns a message authentication code that is verified before data is returned. If verification fails the return value of :meth:`read_with_mac` is None. A :exc:`RuntimeError` exception is raised if the tag was not authenticated before calling this method. Tag command errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("read {0} block(s) with mac".format(len(blocks))) if self._sk is None or self._iv is None: raise RuntimeError("authentication required") service_list = [tt3.ServiceCode(0, 0b001011)] block_list = [tt3.BlockCode(n) for n in blocks] block_list.append(tt3.BlockCode(0x81)) data = self.read_without_encryption(service_list, block_list) data, mac = data[0:-16], data[-16:-8] if mac != self.generate_mac(data, self._sk, self._iv): log.warning("mac verification failed") else: return data
[ "def", "read_with_mac", "(", "self", ",", "*", "blocks", ")", ":", "log", ".", "debug", "(", "\"read {0} block(s) with mac\"", ".", "format", "(", "len", "(", "blocks", ")", ")", ")", "if", "self", ".", "_sk", "is", "None", "or", "self", ".", "_iv", "is", "None", ":", "raise", "RuntimeError", "(", "\"authentication required\"", ")", "service_list", "=", "[", "tt3", ".", "ServiceCode", "(", "0", ",", "0b001011", ")", "]", "block_list", "=", "[", "tt3", ".", "BlockCode", "(", "n", ")", "for", "n", "in", "blocks", "]", "block_list", ".", "append", "(", "tt3", ".", "BlockCode", "(", "0x81", ")", ")", "data", "=", "self", ".", "read_without_encryption", "(", "service_list", ",", "block_list", ")", "data", ",", "mac", "=", "data", "[", "0", ":", "-", "16", "]", ",", "data", "[", "-", "16", ":", "-", "8", "]", "if", "mac", "!=", "self", ".", "generate_mac", "(", "data", ",", "self", ".", "_sk", ",", "self", ".", "_iv", ")", ":", "log", ".", "warning", "(", "\"mac verification failed\"", ")", "else", ":", "return", "data" ]
Read a number of data blocks with integrity check. This method accepts a variable number of integer arguments as the block numbers to read. The blocks are read with service code 0x000B (NDEF). Along with the requested block data the tag returns a message authentication code that is verified before data is returned. If verification fails the return value of :meth:`read_with_mac` is None. A :exc:`RuntimeError` exception is raised if the tag was not authenticated before calling this method. Tag command errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Read", "a", "number", "of", "data", "blocks", "with", "integrity", "check", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L695-L725
227,420
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaLite.write_without_mac
def write_without_mac(self, data, block): """Write a data block without integrity check. This is the standard write method for a FeliCa Lite. The 16-byte string or bytearray *data* is written to the numbered *block* in service 0x0009 (NDEF write service). :: data = bytearray(range(16)) # 0x00, 0x01, ... 0x0F try: tag.write_without_mac(data, 5) # write block 5 except nfc.tag.TagCommandError: print("something went wrong") Tag command errors raise :exc:`~nfc.tag.TagCommandError`. """ # Write a single data block without a mac. Write with mac is # only supported by FeliCa Lite-S. assert len(data) == 16 and type(block) is int log.debug("write 1 block without mac".format()) sc_list = [tt3.ServiceCode(0, 0b001001)] bc_list = [tt3.BlockCode(block)] self.write_without_encryption(sc_list, bc_list, data)
python
def write_without_mac(self, data, block): """Write a data block without integrity check. This is the standard write method for a FeliCa Lite. The 16-byte string or bytearray *data* is written to the numbered *block* in service 0x0009 (NDEF write service). :: data = bytearray(range(16)) # 0x00, 0x01, ... 0x0F try: tag.write_without_mac(data, 5) # write block 5 except nfc.tag.TagCommandError: print("something went wrong") Tag command errors raise :exc:`~nfc.tag.TagCommandError`. """ # Write a single data block without a mac. Write with mac is # only supported by FeliCa Lite-S. assert len(data) == 16 and type(block) is int log.debug("write 1 block without mac".format()) sc_list = [tt3.ServiceCode(0, 0b001001)] bc_list = [tt3.BlockCode(block)] self.write_without_encryption(sc_list, bc_list, data)
[ "def", "write_without_mac", "(", "self", ",", "data", ",", "block", ")", ":", "# Write a single data block without a mac. Write with mac is", "# only supported by FeliCa Lite-S.", "assert", "len", "(", "data", ")", "==", "16", "and", "type", "(", "block", ")", "is", "int", "log", ".", "debug", "(", "\"write 1 block without mac\"", ".", "format", "(", ")", ")", "sc_list", "=", "[", "tt3", ".", "ServiceCode", "(", "0", ",", "0b001001", ")", "]", "bc_list", "=", "[", "tt3", ".", "BlockCode", "(", "block", ")", "]", "self", ".", "write_without_encryption", "(", "sc_list", ",", "bc_list", ",", "data", ")" ]
Write a data block without integrity check. This is the standard write method for a FeliCa Lite. The 16-byte string or bytearray *data* is written to the numbered *block* in service 0x0009 (NDEF write service). :: data = bytearray(range(16)) # 0x00, 0x01, ... 0x0F try: tag.write_without_mac(data, 5) # write block 5 except nfc.tag.TagCommandError: print("something went wrong") Tag command errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Write", "a", "data", "block", "without", "integrity", "check", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L727-L748
227,421
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaLiteS.authenticate
def authenticate(self, password): """Mutually authenticate with a FeliCa Lite-S Tag. FeliCa Lite-S supports enhanced security functions, one of them is the mutual authentication performed by this method. The first part of mutual authentication is to authenticate the tag with :meth:`FelicaLite.authenticate`. If successful, the shared session key is used to generate the integrity check value for write operation to update a specific memory block. If that was successful then the tag is ensured that the reader has the correct card key. After successful authentication the :meth:`~FelicaLite.read_with_mac` and :meth:`write_with_mac` methods can be used to read and write data such that it can not be falsified on transmission. """ if super(FelicaLiteS, self).authenticate(password): # At this point we have achieved internal authentication, # i.e we know that the tag has the same card key as in # password. We now reset the authentication status and do # external authentication to assure the tag that we have # the right card key. self._authenticated = False self.read_from_ndef_service = self.read_without_mac self.write_to_ndef_service = self.write_without_mac # To authenticate to the tag we write a 01h into the # ext_auth byte of the state block (block 0x92). The other # bytes of the state block can be all set to zero. self.write_with_mac("\x01" + 15*"\0", 0x92) # Now read the state block and check the value of the # ext_auth to see if we are authenticated. If it's 01h # then we are, otherwise not. if self.read_with_mac(0x92)[0] == 0x01: log.debug("mutual authentication completed") self._authenticated = True self.read_from_ndef_service = self.read_with_mac self.write_to_ndef_service = self.write_with_mac else: log.debug("mutual authentication failed") return self._authenticated
python
def authenticate(self, password): """Mutually authenticate with a FeliCa Lite-S Tag. FeliCa Lite-S supports enhanced security functions, one of them is the mutual authentication performed by this method. The first part of mutual authentication is to authenticate the tag with :meth:`FelicaLite.authenticate`. If successful, the shared session key is used to generate the integrity check value for write operation to update a specific memory block. If that was successful then the tag is ensured that the reader has the correct card key. After successful authentication the :meth:`~FelicaLite.read_with_mac` and :meth:`write_with_mac` methods can be used to read and write data such that it can not be falsified on transmission. """ if super(FelicaLiteS, self).authenticate(password): # At this point we have achieved internal authentication, # i.e we know that the tag has the same card key as in # password. We now reset the authentication status and do # external authentication to assure the tag that we have # the right card key. self._authenticated = False self.read_from_ndef_service = self.read_without_mac self.write_to_ndef_service = self.write_without_mac # To authenticate to the tag we write a 01h into the # ext_auth byte of the state block (block 0x92). The other # bytes of the state block can be all set to zero. self.write_with_mac("\x01" + 15*"\0", 0x92) # Now read the state block and check the value of the # ext_auth to see if we are authenticated. If it's 01h # then we are, otherwise not. if self.read_with_mac(0x92)[0] == 0x01: log.debug("mutual authentication completed") self._authenticated = True self.read_from_ndef_service = self.read_with_mac self.write_to_ndef_service = self.write_with_mac else: log.debug("mutual authentication failed") return self._authenticated
[ "def", "authenticate", "(", "self", ",", "password", ")", ":", "if", "super", "(", "FelicaLiteS", ",", "self", ")", ".", "authenticate", "(", "password", ")", ":", "# At this point we have achieved internal authentication,", "# i.e we know that the tag has the same card key as in", "# password. We now reset the authentication status and do", "# external authentication to assure the tag that we have", "# the right card key.", "self", ".", "_authenticated", "=", "False", "self", ".", "read_from_ndef_service", "=", "self", ".", "read_without_mac", "self", ".", "write_to_ndef_service", "=", "self", ".", "write_without_mac", "# To authenticate to the tag we write a 01h into the", "# ext_auth byte of the state block (block 0x92). The other", "# bytes of the state block can be all set to zero.", "self", ".", "write_with_mac", "(", "\"\\x01\"", "+", "15", "*", "\"\\0\"", ",", "0x92", ")", "# Now read the state block and check the value of the", "# ext_auth to see if we are authenticated. If it's 01h", "# then we are, otherwise not.", "if", "self", ".", "read_with_mac", "(", "0x92", ")", "[", "0", "]", "==", "0x01", ":", "log", ".", "debug", "(", "\"mutual authentication completed\"", ")", "self", ".", "_authenticated", "=", "True", "self", ".", "read_from_ndef_service", "=", "self", ".", "read_with_mac", "self", ".", "write_to_ndef_service", "=", "self", ".", "write_with_mac", "else", ":", "log", ".", "debug", "(", "\"mutual authentication failed\"", ")", "return", "self", ".", "_authenticated" ]
Mutually authenticate with a FeliCa Lite-S Tag. FeliCa Lite-S supports enhanced security functions, one of them is the mutual authentication performed by this method. The first part of mutual authentication is to authenticate the tag with :meth:`FelicaLite.authenticate`. If successful, the shared session key is used to generate the integrity check value for write operation to update a specific memory block. If that was successful then the tag is ensured that the reader has the correct card key. After successful authentication the :meth:`~FelicaLite.read_with_mac` and :meth:`write_with_mac` methods can be used to read and write data such that it can not be falsified on transmission.
[ "Mutually", "authenticate", "with", "a", "FeliCa", "Lite", "-", "S", "Tag", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L882-L926
227,422
nfcpy/nfcpy
src/nfc/tag/tt3_sony.py
FelicaLiteS.write_with_mac
def write_with_mac(self, data, block): """Write one data block with additional integrity check. If prior to calling this method the tag was not authenticated, a :exc:`RuntimeError` exception is raised. Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ # Write a single data block protected with a mac. The card # will only accept the write if it computed the same mac. log.debug("write 1 block with mac") if len(data) != 16: raise ValueError("data must be 16 octets") if type(block) is not int: raise ValueError("block number must be int") if self._sk is None or self._iv is None: raise RuntimeError("tag must be authenticated first") # The write count is the first three byte of the wcnt block. wcnt = str(self.read_without_mac(0x90)[0:3]) log.debug("write count is 0x{0}".format(wcnt[::-1].encode("hex"))) # We must generate the mac_a block to write the data. The data # to encrypt to the mac is composed of write count and block # numbers (8 byte) and the data we want to write. The mac for # write must be generated with the key flipped (sk2 || sk1). def flip(sk): return sk[8:16] + sk[0:8] data = wcnt + "\x00" + chr(block) + "\x00\x91\x00" + data maca = self.generate_mac(data, flip(self._sk), self._iv) + wcnt+5*"\0" # Now we can write the data block with our computed mac to the # desired block and the maca block. Write without encryption # means that the data is not encrypted with a service key. sc_list = [tt3.ServiceCode(0, 0b001001)] bc_list = [tt3.BlockCode(block), tt3.BlockCode(0x91)] self.write_without_encryption(sc_list, bc_list, data[8:24] + maca)
python
def write_with_mac(self, data, block): """Write one data block with additional integrity check. If prior to calling this method the tag was not authenticated, a :exc:`RuntimeError` exception is raised. Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ # Write a single data block protected with a mac. The card # will only accept the write if it computed the same mac. log.debug("write 1 block with mac") if len(data) != 16: raise ValueError("data must be 16 octets") if type(block) is not int: raise ValueError("block number must be int") if self._sk is None or self._iv is None: raise RuntimeError("tag must be authenticated first") # The write count is the first three byte of the wcnt block. wcnt = str(self.read_without_mac(0x90)[0:3]) log.debug("write count is 0x{0}".format(wcnt[::-1].encode("hex"))) # We must generate the mac_a block to write the data. The data # to encrypt to the mac is composed of write count and block # numbers (8 byte) and the data we want to write. The mac for # write must be generated with the key flipped (sk2 || sk1). def flip(sk): return sk[8:16] + sk[0:8] data = wcnt + "\x00" + chr(block) + "\x00\x91\x00" + data maca = self.generate_mac(data, flip(self._sk), self._iv) + wcnt+5*"\0" # Now we can write the data block with our computed mac to the # desired block and the maca block. Write without encryption # means that the data is not encrypted with a service key. sc_list = [tt3.ServiceCode(0, 0b001001)] bc_list = [tt3.BlockCode(block), tt3.BlockCode(0x91)] self.write_without_encryption(sc_list, bc_list, data[8:24] + maca)
[ "def", "write_with_mac", "(", "self", ",", "data", ",", "block", ")", ":", "# Write a single data block protected with a mac. The card", "# will only accept the write if it computed the same mac.", "log", ".", "debug", "(", "\"write 1 block with mac\"", ")", "if", "len", "(", "data", ")", "!=", "16", ":", "raise", "ValueError", "(", "\"data must be 16 octets\"", ")", "if", "type", "(", "block", ")", "is", "not", "int", ":", "raise", "ValueError", "(", "\"block number must be int\"", ")", "if", "self", ".", "_sk", "is", "None", "or", "self", ".", "_iv", "is", "None", ":", "raise", "RuntimeError", "(", "\"tag must be authenticated first\"", ")", "# The write count is the first three byte of the wcnt block.", "wcnt", "=", "str", "(", "self", ".", "read_without_mac", "(", "0x90", ")", "[", "0", ":", "3", "]", ")", "log", ".", "debug", "(", "\"write count is 0x{0}\"", ".", "format", "(", "wcnt", "[", ":", ":", "-", "1", "]", ".", "encode", "(", "\"hex\"", ")", ")", ")", "# We must generate the mac_a block to write the data. The data", "# to encrypt to the mac is composed of write count and block", "# numbers (8 byte) and the data we want to write. The mac for", "# write must be generated with the key flipped (sk2 || sk1).", "def", "flip", "(", "sk", ")", ":", "return", "sk", "[", "8", ":", "16", "]", "+", "sk", "[", "0", ":", "8", "]", "data", "=", "wcnt", "+", "\"\\x00\"", "+", "chr", "(", "block", ")", "+", "\"\\x00\\x91\\x00\"", "+", "data", "maca", "=", "self", ".", "generate_mac", "(", "data", ",", "flip", "(", "self", ".", "_sk", ")", ",", "self", ".", "_iv", ")", "+", "wcnt", "+", "5", "*", "\"\\0\"", "# Now we can write the data block with our computed mac to the", "# desired block and the maca block. 
Write without encryption", "# means that the data is not encrypted with a service key.", "sc_list", "=", "[", "tt3", ".", "ServiceCode", "(", "0", ",", "0b001001", ")", "]", "bc_list", "=", "[", "tt3", ".", "BlockCode", "(", "block", ")", ",", "tt3", ".", "BlockCode", "(", "0x91", ")", "]", "self", ".", "write_without_encryption", "(", "sc_list", ",", "bc_list", ",", "data", "[", "8", ":", "24", "]", "+", "maca", ")" ]
Write one data block with additional integrity check. If prior to calling this method the tag was not authenticated, a :exc:`RuntimeError` exception is raised. Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Write", "one", "data", "block", "with", "additional", "integrity", "check", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3_sony.py#L928-L966
227,423
nfcpy/nfcpy
src/nfc/ndef/record.py
Record._read
def _read(self, f): """Parse an NDEF record from a file-like object.""" try: self.header = ord(f.read(1)) except TypeError: log.debug("buffer underflow at offset {0}".format(f.tell())) raise LengthError("insufficient data to parse") mbf = bool(self.header & 0x80) mef = bool(self.header & 0x40) cff = bool(self.header & 0x20) srf = bool(self.header & 0x10) ilf = bool(self.header & 0x08) tnf = self.header & 0x07 try: type_length = ord(f.read(1)) if srf: # short record data_length = ord(f.read(1)) else: # 32-bit length data_length = struct.unpack('>L', f.read(4))[0] if ilf: # id length present name_length = ord(f.read(1)) else: name_length = 0 except (TypeError, struct.error): log.debug("buffer underflow at offset {0}".format(f.tell())) raise LengthError("insufficient data to parse") try: record_type = f.read(type_length) assert len(record_type) == type_length record_name = f.read(name_length) assert len(record_name) == name_length record_data = f.read(data_length) assert len(record_data) == data_length except AssertionError: log.debug("buffer underflow at offset {0}".format(f.tell())) raise LengthError("insufficient data to parse") if tnf in (0, 5, 6) and len(record_type) > 0: s = "ndef type name format {0} doesn't allow a type string" raise FormatError( s.format(tnf) ) if tnf in (1, 2, 3, 4) and len(record_type) == 0: s = "ndef type name format {0} requires a type string" raise FormatError( s.format(tnf) ) if tnf == 0 and len(record_data) > 0: s = "ndef type name format {0} doesn't allow a payload" raise FormatError( s.format(tnf) ) self._message_begin, self._message_end = mbf, mef self._type = bytearray(type_name_prefix[tnf] + record_type) self._name = bytearray(record_name) self._data = bytearray(record_data) log.debug("parsed {0}".format(repr(self)))
python
def _read(self, f): """Parse an NDEF record from a file-like object.""" try: self.header = ord(f.read(1)) except TypeError: log.debug("buffer underflow at offset {0}".format(f.tell())) raise LengthError("insufficient data to parse") mbf = bool(self.header & 0x80) mef = bool(self.header & 0x40) cff = bool(self.header & 0x20) srf = bool(self.header & 0x10) ilf = bool(self.header & 0x08) tnf = self.header & 0x07 try: type_length = ord(f.read(1)) if srf: # short record data_length = ord(f.read(1)) else: # 32-bit length data_length = struct.unpack('>L', f.read(4))[0] if ilf: # id length present name_length = ord(f.read(1)) else: name_length = 0 except (TypeError, struct.error): log.debug("buffer underflow at offset {0}".format(f.tell())) raise LengthError("insufficient data to parse") try: record_type = f.read(type_length) assert len(record_type) == type_length record_name = f.read(name_length) assert len(record_name) == name_length record_data = f.read(data_length) assert len(record_data) == data_length except AssertionError: log.debug("buffer underflow at offset {0}".format(f.tell())) raise LengthError("insufficient data to parse") if tnf in (0, 5, 6) and len(record_type) > 0: s = "ndef type name format {0} doesn't allow a type string" raise FormatError( s.format(tnf) ) if tnf in (1, 2, 3, 4) and len(record_type) == 0: s = "ndef type name format {0} requires a type string" raise FormatError( s.format(tnf) ) if tnf == 0 and len(record_data) > 0: s = "ndef type name format {0} doesn't allow a payload" raise FormatError( s.format(tnf) ) self._message_begin, self._message_end = mbf, mef self._type = bytearray(type_name_prefix[tnf] + record_type) self._name = bytearray(record_name) self._data = bytearray(record_data) log.debug("parsed {0}".format(repr(self)))
[ "def", "_read", "(", "self", ",", "f", ")", ":", "try", ":", "self", ".", "header", "=", "ord", "(", "f", ".", "read", "(", "1", ")", ")", "except", "TypeError", ":", "log", ".", "debug", "(", "\"buffer underflow at offset {0}\"", ".", "format", "(", "f", ".", "tell", "(", ")", ")", ")", "raise", "LengthError", "(", "\"insufficient data to parse\"", ")", "mbf", "=", "bool", "(", "self", ".", "header", "&", "0x80", ")", "mef", "=", "bool", "(", "self", ".", "header", "&", "0x40", ")", "cff", "=", "bool", "(", "self", ".", "header", "&", "0x20", ")", "srf", "=", "bool", "(", "self", ".", "header", "&", "0x10", ")", "ilf", "=", "bool", "(", "self", ".", "header", "&", "0x08", ")", "tnf", "=", "self", ".", "header", "&", "0x07", "try", ":", "type_length", "=", "ord", "(", "f", ".", "read", "(", "1", ")", ")", "if", "srf", ":", "# short record", "data_length", "=", "ord", "(", "f", ".", "read", "(", "1", ")", ")", "else", ":", "# 32-bit length", "data_length", "=", "struct", ".", "unpack", "(", "'>L'", ",", "f", ".", "read", "(", "4", ")", ")", "[", "0", "]", "if", "ilf", ":", "# id length present", "name_length", "=", "ord", "(", "f", ".", "read", "(", "1", ")", ")", "else", ":", "name_length", "=", "0", "except", "(", "TypeError", ",", "struct", ".", "error", ")", ":", "log", ".", "debug", "(", "\"buffer underflow at offset {0}\"", ".", "format", "(", "f", ".", "tell", "(", ")", ")", ")", "raise", "LengthError", "(", "\"insufficient data to parse\"", ")", "try", ":", "record_type", "=", "f", ".", "read", "(", "type_length", ")", "assert", "len", "(", "record_type", ")", "==", "type_length", "record_name", "=", "f", ".", "read", "(", "name_length", ")", "assert", "len", "(", "record_name", ")", "==", "name_length", "record_data", "=", "f", ".", "read", "(", "data_length", ")", "assert", "len", "(", "record_data", ")", "==", "data_length", "except", "AssertionError", ":", "log", ".", "debug", "(", "\"buffer underflow at offset {0}\"", ".", "format", "(", "f", ".", 
"tell", "(", ")", ")", ")", "raise", "LengthError", "(", "\"insufficient data to parse\"", ")", "if", "tnf", "in", "(", "0", ",", "5", ",", "6", ")", "and", "len", "(", "record_type", ")", ">", "0", ":", "s", "=", "\"ndef type name format {0} doesn't allow a type string\"", "raise", "FormatError", "(", "s", ".", "format", "(", "tnf", ")", ")", "if", "tnf", "in", "(", "1", ",", "2", ",", "3", ",", "4", ")", "and", "len", "(", "record_type", ")", "==", "0", ":", "s", "=", "\"ndef type name format {0} requires a type string\"", "raise", "FormatError", "(", "s", ".", "format", "(", "tnf", ")", ")", "if", "tnf", "==", "0", "and", "len", "(", "record_data", ")", ">", "0", ":", "s", "=", "\"ndef type name format {0} doesn't allow a payload\"", "raise", "FormatError", "(", "s", ".", "format", "(", "tnf", ")", ")", "self", ".", "_message_begin", ",", "self", ".", "_message_end", "=", "mbf", ",", "mef", "self", ".", "_type", "=", "bytearray", "(", "type_name_prefix", "[", "tnf", "]", "+", "record_type", ")", "self", ".", "_name", "=", "bytearray", "(", "record_name", ")", "self", ".", "_data", "=", "bytearray", "(", "record_data", ")", "log", ".", "debug", "(", "\"parsed {0}\"", ".", "format", "(", "repr", "(", "self", ")", ")", ")" ]
Parse an NDEF record from a file-like object.
[ "Parse", "an", "NDEF", "record", "from", "a", "file", "-", "like", "object", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/ndef/record.py#L91-L146
227,424
nfcpy/nfcpy
src/nfc/ndef/record.py
Record._write
def _write(self, f): """Serialize an NDEF record to a file-like object.""" log.debug("writing ndef record at offset {0}".format(f.tell())) record_type = self.type record_name = self.name record_data = self.data if record_type == '': header_flags = 0; record_name = ''; record_data = '' elif record_type.startswith("urn:nfc:wkt:"): header_flags = 1; record_type = record_type[12:] elif re.match(r'[a-zA-Z0-9-]+/[a-zA-Z0-9-+.]+', record_type): header_flags = 2; record_type = record_type elif re.match(r'[a-zA-Z][a-zA-Z0-9+-.]*://', record_type): header_flags = 3; record_type = record_type elif record_type.startswith("urn:nfc:ext:"): header_flags = 4; record_type = record_type[12:] elif record_type == 'unknown': header_flags = 5; record_type = '' elif record_type == 'unchanged': header_flags = 6; record_type = '' type_length = len(record_type) data_length = len(record_data) name_length = len(record_name) if self._message_begin: header_flags |= 0x80 if self._message_end: header_flags |= 0x40 if data_length < 256: header_flags |= 0x10 if name_length > 0: header_flags |= 0x08 if data_length < 256: f.write(struct.pack(">BBB", header_flags, type_length, data_length)) else: f.write(struct.pack(">BBL", header_flags, type_length, data_length)) if name_length > 0: f.write(struct.pack(">B", name_length)) f.write(record_type) f.write(record_name) f.write(record_data)
python
def _write(self, f): """Serialize an NDEF record to a file-like object.""" log.debug("writing ndef record at offset {0}".format(f.tell())) record_type = self.type record_name = self.name record_data = self.data if record_type == '': header_flags = 0; record_name = ''; record_data = '' elif record_type.startswith("urn:nfc:wkt:"): header_flags = 1; record_type = record_type[12:] elif re.match(r'[a-zA-Z0-9-]+/[a-zA-Z0-9-+.]+', record_type): header_flags = 2; record_type = record_type elif re.match(r'[a-zA-Z][a-zA-Z0-9+-.]*://', record_type): header_flags = 3; record_type = record_type elif record_type.startswith("urn:nfc:ext:"): header_flags = 4; record_type = record_type[12:] elif record_type == 'unknown': header_flags = 5; record_type = '' elif record_type == 'unchanged': header_flags = 6; record_type = '' type_length = len(record_type) data_length = len(record_data) name_length = len(record_name) if self._message_begin: header_flags |= 0x80 if self._message_end: header_flags |= 0x40 if data_length < 256: header_flags |= 0x10 if name_length > 0: header_flags |= 0x08 if data_length < 256: f.write(struct.pack(">BBB", header_flags, type_length, data_length)) else: f.write(struct.pack(">BBL", header_flags, type_length, data_length)) if name_length > 0: f.write(struct.pack(">B", name_length)) f.write(record_type) f.write(record_name) f.write(record_data)
[ "def", "_write", "(", "self", ",", "f", ")", ":", "log", ".", "debug", "(", "\"writing ndef record at offset {0}\"", ".", "format", "(", "f", ".", "tell", "(", ")", ")", ")", "record_type", "=", "self", ".", "type", "record_name", "=", "self", ".", "name", "record_data", "=", "self", ".", "data", "if", "record_type", "==", "''", ":", "header_flags", "=", "0", "record_name", "=", "''", "record_data", "=", "''", "elif", "record_type", ".", "startswith", "(", "\"urn:nfc:wkt:\"", ")", ":", "header_flags", "=", "1", "record_type", "=", "record_type", "[", "12", ":", "]", "elif", "re", ".", "match", "(", "r'[a-zA-Z0-9-]+/[a-zA-Z0-9-+.]+'", ",", "record_type", ")", ":", "header_flags", "=", "2", "record_type", "=", "record_type", "elif", "re", ".", "match", "(", "r'[a-zA-Z][a-zA-Z0-9+-.]*://'", ",", "record_type", ")", ":", "header_flags", "=", "3", "record_type", "=", "record_type", "elif", "record_type", ".", "startswith", "(", "\"urn:nfc:ext:\"", ")", ":", "header_flags", "=", "4", "record_type", "=", "record_type", "[", "12", ":", "]", "elif", "record_type", "==", "'unknown'", ":", "header_flags", "=", "5", "record_type", "=", "''", "elif", "record_type", "==", "'unchanged'", ":", "header_flags", "=", "6", "record_type", "=", "''", "type_length", "=", "len", "(", "record_type", ")", "data_length", "=", "len", "(", "record_data", ")", "name_length", "=", "len", "(", "record_name", ")", "if", "self", ".", "_message_begin", ":", "header_flags", "|=", "0x80", "if", "self", ".", "_message_end", ":", "header_flags", "|=", "0x40", "if", "data_length", "<", "256", ":", "header_flags", "|=", "0x10", "if", "name_length", ">", "0", ":", "header_flags", "|=", "0x08", "if", "data_length", "<", "256", ":", "f", ".", "write", "(", "struct", ".", "pack", "(", "\">BBB\"", ",", "header_flags", ",", "type_length", ",", "data_length", ")", ")", "else", ":", "f", ".", "write", "(", "struct", ".", "pack", "(", "\">BBL\"", ",", "header_flags", ",", "type_length", ",", "data_length", ")", 
")", "if", "name_length", ">", "0", ":", "f", ".", "write", "(", "struct", ".", "pack", "(", "\">B\"", ",", "name_length", ")", ")", "f", ".", "write", "(", "record_type", ")", "f", ".", "write", "(", "record_name", ")", "f", ".", "write", "(", "record_data", ")" ]
Serialize an NDEF record to a file-like object.
[ "Serialize", "an", "NDEF", "record", "to", "a", "file", "-", "like", "object", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/ndef/record.py#L148-L193
227,425
nfcpy/nfcpy
src/nfc/tag/tt3.py
ServiceCode.pack
def pack(self): """Pack the service code for transmission. Returns a 2 byte string.""" sn, sa = self.number, self.attribute return pack("<H", (sn & 0x3ff) << 6 | (sa & 0x3f))
python
def pack(self): """Pack the service code for transmission. Returns a 2 byte string.""" sn, sa = self.number, self.attribute return pack("<H", (sn & 0x3ff) << 6 | (sa & 0x3f))
[ "def", "pack", "(", "self", ")", ":", "sn", ",", "sa", "=", "self", ".", "number", ",", "self", ".", "attribute", "return", "pack", "(", "\"<H\"", ",", "(", "sn", "&", "0x3ff", ")", "<<", "6", "|", "(", "sa", "&", "0x3f", ")", ")" ]
Pack the service code for transmission. Returns a 2 byte string.
[ "Pack", "the", "service", "code", "for", "transmission", ".", "Returns", "a", "2", "byte", "string", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3.py#L97-L100
227,426
nfcpy/nfcpy
src/nfc/tag/tt3.py
BlockCode.pack
def pack(self): """Pack the block code for transmission. Returns a 2-3 byte string.""" bn, am, sx = self.number, self.access, self.service return chr(bool(bn < 256) << 7 | (am & 0x7) << 4 | (sx & 0xf)) + \ (chr(bn) if bn < 256 else pack("<H", bn))
python
def pack(self): """Pack the block code for transmission. Returns a 2-3 byte string.""" bn, am, sx = self.number, self.access, self.service return chr(bool(bn < 256) << 7 | (am & 0x7) << 4 | (sx & 0xf)) + \ (chr(bn) if bn < 256 else pack("<H", bn))
[ "def", "pack", "(", "self", ")", ":", "bn", ",", "am", ",", "sx", "=", "self", ".", "number", ",", "self", ".", "access", ",", "self", ".", "service", "return", "chr", "(", "bool", "(", "bn", "<", "256", ")", "<<", "7", "|", "(", "am", "&", "0x7", ")", "<<", "4", "|", "(", "sx", "&", "0xf", ")", ")", "+", "(", "chr", "(", "bn", ")", "if", "bn", "<", "256", "else", "pack", "(", "\"<H\"", ",", "bn", ")", ")" ]
Pack the block code for transmission. Returns a 2-3 byte string.
[ "Pack", "the", "block", "code", "for", "transmission", ".", "Returns", "a", "2", "-", "3", "byte", "string", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3.py#L129-L133
227,427
nfcpy/nfcpy
src/nfc/tag/tt3.py
Type3Tag.dump_service
def dump_service(self, sc): """Read all data blocks of a given service. :meth:`dump_service` reads all data blocks from the service with service code *sc* and returns a list of strings suitable for printing. The number of strings returned does not necessarily reflect the number of data blocks because a range of data blocks with equal content is reduced to fewer lines of output. """ def lprint(fmt, data, index): ispchr = lambda x: x >= 32 and x <= 126 # noqa: E731 def print_bytes(octets): return ' '.join(['%02x' % x for x in octets]) def print_chars(octets): return ''.join([chr(x) if ispchr(x) else '.' for x in octets]) return fmt.format(index, print_bytes(data), print_chars(data)) data_line_fmt = "{0:04X}: {1} |{2}|" same_line_fmt = "{0:<4s} {1} |{2}|" lines = list() last_data = None same_data = 0 for i in itertools.count(): # pragma: no branch assert i < 0x10000 try: this_data = self.read_without_encryption([sc], [BlockCode(i)]) except Type3TagCommandError: i = i - 1 break if this_data == last_data: same_data += 1 else: if same_data > 1: lines.append(lprint(same_line_fmt, last_data, "*")) lines.append(lprint(data_line_fmt, this_data, i)) last_data = this_data same_data = 0 if same_data > 1: lines.append(lprint(same_line_fmt, last_data, "*")) if same_data > 0: lines.append(lprint(data_line_fmt, this_data, i)) return lines
python
def dump_service(self, sc): """Read all data blocks of a given service. :meth:`dump_service` reads all data blocks from the service with service code *sc* and returns a list of strings suitable for printing. The number of strings returned does not necessarily reflect the number of data blocks because a range of data blocks with equal content is reduced to fewer lines of output. """ def lprint(fmt, data, index): ispchr = lambda x: x >= 32 and x <= 126 # noqa: E731 def print_bytes(octets): return ' '.join(['%02x' % x for x in octets]) def print_chars(octets): return ''.join([chr(x) if ispchr(x) else '.' for x in octets]) return fmt.format(index, print_bytes(data), print_chars(data)) data_line_fmt = "{0:04X}: {1} |{2}|" same_line_fmt = "{0:<4s} {1} |{2}|" lines = list() last_data = None same_data = 0 for i in itertools.count(): # pragma: no branch assert i < 0x10000 try: this_data = self.read_without_encryption([sc], [BlockCode(i)]) except Type3TagCommandError: i = i - 1 break if this_data == last_data: same_data += 1 else: if same_data > 1: lines.append(lprint(same_line_fmt, last_data, "*")) lines.append(lprint(data_line_fmt, this_data, i)) last_data = this_data same_data = 0 if same_data > 1: lines.append(lprint(same_line_fmt, last_data, "*")) if same_data > 0: lines.append(lprint(data_line_fmt, this_data, i)) return lines
[ "def", "dump_service", "(", "self", ",", "sc", ")", ":", "def", "lprint", "(", "fmt", ",", "data", ",", "index", ")", ":", "ispchr", "=", "lambda", "x", ":", "x", ">=", "32", "and", "x", "<=", "126", "# noqa: E731", "def", "print_bytes", "(", "octets", ")", ":", "return", "' '", ".", "join", "(", "[", "'%02x'", "%", "x", "for", "x", "in", "octets", "]", ")", "def", "print_chars", "(", "octets", ")", ":", "return", "''", ".", "join", "(", "[", "chr", "(", "x", ")", "if", "ispchr", "(", "x", ")", "else", "'.'", "for", "x", "in", "octets", "]", ")", "return", "fmt", ".", "format", "(", "index", ",", "print_bytes", "(", "data", ")", ",", "print_chars", "(", "data", ")", ")", "data_line_fmt", "=", "\"{0:04X}: {1} |{2}|\"", "same_line_fmt", "=", "\"{0:<4s} {1} |{2}|\"", "lines", "=", "list", "(", ")", "last_data", "=", "None", "same_data", "=", "0", "for", "i", "in", "itertools", ".", "count", "(", ")", ":", "# pragma: no branch", "assert", "i", "<", "0x10000", "try", ":", "this_data", "=", "self", ".", "read_without_encryption", "(", "[", "sc", "]", ",", "[", "BlockCode", "(", "i", ")", "]", ")", "except", "Type3TagCommandError", ":", "i", "=", "i", "-", "1", "break", "if", "this_data", "==", "last_data", ":", "same_data", "+=", "1", "else", ":", "if", "same_data", ">", "1", ":", "lines", ".", "append", "(", "lprint", "(", "same_line_fmt", ",", "last_data", ",", "\"*\"", ")", ")", "lines", ".", "append", "(", "lprint", "(", "data_line_fmt", ",", "this_data", ",", "i", ")", ")", "last_data", "=", "this_data", "same_data", "=", "0", "if", "same_data", ">", "1", ":", "lines", ".", "append", "(", "lprint", "(", "same_line_fmt", ",", "last_data", ",", "\"*\"", ")", ")", "if", "same_data", ">", "0", ":", "lines", ".", "append", "(", "lprint", "(", "data_line_fmt", ",", "this_data", ",", "i", ")", ")", "return", "lines" ]
Read all data blocks of a given service. :meth:`dump_service` reads all data blocks from the service with service code *sc* and returns a list of strings suitable for printing. The number of strings returned does not necessarily reflect the number of data blocks because a range of data blocks with equal content is reduced to fewer lines of output.
[ "Read", "all", "data", "blocks", "of", "a", "given", "service", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3.py#L284-L335
227,428
nfcpy/nfcpy
src/nfc/tag/tt3.py
Type3Tag.format
def format(self, version=None, wipe=None): """Format and blank an NFC Forum Type 3 Tag. A generic NFC Forum Type 3 Tag can be (re)formatted if it is in either one of blank, initialized or readwrite state. By formatting, all contents of the attribute information block is overwritten with values determined. The number of user data blocks is determined by reading all memory until an error response. Similarily, the maximum number of data block that can be read or written with a single command is determined by sending successively increased read and write commands. The current data length is set to zero. The NDEF mapping version is set to the latest known version number (1.0), unless the *version* argument is provided and it's major version number corresponds to one of the known major version numbers. By default, no data other than the attribute block is modified. To overwrite user data the *wipe* argument must be set to an integer value. The lower 8 bits of that value are written to all data bytes that follow the attribute block. """ return super(Type3Tag, self).format(version, wipe)
python
def format(self, version=None, wipe=None): """Format and blank an NFC Forum Type 3 Tag. A generic NFC Forum Type 3 Tag can be (re)formatted if it is in either one of blank, initialized or readwrite state. By formatting, all contents of the attribute information block is overwritten with values determined. The number of user data blocks is determined by reading all memory until an error response. Similarily, the maximum number of data block that can be read or written with a single command is determined by sending successively increased read and write commands. The current data length is set to zero. The NDEF mapping version is set to the latest known version number (1.0), unless the *version* argument is provided and it's major version number corresponds to one of the known major version numbers. By default, no data other than the attribute block is modified. To overwrite user data the *wipe* argument must be set to an integer value. The lower 8 bits of that value are written to all data bytes that follow the attribute block. """ return super(Type3Tag, self).format(version, wipe)
[ "def", "format", "(", "self", ",", "version", "=", "None", ",", "wipe", "=", "None", ")", ":", "return", "super", "(", "Type3Tag", ",", "self", ")", ".", "format", "(", "version", ",", "wipe", ")" ]
Format and blank an NFC Forum Type 3 Tag. A generic NFC Forum Type 3 Tag can be (re)formatted if it is in either one of blank, initialized or readwrite state. By formatting, all contents of the attribute information block is overwritten with values determined. The number of user data blocks is determined by reading all memory until an error response. Similarily, the maximum number of data block that can be read or written with a single command is determined by sending successively increased read and write commands. The current data length is set to zero. The NDEF mapping version is set to the latest known version number (1.0), unless the *version* argument is provided and it's major version number corresponds to one of the known major version numbers. By default, no data other than the attribute block is modified. To overwrite user data the *wipe* argument must be set to an integer value. The lower 8 bits of that value are written to all data bytes that follow the attribute block.
[ "Format", "and", "blank", "an", "NFC", "Forum", "Type", "3", "Tag", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3.py#L337-L359
227,429
nfcpy/nfcpy
src/nfc/tag/tt3.py
Type3Tag.polling
def polling(self, system_code=0xffff, request_code=0, time_slots=0): """Aquire and identify a card. The Polling command is used to detect the Type 3 Tags in the field. It is also used for initialization and anti-collision. The *system_code* identifies the card system to acquire. A card can have multiple systems. The first system that matches *system_code* will be activated. A value of 0xff for any of the two bytes works as a wildcard, thus 0xffff activates the very first system in the card. The card identification data returned are the Manufacture ID (IDm) and Manufacture Parameter (PMm). The *request_code* tells the card whether it should return additional information. The default value 0 requests no additional information. Request code 1 means that the card shall also return the system code, so polling for system code 0xffff with request code 1 can be used to identify the first system on the card. Request code 2 asks for communication performance data, more precisely a bitmap of possible communication speeds. Not all cards provide that information. The number of *time_slots* determines whether there's a chance to receive a response if multiple Type 3 Tags are in the field. For the reader the number of time slots determines the amount of time to wait for a response. Any Type 3 Tag in the field, i.e. powered by the field, will choose a random time slot to respond. With the default *time_slots* value 0 there will only be one time slot available for all responses and multiple responses would produce a collision. More time slots reduce the chance of collisions (but may result in an application working with a tag that was just accidentially close enough). Only specific values should be used for *time_slots*, those are 0, 1, 3, 7, and 15. Other values may produce unexpected results depending on the tag product. :meth:`polling` returns either the tuple (IDm, PMm) or the tuple (IDm, PMm, *additional information*) depending on the response lengt, all as bytearrays. 
Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("polling for system 0x{0:04x}".format(system_code)) if time_slots not in (0, 1, 3, 7, 15): log.debug("invalid number of time slots: {0}".format(time_slots)) raise ValueError("invalid number of time slots") if request_code not in (0, 1, 2): log.debug("invalid request code value: {0}".format(request_code)) raise ValueError("invalid request code for polling") timeout = 0.003625 + time_slots * 0.001208 data = pack(">HBB", system_code, request_code, time_slots) data = self.send_cmd_recv_rsp(0x00, data, timeout, send_idm=False) if len(data) != (16 if request_code == 0 else 18): log.debug("unexpected polling response length") raise Type3TagCommandError(DATA_SIZE_ERROR) return (data[0:8], data[8:16]) if len(data) == 16 else \ (data[0:8], data[8:16], data[16:18])
python
def polling(self, system_code=0xffff, request_code=0, time_slots=0): """Aquire and identify a card. The Polling command is used to detect the Type 3 Tags in the field. It is also used for initialization and anti-collision. The *system_code* identifies the card system to acquire. A card can have multiple systems. The first system that matches *system_code* will be activated. A value of 0xff for any of the two bytes works as a wildcard, thus 0xffff activates the very first system in the card. The card identification data returned are the Manufacture ID (IDm) and Manufacture Parameter (PMm). The *request_code* tells the card whether it should return additional information. The default value 0 requests no additional information. Request code 1 means that the card shall also return the system code, so polling for system code 0xffff with request code 1 can be used to identify the first system on the card. Request code 2 asks for communication performance data, more precisely a bitmap of possible communication speeds. Not all cards provide that information. The number of *time_slots* determines whether there's a chance to receive a response if multiple Type 3 Tags are in the field. For the reader the number of time slots determines the amount of time to wait for a response. Any Type 3 Tag in the field, i.e. powered by the field, will choose a random time slot to respond. With the default *time_slots* value 0 there will only be one time slot available for all responses and multiple responses would produce a collision. More time slots reduce the chance of collisions (but may result in an application working with a tag that was just accidentially close enough). Only specific values should be used for *time_slots*, those are 0, 1, 3, 7, and 15. Other values may produce unexpected results depending on the tag product. :meth:`polling` returns either the tuple (IDm, PMm) or the tuple (IDm, PMm, *additional information*) depending on the response lengt, all as bytearrays. 
Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ log.debug("polling for system 0x{0:04x}".format(system_code)) if time_slots not in (0, 1, 3, 7, 15): log.debug("invalid number of time slots: {0}".format(time_slots)) raise ValueError("invalid number of time slots") if request_code not in (0, 1, 2): log.debug("invalid request code value: {0}".format(request_code)) raise ValueError("invalid request code for polling") timeout = 0.003625 + time_slots * 0.001208 data = pack(">HBB", system_code, request_code, time_slots) data = self.send_cmd_recv_rsp(0x00, data, timeout, send_idm=False) if len(data) != (16 if request_code == 0 else 18): log.debug("unexpected polling response length") raise Type3TagCommandError(DATA_SIZE_ERROR) return (data[0:8], data[8:16]) if len(data) == 16 else \ (data[0:8], data[8:16], data[16:18])
[ "def", "polling", "(", "self", ",", "system_code", "=", "0xffff", ",", "request_code", "=", "0", ",", "time_slots", "=", "0", ")", ":", "log", ".", "debug", "(", "\"polling for system 0x{0:04x}\"", ".", "format", "(", "system_code", ")", ")", "if", "time_slots", "not", "in", "(", "0", ",", "1", ",", "3", ",", "7", ",", "15", ")", ":", "log", ".", "debug", "(", "\"invalid number of time slots: {0}\"", ".", "format", "(", "time_slots", ")", ")", "raise", "ValueError", "(", "\"invalid number of time slots\"", ")", "if", "request_code", "not", "in", "(", "0", ",", "1", ",", "2", ")", ":", "log", ".", "debug", "(", "\"invalid request code value: {0}\"", ".", "format", "(", "request_code", ")", ")", "raise", "ValueError", "(", "\"invalid request code for polling\"", ")", "timeout", "=", "0.003625", "+", "time_slots", "*", "0.001208", "data", "=", "pack", "(", "\">HBB\"", ",", "system_code", ",", "request_code", ",", "time_slots", ")", "data", "=", "self", ".", "send_cmd_recv_rsp", "(", "0x00", ",", "data", ",", "timeout", ",", "send_idm", "=", "False", ")", "if", "len", "(", "data", ")", "!=", "(", "16", "if", "request_code", "==", "0", "else", "18", ")", ":", "log", ".", "debug", "(", "\"unexpected polling response length\"", ")", "raise", "Type3TagCommandError", "(", "DATA_SIZE_ERROR", ")", "return", "(", "data", "[", "0", ":", "8", "]", ",", "data", "[", "8", ":", "16", "]", ")", "if", "len", "(", "data", ")", "==", "16", "else", "(", "data", "[", "0", ":", "8", "]", ",", "data", "[", "8", ":", "16", "]", ",", "data", "[", "16", ":", "18", "]", ")" ]
Aquire and identify a card. The Polling command is used to detect the Type 3 Tags in the field. It is also used for initialization and anti-collision. The *system_code* identifies the card system to acquire. A card can have multiple systems. The first system that matches *system_code* will be activated. A value of 0xff for any of the two bytes works as a wildcard, thus 0xffff activates the very first system in the card. The card identification data returned are the Manufacture ID (IDm) and Manufacture Parameter (PMm). The *request_code* tells the card whether it should return additional information. The default value 0 requests no additional information. Request code 1 means that the card shall also return the system code, so polling for system code 0xffff with request code 1 can be used to identify the first system on the card. Request code 2 asks for communication performance data, more precisely a bitmap of possible communication speeds. Not all cards provide that information. The number of *time_slots* determines whether there's a chance to receive a response if multiple Type 3 Tags are in the field. For the reader the number of time slots determines the amount of time to wait for a response. Any Type 3 Tag in the field, i.e. powered by the field, will choose a random time slot to respond. With the default *time_slots* value 0 there will only be one time slot available for all responses and multiple responses would produce a collision. More time slots reduce the chance of collisions (but may result in an application working with a tag that was just accidentially close enough). Only specific values should be used for *time_slots*, those are 0, 1, 3, 7, and 15. Other values may produce unexpected results depending on the tag product. :meth:`polling` returns either the tuple (IDm, PMm) or the tuple (IDm, PMm, *additional information*) depending on the response lengt, all as bytearrays. Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Aquire", "and", "identify", "a", "card", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3.py#L452-L513
227,430
nfcpy/nfcpy
src/nfc/tag/tt3.py
Type3Tag.read_from_ndef_service
def read_from_ndef_service(self, *blocks): """Read block data from an NDEF compatible tag. This is a convinience method to read block data from a tag that has system code 0x12FC (NDEF). For other tags this method simply returns :const:`None`. All arguments are block numbers to read. To actually pass a list of block numbers requires unpacking. The following example calls would have the same effect of reading 32 byte data from from blocks 1 and 8.:: data = tag.read_from_ndef_service(1, 8) data = tag.read_from_ndef_service(*list(1, 8)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ if self.sys == 0x12FC: sc_list = [ServiceCode(0, 0b001011)] bc_list = [BlockCode(n) for n in blocks] return self.read_without_encryption(sc_list, bc_list)
python
def read_from_ndef_service(self, *blocks): """Read block data from an NDEF compatible tag. This is a convinience method to read block data from a tag that has system code 0x12FC (NDEF). For other tags this method simply returns :const:`None`. All arguments are block numbers to read. To actually pass a list of block numbers requires unpacking. The following example calls would have the same effect of reading 32 byte data from from blocks 1 and 8.:: data = tag.read_from_ndef_service(1, 8) data = tag.read_from_ndef_service(*list(1, 8)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ if self.sys == 0x12FC: sc_list = [ServiceCode(0, 0b001011)] bc_list = [BlockCode(n) for n in blocks] return self.read_without_encryption(sc_list, bc_list)
[ "def", "read_from_ndef_service", "(", "self", ",", "*", "blocks", ")", ":", "if", "self", ".", "sys", "==", "0x12FC", ":", "sc_list", "=", "[", "ServiceCode", "(", "0", ",", "0b001011", ")", "]", "bc_list", "=", "[", "BlockCode", "(", "n", ")", "for", "n", "in", "blocks", "]", "return", "self", ".", "read_without_encryption", "(", "sc_list", ",", "bc_list", ")" ]
Read block data from an NDEF compatible tag. This is a convinience method to read block data from a tag that has system code 0x12FC (NDEF). For other tags this method simply returns :const:`None`. All arguments are block numbers to read. To actually pass a list of block numbers requires unpacking. The following example calls would have the same effect of reading 32 byte data from from blocks 1 and 8.:: data = tag.read_from_ndef_service(1, 8) data = tag.read_from_ndef_service(*list(1, 8)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Read", "block", "data", "from", "an", "NDEF", "compatible", "tag", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3.py#L568-L587
227,431
nfcpy/nfcpy
src/nfc/tag/tt3.py
Type3Tag.write_without_encryption
def write_without_encryption(self, service_list, block_list, data): """Write data blocks to unencrypted services. This method sends a Write Without Encryption command to the tag. The data blocks to overwrite are indicated by a sequence of :class:`~nfc.tag.tt3.BlockCode` objects in the parameter *block_list*. Each block code must reference one of the :class:`~nfc.tag.tt3.ServiceCode` objects in the iterable *service_list*. If any of the blocks or services do not exist, the tag will stop processing at that point and return a two byte error status. The status bytes become the :attr:`~nfc.tag.TagCommandError.errno` value of the :exc:`~nfc.tag.TagCommandError` exception. The *data* to write must be a byte string or array of length ``16 * len(block_list)``. As an example, the following code writes ``16 * "\\xAA"`` to block 5 of service 16, ``16 * "\\xBB"`` to block 0 of service 80 and ``16 * "\\xCC"`` to block 1 of service 80 (all services are writeable without key):: sc1 = nfc.tag.tt3.ServiceCode(16, 0x09) sc2 = nfc.tag.tt3.ServiceCode(80, 0x09) bc1 = nfc.tag.tt3.BlockCode(5, service=0) bc2 = nfc.tag.tt3.BlockCode(0, service=1) bc3 = nfc.tag.tt3.BlockCode(1, service=1) sc_list = [sc1, sc2] bc_list = [bc1, bc2, bc3] data = 16 * "\\xAA" + 16 * "\\xBB" + 16 * "\\xCC" try: data = tag.write_without_encryption(sc_list, bc_list, data) except nfc.tag.TagCommandError as e: if e.errno > 0x00FF: print("the tag returned an error status") else: print("command failed with some other error") Command execution errors raise :exc:`~nfc.tag.TagCommandError`. 
""" a, b, e = self.pmm[6] & 7, self.pmm[6] >> 3 & 7, self.pmm[6] >> 6 timeout = 302.1E-6 * ((b + 1) * len(block_list) + a + 1) * 4**e data = (chr(len(service_list)) + ''.join([sc.pack() for sc in service_list]) + chr(len(block_list)) + ''.join([bc.pack() for bc in block_list]) + data) log.debug("write w/o encryption service/block list: {0} / {1}".format( ' '.join([hexlify(sc.pack()) for sc in service_list]), ' '.join([hexlify(bc.pack()) for bc in block_list]))) self.send_cmd_recv_rsp(0x08, data, timeout)
python
def write_without_encryption(self, service_list, block_list, data): """Write data blocks to unencrypted services. This method sends a Write Without Encryption command to the tag. The data blocks to overwrite are indicated by a sequence of :class:`~nfc.tag.tt3.BlockCode` objects in the parameter *block_list*. Each block code must reference one of the :class:`~nfc.tag.tt3.ServiceCode` objects in the iterable *service_list*. If any of the blocks or services do not exist, the tag will stop processing at that point and return a two byte error status. The status bytes become the :attr:`~nfc.tag.TagCommandError.errno` value of the :exc:`~nfc.tag.TagCommandError` exception. The *data* to write must be a byte string or array of length ``16 * len(block_list)``. As an example, the following code writes ``16 * "\\xAA"`` to block 5 of service 16, ``16 * "\\xBB"`` to block 0 of service 80 and ``16 * "\\xCC"`` to block 1 of service 80 (all services are writeable without key):: sc1 = nfc.tag.tt3.ServiceCode(16, 0x09) sc2 = nfc.tag.tt3.ServiceCode(80, 0x09) bc1 = nfc.tag.tt3.BlockCode(5, service=0) bc2 = nfc.tag.tt3.BlockCode(0, service=1) bc3 = nfc.tag.tt3.BlockCode(1, service=1) sc_list = [sc1, sc2] bc_list = [bc1, bc2, bc3] data = 16 * "\\xAA" + 16 * "\\xBB" + 16 * "\\xCC" try: data = tag.write_without_encryption(sc_list, bc_list, data) except nfc.tag.TagCommandError as e: if e.errno > 0x00FF: print("the tag returned an error status") else: print("command failed with some other error") Command execution errors raise :exc:`~nfc.tag.TagCommandError`. 
""" a, b, e = self.pmm[6] & 7, self.pmm[6] >> 3 & 7, self.pmm[6] >> 6 timeout = 302.1E-6 * ((b + 1) * len(block_list) + a + 1) * 4**e data = (chr(len(service_list)) + ''.join([sc.pack() for sc in service_list]) + chr(len(block_list)) + ''.join([bc.pack() for bc in block_list]) + data) log.debug("write w/o encryption service/block list: {0} / {1}".format( ' '.join([hexlify(sc.pack()) for sc in service_list]), ' '.join([hexlify(bc.pack()) for bc in block_list]))) self.send_cmd_recv_rsp(0x08, data, timeout)
[ "def", "write_without_encryption", "(", "self", ",", "service_list", ",", "block_list", ",", "data", ")", ":", "a", ",", "b", ",", "e", "=", "self", ".", "pmm", "[", "6", "]", "&", "7", ",", "self", ".", "pmm", "[", "6", "]", ">>", "3", "&", "7", ",", "self", ".", "pmm", "[", "6", "]", ">>", "6", "timeout", "=", "302.1E-6", "*", "(", "(", "b", "+", "1", ")", "*", "len", "(", "block_list", ")", "+", "a", "+", "1", ")", "*", "4", "**", "e", "data", "=", "(", "chr", "(", "len", "(", "service_list", ")", ")", "+", "''", ".", "join", "(", "[", "sc", ".", "pack", "(", ")", "for", "sc", "in", "service_list", "]", ")", "+", "chr", "(", "len", "(", "block_list", ")", ")", "+", "''", ".", "join", "(", "[", "bc", ".", "pack", "(", ")", "for", "bc", "in", "block_list", "]", ")", "+", "data", ")", "log", ".", "debug", "(", "\"write w/o encryption service/block list: {0} / {1}\"", ".", "format", "(", "' '", ".", "join", "(", "[", "hexlify", "(", "sc", ".", "pack", "(", ")", ")", "for", "sc", "in", "service_list", "]", ")", ",", "' '", ".", "join", "(", "[", "hexlify", "(", "bc", ".", "pack", "(", ")", ")", "for", "bc", "in", "block_list", "]", ")", ")", ")", "self", ".", "send_cmd_recv_rsp", "(", "0x08", ",", "data", ",", "timeout", ")" ]
Write data blocks to unencrypted services. This method sends a Write Without Encryption command to the tag. The data blocks to overwrite are indicated by a sequence of :class:`~nfc.tag.tt3.BlockCode` objects in the parameter *block_list*. Each block code must reference one of the :class:`~nfc.tag.tt3.ServiceCode` objects in the iterable *service_list*. If any of the blocks or services do not exist, the tag will stop processing at that point and return a two byte error status. The status bytes become the :attr:`~nfc.tag.TagCommandError.errno` value of the :exc:`~nfc.tag.TagCommandError` exception. The *data* to write must be a byte string or array of length ``16 * len(block_list)``. As an example, the following code writes ``16 * "\\xAA"`` to block 5 of service 16, ``16 * "\\xBB"`` to block 0 of service 80 and ``16 * "\\xCC"`` to block 1 of service 80 (all services are writeable without key):: sc1 = nfc.tag.tt3.ServiceCode(16, 0x09) sc2 = nfc.tag.tt3.ServiceCode(80, 0x09) bc1 = nfc.tag.tt3.BlockCode(5, service=0) bc2 = nfc.tag.tt3.BlockCode(0, service=1) bc3 = nfc.tag.tt3.BlockCode(1, service=1) sc_list = [sc1, sc2] bc_list = [bc1, bc2, bc3] data = 16 * "\\xAA" + 16 * "\\xBB" + 16 * "\\xCC" try: data = tag.write_without_encryption(sc_list, bc_list, data) except nfc.tag.TagCommandError as e: if e.errno > 0x00FF: print("the tag returned an error status") else: print("command failed with some other error") Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Write", "data", "blocks", "to", "unencrypted", "services", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3.py#L589-L642
227,432
nfcpy/nfcpy
src/nfc/tag/tt3.py
Type3Tag.write_to_ndef_service
def write_to_ndef_service(self, data, *blocks): """Write block data to an NDEF compatible tag. This is a convinience method to write block data to a tag that has system code 0x12FC (NDEF). For other tags this method simply does nothing. The *data* to write must be a string or bytearray with length equal ``16 * len(blocks)``. All parameters following *data* are interpreted as block numbers to write. To actually pass a list of block numbers requires unpacking. The following example calls would have the same effect of writing 32 byte zeros into blocks 1 and 8.:: tag.write_to_ndef_service(32 * "\\0", 1, 8) tag.write_to_ndef_service(32 * "\\0", *list(1, 8)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ if self.sys == 0x12FC: sc_list = [ServiceCode(0, 0b001001)] bc_list = [BlockCode(n) for n in blocks] self.write_without_encryption(sc_list, bc_list, data)
python
def write_to_ndef_service(self, data, *blocks): """Write block data to an NDEF compatible tag. This is a convinience method to write block data to a tag that has system code 0x12FC (NDEF). For other tags this method simply does nothing. The *data* to write must be a string or bytearray with length equal ``16 * len(blocks)``. All parameters following *data* are interpreted as block numbers to write. To actually pass a list of block numbers requires unpacking. The following example calls would have the same effect of writing 32 byte zeros into blocks 1 and 8.:: tag.write_to_ndef_service(32 * "\\0", 1, 8) tag.write_to_ndef_service(32 * "\\0", *list(1, 8)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`. """ if self.sys == 0x12FC: sc_list = [ServiceCode(0, 0b001001)] bc_list = [BlockCode(n) for n in blocks] self.write_without_encryption(sc_list, bc_list, data)
[ "def", "write_to_ndef_service", "(", "self", ",", "data", ",", "*", "blocks", ")", ":", "if", "self", ".", "sys", "==", "0x12FC", ":", "sc_list", "=", "[", "ServiceCode", "(", "0", ",", "0b001001", ")", "]", "bc_list", "=", "[", "BlockCode", "(", "n", ")", "for", "n", "in", "blocks", "]", "self", ".", "write_without_encryption", "(", "sc_list", ",", "bc_list", ",", "data", ")" ]
Write block data to an NDEF compatible tag. This is a convinience method to write block data to a tag that has system code 0x12FC (NDEF). For other tags this method simply does nothing. The *data* to write must be a string or bytearray with length equal ``16 * len(blocks)``. All parameters following *data* are interpreted as block numbers to write. To actually pass a list of block numbers requires unpacking. The following example calls would have the same effect of writing 32 byte zeros into blocks 1 and 8.:: tag.write_to_ndef_service(32 * "\\0", 1, 8) tag.write_to_ndef_service(32 * "\\0", *list(1, 8)) Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
[ "Write", "block", "data", "to", "an", "NDEF", "compatible", "tag", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/tag/tt3.py#L644-L665
227,433
nfcpy/nfcpy
src/nfc/clf/__init__.py
ContactlessFrontend.close
def close(self): """Close the contacless reader device.""" with self.lock: if self.device is not None: try: self.device.close() except IOError: pass self.device = None
python
def close(self): """Close the contacless reader device.""" with self.lock: if self.device is not None: try: self.device.close() except IOError: pass self.device = None
[ "def", "close", "(", "self", ")", ":", "with", "self", ".", "lock", ":", "if", "self", ".", "device", "is", "not", "None", ":", "try", ":", "self", ".", "device", ".", "close", "(", ")", "except", "IOError", ":", "pass", "self", ".", "device", "=", "None" ]
Close the contacless reader device.
[ "Close", "the", "contacless", "reader", "device", "." ]
6649146d1afdd5e82b2b6b1ea00aa58d50785117
https://github.com/nfcpy/nfcpy/blob/6649146d1afdd5e82b2b6b1ea00aa58d50785117/src/nfc/clf/__init__.py#L155-L163
227,434
cloudnativelabs/kube-shell
kubeshell/parser.py
Parser.build
def build(self, root, schema): """ Build the syntax tree for kubectl command line """ if schema.get("subcommands") and schema["subcommands"]: for subcmd, childSchema in schema["subcommands"].items(): child = CommandTree(node=subcmd) child = self.build(child, childSchema) root.children.append(child) # {args: {}, options: {}, help: ""} root.help = schema.get("help") for name, desc in schema.get("options").items(): if root.node == "kubectl": # register global flags self.globalFlags.append(Option(name, desc["help"])) root.localFlags.append(Option(name, desc["help"])) for arg in schema.get("args"): node = CommandTree(node=arg) root.children.append(node) return root
python
def build(self, root, schema): """ Build the syntax tree for kubectl command line """ if schema.get("subcommands") and schema["subcommands"]: for subcmd, childSchema in schema["subcommands"].items(): child = CommandTree(node=subcmd) child = self.build(child, childSchema) root.children.append(child) # {args: {}, options: {}, help: ""} root.help = schema.get("help") for name, desc in schema.get("options").items(): if root.node == "kubectl": # register global flags self.globalFlags.append(Option(name, desc["help"])) root.localFlags.append(Option(name, desc["help"])) for arg in schema.get("args"): node = CommandTree(node=arg) root.children.append(node) return root
[ "def", "build", "(", "self", ",", "root", ",", "schema", ")", ":", "if", "schema", ".", "get", "(", "\"subcommands\"", ")", "and", "schema", "[", "\"subcommands\"", "]", ":", "for", "subcmd", ",", "childSchema", "in", "schema", "[", "\"subcommands\"", "]", ".", "items", "(", ")", ":", "child", "=", "CommandTree", "(", "node", "=", "subcmd", ")", "child", "=", "self", ".", "build", "(", "child", ",", "childSchema", ")", "root", ".", "children", ".", "append", "(", "child", ")", "# {args: {}, options: {}, help: \"\"}", "root", ".", "help", "=", "schema", ".", "get", "(", "\"help\"", ")", "for", "name", ",", "desc", "in", "schema", ".", "get", "(", "\"options\"", ")", ".", "items", "(", ")", ":", "if", "root", ".", "node", "==", "\"kubectl\"", ":", "# register global flags", "self", ".", "globalFlags", ".", "append", "(", "Option", "(", "name", ",", "desc", "[", "\"help\"", "]", ")", ")", "root", ".", "localFlags", ".", "append", "(", "Option", "(", "name", ",", "desc", "[", "\"help\"", "]", ")", ")", "for", "arg", "in", "schema", ".", "get", "(", "\"args\"", ")", ":", "node", "=", "CommandTree", "(", "node", "=", "arg", ")", "root", ".", "children", ".", "append", "(", "node", ")", "return", "root" ]
Build the syntax tree for kubectl command line
[ "Build", "the", "syntax", "tree", "for", "kubectl", "command", "line" ]
adc801d165e87fe62f82b074ec49996954c3fbe8
https://github.com/cloudnativelabs/kube-shell/blob/adc801d165e87fe62f82b074ec49996954c3fbe8/kubeshell/parser.py#L45-L61
227,435
cloudnativelabs/kube-shell
kubeshell/parser.py
Parser.parse_tokens
def parse_tokens(self, tokens): """ Parse a sequence of tokens returns tuple of (parsed tokens, suggestions) """ if len(tokens) == 1: return list(), tokens, {"kubectl": self.ast.help} else: tokens.reverse() parsed, unparsed, suggestions = self.treewalk(self.ast, parsed=list(), unparsed=tokens) if not suggestions and unparsed: # TODO: @vogxn: This is hack until we include expected value types for each option and argument. # Whenver we recieve no suggestions but are left with unparsed tokens we pop the value and walk the # tree again without values logger.debug("unparsed tokens remain, possible value encountered") unparsed.pop() parsed.reverse() unparsed.extend(parsed) logger.debug("resuming treewalk with tokens: %s", unparsed) return self.treewalk(self.ast, parsed=list(), unparsed=unparsed) else: return parsed, unparsed, suggestions
python
def parse_tokens(self, tokens): """ Parse a sequence of tokens returns tuple of (parsed tokens, suggestions) """ if len(tokens) == 1: return list(), tokens, {"kubectl": self.ast.help} else: tokens.reverse() parsed, unparsed, suggestions = self.treewalk(self.ast, parsed=list(), unparsed=tokens) if not suggestions and unparsed: # TODO: @vogxn: This is hack until we include expected value types for each option and argument. # Whenver we recieve no suggestions but are left with unparsed tokens we pop the value and walk the # tree again without values logger.debug("unparsed tokens remain, possible value encountered") unparsed.pop() parsed.reverse() unparsed.extend(parsed) logger.debug("resuming treewalk with tokens: %s", unparsed) return self.treewalk(self.ast, parsed=list(), unparsed=unparsed) else: return parsed, unparsed, suggestions
[ "def", "parse_tokens", "(", "self", ",", "tokens", ")", ":", "if", "len", "(", "tokens", ")", "==", "1", ":", "return", "list", "(", ")", ",", "tokens", ",", "{", "\"kubectl\"", ":", "self", ".", "ast", ".", "help", "}", "else", ":", "tokens", ".", "reverse", "(", ")", "parsed", ",", "unparsed", ",", "suggestions", "=", "self", ".", "treewalk", "(", "self", ".", "ast", ",", "parsed", "=", "list", "(", ")", ",", "unparsed", "=", "tokens", ")", "if", "not", "suggestions", "and", "unparsed", ":", "# TODO: @vogxn: This is hack until we include expected value types for each option and argument.", "# Whenver we recieve no suggestions but are left with unparsed tokens we pop the value and walk the", "# tree again without values", "logger", ".", "debug", "(", "\"unparsed tokens remain, possible value encountered\"", ")", "unparsed", ".", "pop", "(", ")", "parsed", ".", "reverse", "(", ")", "unparsed", ".", "extend", "(", "parsed", ")", "logger", ".", "debug", "(", "\"resuming treewalk with tokens: %s\"", ",", "unparsed", ")", "return", "self", ".", "treewalk", "(", "self", ".", "ast", ",", "parsed", "=", "list", "(", ")", ",", "unparsed", "=", "unparsed", ")", "else", ":", "return", "parsed", ",", "unparsed", ",", "suggestions" ]
Parse a sequence of tokens returns tuple of (parsed tokens, suggestions)
[ "Parse", "a", "sequence", "of", "tokens" ]
adc801d165e87fe62f82b074ec49996954c3fbe8
https://github.com/cloudnativelabs/kube-shell/blob/adc801d165e87fe62f82b074ec49996954c3fbe8/kubeshell/parser.py#L69-L90
227,436
cloudnativelabs/kube-shell
kubeshell/parser.py
Parser.treewalk
def treewalk(self, root, parsed, unparsed): """ Recursively walks the syntax tree at root and returns the items parsed, unparsed and possible suggestions """ suggestions = dict() if not unparsed: logger.debug("no tokens left unparsed. returning %s, %s", parsed, suggestions) return parsed, unparsed, suggestions token = unparsed.pop().strip() logger.debug("begin parsing at %s w/ tokens: %s", root.node, unparsed) if root.node == token: logger.debug("root node: %s matches next token:%s", root.node, token) parsed.append(token) if self.peekForOption(unparsed): # check for localFlags and globalFlags logger.debug("option(s) upcoming %s", unparsed) parsed_opts, unparsed, suggestions = self.evalOptions(root, list(), unparsed[:]) if parsed_opts: logger.debug("parsed option(s): %s", parsed_opts) parsed.extend(parsed_opts) if unparsed and not self.peekForOption(unparsed): # unparsed bits without options logger.debug("begin subtree %s parsing", root.node) for child in root.children: parsed_subtree, unparsed, suggestions = self.treewalk(child, list(), unparsed[:]) if parsed_subtree: # subtree returned further parsed tokens parsed.extend(parsed_subtree) logger.debug("subtree at: %s has matches. %s, %s", child.node, parsed, unparsed) break else: # no matches found in command tree # return children of root as suggestions logger.debug("no matches in subtree: %s. returning children as suggestions", root.node) for child in root.children: suggestions[child.node] = child.help else: logger.debug("no token or option match") unparsed.append(token) return parsed, unparsed, suggestions
python
def treewalk(self, root, parsed, unparsed): """ Recursively walks the syntax tree at root and returns the items parsed, unparsed and possible suggestions """ suggestions = dict() if not unparsed: logger.debug("no tokens left unparsed. returning %s, %s", parsed, suggestions) return parsed, unparsed, suggestions token = unparsed.pop().strip() logger.debug("begin parsing at %s w/ tokens: %s", root.node, unparsed) if root.node == token: logger.debug("root node: %s matches next token:%s", root.node, token) parsed.append(token) if self.peekForOption(unparsed): # check for localFlags and globalFlags logger.debug("option(s) upcoming %s", unparsed) parsed_opts, unparsed, suggestions = self.evalOptions(root, list(), unparsed[:]) if parsed_opts: logger.debug("parsed option(s): %s", parsed_opts) parsed.extend(parsed_opts) if unparsed and not self.peekForOption(unparsed): # unparsed bits without options logger.debug("begin subtree %s parsing", root.node) for child in root.children: parsed_subtree, unparsed, suggestions = self.treewalk(child, list(), unparsed[:]) if parsed_subtree: # subtree returned further parsed tokens parsed.extend(parsed_subtree) logger.debug("subtree at: %s has matches. %s, %s", child.node, parsed, unparsed) break else: # no matches found in command tree # return children of root as suggestions logger.debug("no matches in subtree: %s. returning children as suggestions", root.node) for child in root.children: suggestions[child.node] = child.help else: logger.debug("no token or option match") unparsed.append(token) return parsed, unparsed, suggestions
[ "def", "treewalk", "(", "self", ",", "root", ",", "parsed", ",", "unparsed", ")", ":", "suggestions", "=", "dict", "(", ")", "if", "not", "unparsed", ":", "logger", ".", "debug", "(", "\"no tokens left unparsed. returning %s, %s\"", ",", "parsed", ",", "suggestions", ")", "return", "parsed", ",", "unparsed", ",", "suggestions", "token", "=", "unparsed", ".", "pop", "(", ")", ".", "strip", "(", ")", "logger", ".", "debug", "(", "\"begin parsing at %s w/ tokens: %s\"", ",", "root", ".", "node", ",", "unparsed", ")", "if", "root", ".", "node", "==", "token", ":", "logger", ".", "debug", "(", "\"root node: %s matches next token:%s\"", ",", "root", ".", "node", ",", "token", ")", "parsed", ".", "append", "(", "token", ")", "if", "self", ".", "peekForOption", "(", "unparsed", ")", ":", "# check for localFlags and globalFlags", "logger", ".", "debug", "(", "\"option(s) upcoming %s\"", ",", "unparsed", ")", "parsed_opts", ",", "unparsed", ",", "suggestions", "=", "self", ".", "evalOptions", "(", "root", ",", "list", "(", ")", ",", "unparsed", "[", ":", "]", ")", "if", "parsed_opts", ":", "logger", ".", "debug", "(", "\"parsed option(s): %s\"", ",", "parsed_opts", ")", "parsed", ".", "extend", "(", "parsed_opts", ")", "if", "unparsed", "and", "not", "self", ".", "peekForOption", "(", "unparsed", ")", ":", "# unparsed bits without options", "logger", ".", "debug", "(", "\"begin subtree %s parsing\"", ",", "root", ".", "node", ")", "for", "child", "in", "root", ".", "children", ":", "parsed_subtree", ",", "unparsed", ",", "suggestions", "=", "self", ".", "treewalk", "(", "child", ",", "list", "(", ")", ",", "unparsed", "[", ":", "]", ")", "if", "parsed_subtree", ":", "# subtree returned further parsed tokens", "parsed", ".", "extend", "(", "parsed_subtree", ")", "logger", ".", "debug", "(", "\"subtree at: %s has matches. 
%s, %s\"", ",", "child", ".", "node", ",", "parsed", ",", "unparsed", ")", "break", "else", ":", "# no matches found in command tree", "# return children of root as suggestions", "logger", ".", "debug", "(", "\"no matches in subtree: %s. returning children as suggestions\"", ",", "root", ".", "node", ")", "for", "child", "in", "root", ".", "children", ":", "suggestions", "[", "child", ".", "node", "]", "=", "child", ".", "help", "else", ":", "logger", ".", "debug", "(", "\"no token or option match\"", ")", "unparsed", ".", "append", "(", "token", ")", "return", "parsed", ",", "unparsed", ",", "suggestions" ]
Recursively walks the syntax tree at root and returns the items parsed, unparsed and possible suggestions
[ "Recursively", "walks", "the", "syntax", "tree", "at", "root", "and", "returns", "the", "items", "parsed", "unparsed", "and", "possible", "suggestions" ]
adc801d165e87fe62f82b074ec49996954c3fbe8
https://github.com/cloudnativelabs/kube-shell/blob/adc801d165e87fe62f82b074ec49996954c3fbe8/kubeshell/parser.py#L92-L128
227,437
cloudnativelabs/kube-shell
kubeshell/parser.py
Parser.evalOptions
def evalOptions(self, root, parsed, unparsed): """ Evaluate only the options and return flags as suggestions """ logger.debug("parsing options at tree: %s with p:%s, u:%s", root.node, parsed, unparsed) suggestions = dict() token = unparsed.pop().strip() parts = token.partition('=') if parts[-1] != '': # parsing for --option=value type input token = parts[0] allFlags = root.localFlags + self.globalFlags for flag in allFlags: if flag.name == token: logger.debug("matched token: %s with flag: %s", token, flag.name) parsed.append(token) if self.peekForOption(unparsed): # recursively look for further options parsed, unparsed, suggestions = self.evalOptions(root, parsed, unparsed[:]) # elif token == "--namespace": # namespaces = [('default', None), ('minikube', None), ('gitlab', None)] # self.kube_client.get_resource("namespace") # suggestions = dict(namespaces) break else: logger.debug("no flags match, returning allFlags suggestions") for flag in allFlags: suggestions[flag.name] = flag.helptext if suggestions: # incomplete parse, replace token logger.debug("incomplete option: %s provided. returning suggestions", token) unparsed.append(token) return parsed, unparsed, suggestions
python
def evalOptions(self, root, parsed, unparsed): """ Evaluate only the options and return flags as suggestions """ logger.debug("parsing options at tree: %s with p:%s, u:%s", root.node, parsed, unparsed) suggestions = dict() token = unparsed.pop().strip() parts = token.partition('=') if parts[-1] != '': # parsing for --option=value type input token = parts[0] allFlags = root.localFlags + self.globalFlags for flag in allFlags: if flag.name == token: logger.debug("matched token: %s with flag: %s", token, flag.name) parsed.append(token) if self.peekForOption(unparsed): # recursively look for further options parsed, unparsed, suggestions = self.evalOptions(root, parsed, unparsed[:]) # elif token == "--namespace": # namespaces = [('default', None), ('minikube', None), ('gitlab', None)] # self.kube_client.get_resource("namespace") # suggestions = dict(namespaces) break else: logger.debug("no flags match, returning allFlags suggestions") for flag in allFlags: suggestions[flag.name] = flag.helptext if suggestions: # incomplete parse, replace token logger.debug("incomplete option: %s provided. returning suggestions", token) unparsed.append(token) return parsed, unparsed, suggestions
[ "def", "evalOptions", "(", "self", ",", "root", ",", "parsed", ",", "unparsed", ")", ":", "logger", ".", "debug", "(", "\"parsing options at tree: %s with p:%s, u:%s\"", ",", "root", ".", "node", ",", "parsed", ",", "unparsed", ")", "suggestions", "=", "dict", "(", ")", "token", "=", "unparsed", ".", "pop", "(", ")", ".", "strip", "(", ")", "parts", "=", "token", ".", "partition", "(", "'='", ")", "if", "parts", "[", "-", "1", "]", "!=", "''", ":", "# parsing for --option=value type input", "token", "=", "parts", "[", "0", "]", "allFlags", "=", "root", ".", "localFlags", "+", "self", ".", "globalFlags", "for", "flag", "in", "allFlags", ":", "if", "flag", ".", "name", "==", "token", ":", "logger", ".", "debug", "(", "\"matched token: %s with flag: %s\"", ",", "token", ",", "flag", ".", "name", ")", "parsed", ".", "append", "(", "token", ")", "if", "self", ".", "peekForOption", "(", "unparsed", ")", ":", "# recursively look for further options", "parsed", ",", "unparsed", ",", "suggestions", "=", "self", ".", "evalOptions", "(", "root", ",", "parsed", ",", "unparsed", "[", ":", "]", ")", "# elif token == \"--namespace\":", "# namespaces = [('default', None), ('minikube', None), ('gitlab', None)] # self.kube_client.get_resource(\"namespace\")", "# suggestions = dict(namespaces)", "break", "else", ":", "logger", ".", "debug", "(", "\"no flags match, returning allFlags suggestions\"", ")", "for", "flag", "in", "allFlags", ":", "suggestions", "[", "flag", ".", "name", "]", "=", "flag", ".", "helptext", "if", "suggestions", ":", "# incomplete parse, replace token", "logger", ".", "debug", "(", "\"incomplete option: %s provided. returning suggestions\"", ",", "token", ")", "unparsed", ".", "append", "(", "token", ")", "return", "parsed", ",", "unparsed", ",", "suggestions" ]
Evaluate only the options and return flags as suggestions
[ "Evaluate", "only", "the", "options", "and", "return", "flags", "as", "suggestions" ]
adc801d165e87fe62f82b074ec49996954c3fbe8
https://github.com/cloudnativelabs/kube-shell/blob/adc801d165e87fe62f82b074ec49996954c3fbe8/kubeshell/parser.py#L136-L165
227,438
olofk/fusesoc
fusesoc/utils.py
setup_logging
def setup_logging(level, monchrome=False, log_file=None): ''' Utility function for setting up logging. ''' # Logging to file if log_file: logging.basicConfig(filename=log_file, filemode='w', level=logging.DEBUG) # Pretty color terminal logging ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) formatter = ColoredFormatter("%(levelname)s: %(message)s", monchrome) ch.setFormatter(formatter) # Which packages do we want to log from. packages = ('__main__', 'fusesoc',) for package in packages: logger = logging.getLogger(package) logger.addHandler(ch) logger.setLevel(level) # Warning only packages warning_only_packages = [] for package in warning_only_packages: logger = logging.getLogger(package) logger.addHandler(ch) logger.setLevel(logging.WARNING) logger.debug('Setup logging at level {}.'.format(level))
python
def setup_logging(level, monchrome=False, log_file=None): ''' Utility function for setting up logging. ''' # Logging to file if log_file: logging.basicConfig(filename=log_file, filemode='w', level=logging.DEBUG) # Pretty color terminal logging ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) formatter = ColoredFormatter("%(levelname)s: %(message)s", monchrome) ch.setFormatter(formatter) # Which packages do we want to log from. packages = ('__main__', 'fusesoc',) for package in packages: logger = logging.getLogger(package) logger.addHandler(ch) logger.setLevel(level) # Warning only packages warning_only_packages = [] for package in warning_only_packages: logger = logging.getLogger(package) logger.addHandler(ch) logger.setLevel(logging.WARNING) logger.debug('Setup logging at level {}.'.format(level))
[ "def", "setup_logging", "(", "level", ",", "monchrome", "=", "False", ",", "log_file", "=", "None", ")", ":", "# Logging to file", "if", "log_file", ":", "logging", ".", "basicConfig", "(", "filename", "=", "log_file", ",", "filemode", "=", "'w'", ",", "level", "=", "logging", ".", "DEBUG", ")", "# Pretty color terminal logging", "ch", "=", "logging", ".", "StreamHandler", "(", ")", "ch", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "formatter", "=", "ColoredFormatter", "(", "\"%(levelname)s: %(message)s\"", ",", "monchrome", ")", "ch", ".", "setFormatter", "(", "formatter", ")", "# Which packages do we want to log from.", "packages", "=", "(", "'__main__'", ",", "'fusesoc'", ",", ")", "for", "package", "in", "packages", ":", "logger", "=", "logging", ".", "getLogger", "(", "package", ")", "logger", ".", "addHandler", "(", "ch", ")", "logger", ".", "setLevel", "(", "level", ")", "# Warning only packages", "warning_only_packages", "=", "[", "]", "for", "package", "in", "warning_only_packages", ":", "logger", "=", "logging", ".", "getLogger", "(", "package", ")", "logger", ".", "addHandler", "(", "ch", ")", "logger", ".", "setLevel", "(", "logging", ".", "WARNING", ")", "logger", ".", "debug", "(", "'Setup logging at level {}.'", ".", "format", "(", "level", ")", ")" ]
Utility function for setting up logging.
[ "Utility", "function", "for", "setting", "up", "logging", "." ]
e30c6a30f6e4c2f4a568b3e8f53edce64b4481cd
https://github.com/olofk/fusesoc/blob/e30c6a30f6e4c2f4a568b3e8f53edce64b4481cd/fusesoc/utils.py#L84-L110
227,439
olofk/fusesoc
fusesoc/edalizer.py
Ttptttg.generate
def generate(self, cache_root): """Run a parametrized generator Args: cache_root (str): The directory where to store the generated cores Returns: list: Cores created by the generator """ generator_cwd = os.path.join(cache_root, 'generated', self.vlnv.sanitized_name) generator_input_file = os.path.join(generator_cwd, self.name+'_input.yml') logger.info('Generating ' + str(self.vlnv)) if not os.path.exists(generator_cwd): os.makedirs(generator_cwd) with open(generator_input_file, 'w') as f: f.write(yaml.dump(self.generator_input)) args = [os.path.join(os.path.abspath(self.generator.root), self.generator.command), generator_input_file] if self.generator.interpreter: args[0:0] = [self.generator.interpreter] Launcher(args[0], args[1:], cwd=generator_cwd).run() cores = [] logger.debug("Looking for generated cores in " + generator_cwd) for root, dirs, files in os.walk(generator_cwd): for f in files: if f.endswith('.core'): try: cores.append(Core(os.path.join(root, f))) except SyntaxError as e: w = "Failed to parse generated core file " + f + ": " + e.msg raise RuntimeError(w) logger.debug("Found " + ', '.join(str(c.name) for c in cores)) return cores
python
def generate(self, cache_root): """Run a parametrized generator Args: cache_root (str): The directory where to store the generated cores Returns: list: Cores created by the generator """ generator_cwd = os.path.join(cache_root, 'generated', self.vlnv.sanitized_name) generator_input_file = os.path.join(generator_cwd, self.name+'_input.yml') logger.info('Generating ' + str(self.vlnv)) if not os.path.exists(generator_cwd): os.makedirs(generator_cwd) with open(generator_input_file, 'w') as f: f.write(yaml.dump(self.generator_input)) args = [os.path.join(os.path.abspath(self.generator.root), self.generator.command), generator_input_file] if self.generator.interpreter: args[0:0] = [self.generator.interpreter] Launcher(args[0], args[1:], cwd=generator_cwd).run() cores = [] logger.debug("Looking for generated cores in " + generator_cwd) for root, dirs, files in os.walk(generator_cwd): for f in files: if f.endswith('.core'): try: cores.append(Core(os.path.join(root, f))) except SyntaxError as e: w = "Failed to parse generated core file " + f + ": " + e.msg raise RuntimeError(w) logger.debug("Found " + ', '.join(str(c.name) for c in cores)) return cores
[ "def", "generate", "(", "self", ",", "cache_root", ")", ":", "generator_cwd", "=", "os", ".", "path", ".", "join", "(", "cache_root", ",", "'generated'", ",", "self", ".", "vlnv", ".", "sanitized_name", ")", "generator_input_file", "=", "os", ".", "path", ".", "join", "(", "generator_cwd", ",", "self", ".", "name", "+", "'_input.yml'", ")", "logger", ".", "info", "(", "'Generating '", "+", "str", "(", "self", ".", "vlnv", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "generator_cwd", ")", ":", "os", ".", "makedirs", "(", "generator_cwd", ")", "with", "open", "(", "generator_input_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "yaml", ".", "dump", "(", "self", ".", "generator_input", ")", ")", "args", "=", "[", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "abspath", "(", "self", ".", "generator", ".", "root", ")", ",", "self", ".", "generator", ".", "command", ")", ",", "generator_input_file", "]", "if", "self", ".", "generator", ".", "interpreter", ":", "args", "[", "0", ":", "0", "]", "=", "[", "self", ".", "generator", ".", "interpreter", "]", "Launcher", "(", "args", "[", "0", "]", ",", "args", "[", "1", ":", "]", ",", "cwd", "=", "generator_cwd", ")", ".", "run", "(", ")", "cores", "=", "[", "]", "logger", ".", "debug", "(", "\"Looking for generated cores in \"", "+", "generator_cwd", ")", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "generator_cwd", ")", ":", "for", "f", "in", "files", ":", "if", "f", ".", "endswith", "(", "'.core'", ")", ":", "try", ":", "cores", ".", "append", "(", "Core", "(", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", ")", ")", "except", "SyntaxError", "as", "e", ":", "w", "=", "\"Failed to parse generated core file \"", "+", "f", "+", "\": \"", "+", "e", ".", "msg", "raise", "RuntimeError", "(", "w", ")", "logger", ".", "debug", "(", "\"Found \"", "+", "', '", ".", "join", "(", "str", "(", "c", ".", "name", ")", "for", "c", "in", "cores", ")", 
")", "return", "cores" ]
Run a parametrized generator Args: cache_root (str): The directory where to store the generated cores Returns: list: Cores created by the generator
[ "Run", "a", "parametrized", "generator" ]
e30c6a30f6e4c2f4a568b3e8f53edce64b4481cd
https://github.com/olofk/fusesoc/blob/e30c6a30f6e4c2f4a568b3e8f53edce64b4481cd/fusesoc/edalizer.py#L165-L203
227,440
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/fields.py
Field._range_check
def _range_check(self, value, min_value, max_value): ''' Utility method to check that the given value is between min_value and max_value. ''' if value < min_value or value > max_value: raise ValueError('%s out of range - %s is not between %s and %s' % (self.__class__.__name__, value, min_value, max_value))
python
def _range_check(self, value, min_value, max_value): ''' Utility method to check that the given value is between min_value and max_value. ''' if value < min_value or value > max_value: raise ValueError('%s out of range - %s is not between %s and %s' % (self.__class__.__name__, value, min_value, max_value))
[ "def", "_range_check", "(", "self", ",", "value", ",", "min_value", ",", "max_value", ")", ":", "if", "value", "<", "min_value", "or", "value", ">", "max_value", ":", "raise", "ValueError", "(", "'%s out of range - %s is not between %s and %s'", "%", "(", "self", ".", "__class__", ".", "__name__", ",", "value", ",", "min_value", ",", "max_value", ")", ")" ]
Utility method to check that the given value is between min_value and max_value.
[ "Utility", "method", "to", "check", "that", "the", "given", "value", "is", "between", "min_value", "and", "max_value", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/fields.py#L52-L57
227,441
Infinidat/infi.clickhouse_orm
scripts/generate_ref.py
_get_default_arg
def _get_default_arg(args, defaults, arg_index): """ Method that determines if an argument has default value or not, and if yes what is the default value for the argument :param args: array of arguments, eg: ['first_arg', 'second_arg', 'third_arg'] :param defaults: array of default values, eg: (42, 'something') :param arg_index: index of the argument in the argument array for which, this function checks if a default value exists or not. And if default value exists it would return the default value. Example argument: 1 :return: Tuple of whether there is a default or not, and if yes the default value, eg: for index 2 i.e. for "second_arg" this function returns (True, 42) """ if not defaults: return DefaultArgSpec(False, None) args_with_no_defaults = len(args) - len(defaults) if arg_index < args_with_no_defaults: return DefaultArgSpec(False, None) else: value = defaults[arg_index - args_with_no_defaults] if (type(value) is str): value = '"%s"' % value return DefaultArgSpec(True, value)
python
def _get_default_arg(args, defaults, arg_index): """ Method that determines if an argument has default value or not, and if yes what is the default value for the argument :param args: array of arguments, eg: ['first_arg', 'second_arg', 'third_arg'] :param defaults: array of default values, eg: (42, 'something') :param arg_index: index of the argument in the argument array for which, this function checks if a default value exists or not. And if default value exists it would return the default value. Example argument: 1 :return: Tuple of whether there is a default or not, and if yes the default value, eg: for index 2 i.e. for "second_arg" this function returns (True, 42) """ if not defaults: return DefaultArgSpec(False, None) args_with_no_defaults = len(args) - len(defaults) if arg_index < args_with_no_defaults: return DefaultArgSpec(False, None) else: value = defaults[arg_index - args_with_no_defaults] if (type(value) is str): value = '"%s"' % value return DefaultArgSpec(True, value)
[ "def", "_get_default_arg", "(", "args", ",", "defaults", ",", "arg_index", ")", ":", "if", "not", "defaults", ":", "return", "DefaultArgSpec", "(", "False", ",", "None", ")", "args_with_no_defaults", "=", "len", "(", "args", ")", "-", "len", "(", "defaults", ")", "if", "arg_index", "<", "args_with_no_defaults", ":", "return", "DefaultArgSpec", "(", "False", ",", "None", ")", "else", ":", "value", "=", "defaults", "[", "arg_index", "-", "args_with_no_defaults", "]", "if", "(", "type", "(", "value", ")", "is", "str", ")", ":", "value", "=", "'\"%s\"'", "%", "value", "return", "DefaultArgSpec", "(", "True", ",", "value", ")" ]
Method that determines if an argument has default value or not, and if yes what is the default value for the argument :param args: array of arguments, eg: ['first_arg', 'second_arg', 'third_arg'] :param defaults: array of default values, eg: (42, 'something') :param arg_index: index of the argument in the argument array for which, this function checks if a default value exists or not. And if default value exists it would return the default value. Example argument: 1 :return: Tuple of whether there is a default or not, and if yes the default value, eg: for index 2 i.e. for "second_arg" this function returns (True, 42)
[ "Method", "that", "determines", "if", "an", "argument", "has", "default", "value", "or", "not", "and", "if", "yes", "what", "is", "the", "default", "value", "for", "the", "argument" ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/scripts/generate_ref.py#L7-L30
227,442
Infinidat/infi.clickhouse_orm
scripts/generate_ref.py
get_method_sig
def get_method_sig(method): """ Given a function, it returns a string that pretty much looks how the function signature would be written in python. :param method: a python method :return: A string similar describing the pythong method signature. eg: "my_method(first_argArg, second_arg=42, third_arg='something')" """ # The return value of ArgSpec is a bit weird, as the list of arguments and # list of defaults are returned in separate array. # eg: ArgSpec(args=['first_arg', 'second_arg', 'third_arg'], # varargs=None, keywords=None, defaults=(42, 'something')) argspec = inspect.getargspec(method) arg_index=0 args = [] # Use the args and defaults array returned by argspec and find out # which arguments has default for arg in argspec.args: default_arg = _get_default_arg(argspec.args, argspec.defaults, arg_index) if default_arg.has_default: val = default_arg.default_value if isinstance(val, basestring): val = '"' + val + '"' args.append("%s=%s" % (arg, val)) else: args.append(arg) arg_index += 1 if argspec.varargs: args.append('*' + argspec.varargs) if argspec.keywords: args.append('**' + argspec.keywords) return "%s(%s)" % (method.__name__, ", ".join(args[1:]))
python
def get_method_sig(method): """ Given a function, it returns a string that pretty much looks how the function signature would be written in python. :param method: a python method :return: A string similar describing the pythong method signature. eg: "my_method(first_argArg, second_arg=42, third_arg='something')" """ # The return value of ArgSpec is a bit weird, as the list of arguments and # list of defaults are returned in separate array. # eg: ArgSpec(args=['first_arg', 'second_arg', 'third_arg'], # varargs=None, keywords=None, defaults=(42, 'something')) argspec = inspect.getargspec(method) arg_index=0 args = [] # Use the args and defaults array returned by argspec and find out # which arguments has default for arg in argspec.args: default_arg = _get_default_arg(argspec.args, argspec.defaults, arg_index) if default_arg.has_default: val = default_arg.default_value if isinstance(val, basestring): val = '"' + val + '"' args.append("%s=%s" % (arg, val)) else: args.append(arg) arg_index += 1 if argspec.varargs: args.append('*' + argspec.varargs) if argspec.keywords: args.append('**' + argspec.keywords) return "%s(%s)" % (method.__name__, ", ".join(args[1:]))
[ "def", "get_method_sig", "(", "method", ")", ":", "# The return value of ArgSpec is a bit weird, as the list of arguments and", "# list of defaults are returned in separate array.", "# eg: ArgSpec(args=['first_arg', 'second_arg', 'third_arg'],", "# varargs=None, keywords=None, defaults=(42, 'something'))", "argspec", "=", "inspect", ".", "getargspec", "(", "method", ")", "arg_index", "=", "0", "args", "=", "[", "]", "# Use the args and defaults array returned by argspec and find out", "# which arguments has default", "for", "arg", "in", "argspec", ".", "args", ":", "default_arg", "=", "_get_default_arg", "(", "argspec", ".", "args", ",", "argspec", ".", "defaults", ",", "arg_index", ")", "if", "default_arg", ".", "has_default", ":", "val", "=", "default_arg", ".", "default_value", "if", "isinstance", "(", "val", ",", "basestring", ")", ":", "val", "=", "'\"'", "+", "val", "+", "'\"'", "args", ".", "append", "(", "\"%s=%s\"", "%", "(", "arg", ",", "val", ")", ")", "else", ":", "args", ".", "append", "(", "arg", ")", "arg_index", "+=", "1", "if", "argspec", ".", "varargs", ":", "args", ".", "append", "(", "'*'", "+", "argspec", ".", "varargs", ")", "if", "argspec", ".", "keywords", ":", "args", ".", "append", "(", "'**'", "+", "argspec", ".", "keywords", ")", "return", "\"%s(%s)\"", "%", "(", "method", ".", "__name__", ",", "\", \"", ".", "join", "(", "args", "[", "1", ":", "]", ")", ")" ]
Given a function, it returns a string that pretty much looks how the function signature would be written in python. :param method: a python method :return: A string similar describing the pythong method signature. eg: "my_method(first_argArg, second_arg=42, third_arg='something')"
[ "Given", "a", "function", "it", "returns", "a", "string", "that", "pretty", "much", "looks", "how", "the", "function", "signature", "would", "be", "written", "in", "python", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/scripts/generate_ref.py#L32-L65
227,443
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/query.py
AggregateQuerySet.group_by
def group_by(self, *args): """ This method lets you specify the grouping fields explicitly. The `args` must be names of grouping fields or calculated fields that this queryset was created with. """ for name in args: assert name in self._fields or name in self._calculated_fields, \ 'Cannot group by `%s` since it is not included in the query' % name qs = copy(self) qs._grouping_fields = args return qs
python
def group_by(self, *args): """ This method lets you specify the grouping fields explicitly. The `args` must be names of grouping fields or calculated fields that this queryset was created with. """ for name in args: assert name in self._fields or name in self._calculated_fields, \ 'Cannot group by `%s` since it is not included in the query' % name qs = copy(self) qs._grouping_fields = args return qs
[ "def", "group_by", "(", "self", ",", "*", "args", ")", ":", "for", "name", "in", "args", ":", "assert", "name", "in", "self", ".", "_fields", "or", "name", "in", "self", ".", "_calculated_fields", ",", "'Cannot group by `%s` since it is not included in the query'", "%", "name", "qs", "=", "copy", "(", "self", ")", "qs", ".", "_grouping_fields", "=", "args", "return", "qs" ]
This method lets you specify the grouping fields explicitly. The `args` must be names of grouping fields or calculated fields that this queryset was created with.
[ "This", "method", "lets", "you", "specify", "the", "grouping", "fields", "explicitly", ".", "The", "args", "must", "be", "names", "of", "grouping", "fields", "or", "calculated", "fields", "that", "this", "queryset", "was", "created", "with", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/query.py#L554-L565
227,444
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/query.py
AggregateQuerySet.select_fields_as_sql
def select_fields_as_sql(self): """ Returns the selected fields or expressions as a SQL string. """ return comma_join(list(self._fields) + ['%s AS %s' % (v, k) for k, v in self._calculated_fields.items()])
python
def select_fields_as_sql(self): """ Returns the selected fields or expressions as a SQL string. """ return comma_join(list(self._fields) + ['%s AS %s' % (v, k) for k, v in self._calculated_fields.items()])
[ "def", "select_fields_as_sql", "(", "self", ")", ":", "return", "comma_join", "(", "list", "(", "self", ".", "_fields", ")", "+", "[", "'%s AS %s'", "%", "(", "v", ",", "k", ")", "for", "k", ",", "v", "in", "self", ".", "_calculated_fields", ".", "items", "(", ")", "]", ")" ]
Returns the selected fields or expressions as a SQL string.
[ "Returns", "the", "selected", "fields", "or", "expressions", "as", "a", "SQL", "string", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/query.py#L579-L583
227,445
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/query.py
AggregateQuerySet.count
def count(self): """ Returns the number of rows after aggregation. """ sql = u'SELECT count() FROM (%s)' % self.as_sql() raw = self._database.raw(sql) return int(raw) if raw else 0
python
def count(self): """ Returns the number of rows after aggregation. """ sql = u'SELECT count() FROM (%s)' % self.as_sql() raw = self._database.raw(sql) return int(raw) if raw else 0
[ "def", "count", "(", "self", ")", ":", "sql", "=", "u'SELECT count() FROM (%s)'", "%", "self", ".", "as_sql", "(", ")", "raw", "=", "self", ".", "_database", ".", "raw", "(", "sql", ")", "return", "int", "(", "raw", ")", "if", "raw", "else", "0" ]
Returns the number of rows after aggregation.
[ "Returns", "the", "number", "of", "rows", "after", "aggregation", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/query.py#L588-L594
227,446
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/models.py
Model.set_database
def set_database(self, db): ''' Sets the `Database` that this model instance belongs to. This is done automatically when the instance is read from the database or written to it. ''' # This can not be imported globally due to circular import from .database import Database assert isinstance(db, Database), "database must be database.Database instance" self._database = db
python
def set_database(self, db): ''' Sets the `Database` that this model instance belongs to. This is done automatically when the instance is read from the database or written to it. ''' # This can not be imported globally due to circular import from .database import Database assert isinstance(db, Database), "database must be database.Database instance" self._database = db
[ "def", "set_database", "(", "self", ",", "db", ")", ":", "# This can not be imported globally due to circular import", "from", ".", "database", "import", "Database", "assert", "isinstance", "(", "db", ",", "Database", ")", ",", "\"database must be database.Database instance\"", "self", ".", "_database", "=", "db" ]
Sets the `Database` that this model instance belongs to. This is done automatically when the instance is read from the database or written to it.
[ "Sets", "the", "Database", "that", "this", "model", "instance", "belongs", "to", ".", "This", "is", "done", "automatically", "when", "the", "instance", "is", "read", "from", "the", "database", "or", "written", "to", "it", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/models.py#L153-L161
227,447
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/models.py
Model.from_tsv
def from_tsv(cls, line, field_names, timezone_in_use=pytz.utc, database=None): ''' Create a model instance from a tab-separated line. The line may or may not include a newline. The `field_names` list must match the fields defined in the model, but does not have to include all of them. - `line`: the TSV-formatted data. - `field_names`: names of the model fields in the data. - `timezone_in_use`: the timezone to use when parsing dates and datetimes. - `database`: if given, sets the database that this instance belongs to. ''' from six import next values = iter(parse_tsv(line)) kwargs = {} for name in field_names: field = getattr(cls, name) kwargs[name] = field.to_python(next(values), timezone_in_use) obj = cls(**kwargs) if database is not None: obj.set_database(database) return obj
python
def from_tsv(cls, line, field_names, timezone_in_use=pytz.utc, database=None): ''' Create a model instance from a tab-separated line. The line may or may not include a newline. The `field_names` list must match the fields defined in the model, but does not have to include all of them. - `line`: the TSV-formatted data. - `field_names`: names of the model fields in the data. - `timezone_in_use`: the timezone to use when parsing dates and datetimes. - `database`: if given, sets the database that this instance belongs to. ''' from six import next values = iter(parse_tsv(line)) kwargs = {} for name in field_names: field = getattr(cls, name) kwargs[name] = field.to_python(next(values), timezone_in_use) obj = cls(**kwargs) if database is not None: obj.set_database(database) return obj
[ "def", "from_tsv", "(", "cls", ",", "line", ",", "field_names", ",", "timezone_in_use", "=", "pytz", ".", "utc", ",", "database", "=", "None", ")", ":", "from", "six", "import", "next", "values", "=", "iter", "(", "parse_tsv", "(", "line", ")", ")", "kwargs", "=", "{", "}", "for", "name", "in", "field_names", ":", "field", "=", "getattr", "(", "cls", ",", "name", ")", "kwargs", "[", "name", "]", "=", "field", ".", "to_python", "(", "next", "(", "values", ")", ",", "timezone_in_use", ")", "obj", "=", "cls", "(", "*", "*", "kwargs", ")", "if", "database", "is", "not", "None", ":", "obj", ".", "set_database", "(", "database", ")", "return", "obj" ]
Create a model instance from a tab-separated line. The line may or may not include a newline. The `field_names` list must match the fields defined in the model, but does not have to include all of them. - `line`: the TSV-formatted data. - `field_names`: names of the model fields in the data. - `timezone_in_use`: the timezone to use when parsing dates and datetimes. - `database`: if given, sets the database that this instance belongs to.
[ "Create", "a", "model", "instance", "from", "a", "tab", "-", "separated", "line", ".", "The", "line", "may", "or", "may", "not", "include", "a", "newline", ".", "The", "field_names", "list", "must", "match", "the", "fields", "defined", "in", "the", "model", "but", "does", "not", "have", "to", "include", "all", "of", "them", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/models.py#L207-L228
227,448
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/models.py
Model.to_tsv
def to_tsv(self, include_readonly=True): ''' Returns the instance's column values as a tab-separated line. A newline is not included. - `include_readonly`: if false, returns only fields that can be inserted into database. ''' data = self.__dict__ fields = self.fields(writable=not include_readonly) return '\t'.join(field.to_db_string(data[name], quote=False) for name, field in iteritems(fields))
python
def to_tsv(self, include_readonly=True): ''' Returns the instance's column values as a tab-separated line. A newline is not included. - `include_readonly`: if false, returns only fields that can be inserted into database. ''' data = self.__dict__ fields = self.fields(writable=not include_readonly) return '\t'.join(field.to_db_string(data[name], quote=False) for name, field in iteritems(fields))
[ "def", "to_tsv", "(", "self", ",", "include_readonly", "=", "True", ")", ":", "data", "=", "self", ".", "__dict__", "fields", "=", "self", ".", "fields", "(", "writable", "=", "not", "include_readonly", ")", "return", "'\\t'", ".", "join", "(", "field", ".", "to_db_string", "(", "data", "[", "name", "]", ",", "quote", "=", "False", ")", "for", "name", ",", "field", "in", "iteritems", "(", "fields", ")", ")" ]
Returns the instance's column values as a tab-separated line. A newline is not included. - `include_readonly`: if false, returns only fields that can be inserted into database.
[ "Returns", "the", "instance", "s", "column", "values", "as", "a", "tab", "-", "separated", "line", ".", "A", "newline", "is", "not", "included", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/models.py#L230-L238
227,449
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/models.py
Model.to_dict
def to_dict(self, include_readonly=True, field_names=None): ''' Returns the instance's column values as a dict. - `include_readonly`: if false, returns only fields that can be inserted into database. - `field_names`: an iterable of field names to return (optional) ''' fields = self.fields(writable=not include_readonly) if field_names is not None: fields = [f for f in fields if f in field_names] data = self.__dict__ return {name: data[name] for name in fields}
python
def to_dict(self, include_readonly=True, field_names=None): ''' Returns the instance's column values as a dict. - `include_readonly`: if false, returns only fields that can be inserted into database. - `field_names`: an iterable of field names to return (optional) ''' fields = self.fields(writable=not include_readonly) if field_names is not None: fields = [f for f in fields if f in field_names] data = self.__dict__ return {name: data[name] for name in fields}
[ "def", "to_dict", "(", "self", ",", "include_readonly", "=", "True", ",", "field_names", "=", "None", ")", ":", "fields", "=", "self", ".", "fields", "(", "writable", "=", "not", "include_readonly", ")", "if", "field_names", "is", "not", "None", ":", "fields", "=", "[", "f", "for", "f", "in", "fields", "if", "f", "in", "field_names", "]", "data", "=", "self", ".", "__dict__", "return", "{", "name", ":", "data", "[", "name", "]", "for", "name", "in", "fields", "}" ]
Returns the instance's column values as a dict. - `include_readonly`: if false, returns only fields that can be inserted into database. - `field_names`: an iterable of field names to return (optional)
[ "Returns", "the", "instance", "s", "column", "values", "as", "a", "dict", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/models.py#L240-L253
227,450
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/utils.py
import_submodules
def import_submodules(package_name): """ Import all submodules of a module. """ import importlib, pkgutil package = importlib.import_module(package_name) return { name: importlib.import_module(package_name + '.' + name) for _, name, _ in pkgutil.iter_modules(package.__path__) }
python
def import_submodules(package_name): """ Import all submodules of a module. """ import importlib, pkgutil package = importlib.import_module(package_name) return { name: importlib.import_module(package_name + '.' + name) for _, name, _ in pkgutil.iter_modules(package.__path__) }
[ "def", "import_submodules", "(", "package_name", ")", ":", "import", "importlib", ",", "pkgutil", "package", "=", "importlib", ".", "import_module", "(", "package_name", ")", "return", "{", "name", ":", "importlib", ".", "import_module", "(", "package_name", "+", "'.'", "+", "name", ")", "for", "_", ",", "name", ",", "_", "in", "pkgutil", ".", "iter_modules", "(", "package", ".", "__path__", ")", "}" ]
Import all submodules of a module.
[ "Import", "all", "submodules", "of", "a", "module", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/utils.py#L84-L93
227,451
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
ServerError.get_error_code_msg
def get_error_code_msg(cls, full_error_message): """ Extract the code and message of the exception that clickhouse-server generated. See the list of error codes here: https://github.com/yandex/ClickHouse/blob/master/dbms/src/Common/ErrorCodes.cpp """ for pattern in cls.ERROR_PATTERNS: match = pattern.match(full_error_message) if match: # assert match.group('type1') == match.group('type2') return int(match.group('code')), match.group('msg').strip() return 0, full_error_message
python
def get_error_code_msg(cls, full_error_message): """ Extract the code and message of the exception that clickhouse-server generated. See the list of error codes here: https://github.com/yandex/ClickHouse/blob/master/dbms/src/Common/ErrorCodes.cpp """ for pattern in cls.ERROR_PATTERNS: match = pattern.match(full_error_message) if match: # assert match.group('type1') == match.group('type2') return int(match.group('code')), match.group('msg').strip() return 0, full_error_message
[ "def", "get_error_code_msg", "(", "cls", ",", "full_error_message", ")", ":", "for", "pattern", "in", "cls", ".", "ERROR_PATTERNS", ":", "match", "=", "pattern", ".", "match", "(", "full_error_message", ")", "if", "match", ":", "# assert match.group('type1') == match.group('type2')", "return", "int", "(", "match", ".", "group", "(", "'code'", ")", ")", ",", "match", ".", "group", "(", "'msg'", ")", ".", "strip", "(", ")", "return", "0", ",", "full_error_message" ]
Extract the code and message of the exception that clickhouse-server generated. See the list of error codes here: https://github.com/yandex/ClickHouse/blob/master/dbms/src/Common/ErrorCodes.cpp
[ "Extract", "the", "code", "and", "message", "of", "the", "exception", "that", "clickhouse", "-", "server", "generated", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L58-L71
227,452
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.create_table
def create_table(self, model_class): ''' Creates a table for the given model class, if it does not exist already. ''' if model_class.is_system_model(): raise DatabaseException("You can't create system table") if getattr(model_class, 'engine') is None: raise DatabaseException("%s class must define an engine" % model_class.__name__) self._send(model_class.create_table_sql(self))
python
def create_table(self, model_class): ''' Creates a table for the given model class, if it does not exist already. ''' if model_class.is_system_model(): raise DatabaseException("You can't create system table") if getattr(model_class, 'engine') is None: raise DatabaseException("%s class must define an engine" % model_class.__name__) self._send(model_class.create_table_sql(self))
[ "def", "create_table", "(", "self", ",", "model_class", ")", ":", "if", "model_class", ".", "is_system_model", "(", ")", ":", "raise", "DatabaseException", "(", "\"You can't create system table\"", ")", "if", "getattr", "(", "model_class", ",", "'engine'", ")", "is", "None", ":", "raise", "DatabaseException", "(", "\"%s class must define an engine\"", "%", "model_class", ".", "__name__", ")", "self", ".", "_send", "(", "model_class", ".", "create_table_sql", "(", "self", ")", ")" ]
Creates a table for the given model class, if it does not exist already.
[ "Creates", "a", "table", "for", "the", "given", "model", "class", "if", "it", "does", "not", "exist", "already", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L136-L144
227,453
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.drop_table
def drop_table(self, model_class): ''' Drops the database table of the given model class, if it exists. ''' if model_class.is_system_model(): raise DatabaseException("You can't drop system table") self._send(model_class.drop_table_sql(self))
python
def drop_table(self, model_class):
    '''
    Drops the database table of the given model class, if it exists.
    '''
    # Refuse to drop ClickHouse-managed system tables.
    if model_class.is_system_model():
        raise DatabaseException("You can't drop system table")
    sql = model_class.drop_table_sql(self)
    self._send(sql)
[ "def", "drop_table", "(", "self", ",", "model_class", ")", ":", "if", "model_class", ".", "is_system_model", "(", ")", ":", "raise", "DatabaseException", "(", "\"You can't drop system table\"", ")", "self", ".", "_send", "(", "model_class", ".", "drop_table_sql", "(", "self", ")", ")" ]
Drops the database table of the given model class, if it exists.
[ "Drops", "the", "database", "table", "of", "the", "given", "model", "class", "if", "it", "exists", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L146-L152
227,454
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.does_table_exist
def does_table_exist(self, model_class): ''' Checks whether a table for the given model class already exists. Note that this only checks for existence of a table with the expected name. ''' sql = "SELECT count() FROM system.tables WHERE database = '%s' AND name = '%s'" r = self._send(sql % (self.db_name, model_class.table_name())) return r.text.strip() == '1'
python
def does_table_exist(self, model_class):
    '''
    Checks whether a table for the given model class already exists.
    Note that this only checks for existence of a table with the expected name.
    '''
    # Query ClickHouse's catalog; count() is 1 when the table is present.
    query = "SELECT count() FROM system.tables WHERE database = '%s' AND name = '%s'" % (self.db_name, model_class.table_name())
    response = self._send(query)
    return response.text.strip() == '1'
[ "def", "does_table_exist", "(", "self", ",", "model_class", ")", ":", "sql", "=", "\"SELECT count() FROM system.tables WHERE database = '%s' AND name = '%s'\"", "r", "=", "self", ".", "_send", "(", "sql", "%", "(", "self", ".", "db_name", ",", "model_class", ".", "table_name", "(", ")", ")", ")", "return", "r", ".", "text", ".", "strip", "(", ")", "==", "'1'" ]
Checks whether a table for the given model class already exists. Note that this only checks for existence of a table with the expected name.
[ "Checks", "whether", "a", "table", "for", "the", "given", "model", "class", "already", "exists", ".", "Note", "that", "this", "only", "checks", "for", "existence", "of", "a", "table", "with", "the", "expected", "name", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L154-L161
227,455
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.insert
def insert(self, model_instances, batch_size=1000): ''' Insert records into the database. - `model_instances`: any iterable containing instances of a single model class. - `batch_size`: number of records to send per chunk (use a lower number if your records are very large). ''' from six import next from io import BytesIO i = iter(model_instances) try: first_instance = next(i) except StopIteration: return # model_instances is empty model_class = first_instance.__class__ if first_instance.is_read_only() or first_instance.is_system_model(): raise DatabaseException("You can't insert into read only and system tables") fields_list = ','.join( ['`%s`' % name for name in first_instance.fields(writable=True)]) def gen(): buf = BytesIO() query = 'INSERT INTO $table (%s) FORMAT TabSeparated\n' % fields_list buf.write(self._substitute(query, model_class).encode('utf-8')) first_instance.set_database(self) buf.write(first_instance.to_tsv(include_readonly=False).encode('utf-8')) buf.write('\n'.encode('utf-8')) # Collect lines in batches of batch_size lines = 2 for instance in i: instance.set_database(self) buf.write(instance.to_tsv(include_readonly=False).encode('utf-8')) buf.write('\n'.encode('utf-8')) lines += 1 if lines >= batch_size: # Return the current batch of lines yield buf.getvalue() # Start a new batch buf = BytesIO() lines = 0 # Return any remaining lines in partial batch if lines: yield buf.getvalue() self._send(gen())
python
def insert(self, model_instances, batch_size=1000):
    '''
    Insert records into the database.

    - `model_instances`: any iterable containing instances of a single model class.
    - `batch_size`: number of records to send per chunk (use a lower number
      if your records are very large).
    '''
    from six import next
    from io import BytesIO
    i = iter(model_instances)
    # Pull the first instance eagerly: it determines the model class and
    # field list, and lets us bail out on an empty iterable.
    try:
        first_instance = next(i)
    except StopIteration:
        return  # model_instances is empty
    model_class = first_instance.__class__
    if first_instance.is_read_only() or first_instance.is_system_model():
        raise DatabaseException("You can't insert into read only and system tables")
    # Backtick-quoted column list for the INSERT statement (writable fields only).
    fields_list = ','.join(
        ['`%s`' % name for name in first_instance.fields(writable=True)])

    def gen():
        # Generator yielding the request body in chunks: the INSERT header
        # plus TSV-encoded rows, flushed whenever `lines` reaches batch_size.
        buf = BytesIO()
        query = 'INSERT INTO $table (%s) FORMAT TabSeparated\n' % fields_list
        buf.write(self._substitute(query, model_class).encode('utf-8'))
        first_instance.set_database(self)
        buf.write(first_instance.to_tsv(include_readonly=False).encode('utf-8'))
        buf.write('\n'.encode('utf-8'))
        # Collect lines in batches of batch_size
        # (`lines` starts at 2: the header line plus the first row already written).
        lines = 2
        for instance in i:
            instance.set_database(self)
            buf.write(instance.to_tsv(include_readonly=False).encode('utf-8'))
            buf.write('\n'.encode('utf-8'))
            lines += 1
            if lines >= batch_size:
                # Return the current batch of lines
                yield buf.getvalue()
                # Start a new batch
                buf = BytesIO()
                lines = 0
        # Return any remaining lines in partial batch
        if lines:
            yield buf.getvalue()

    self._send(gen())
[ "def", "insert", "(", "self", ",", "model_instances", ",", "batch_size", "=", "1000", ")", ":", "from", "six", "import", "next", "from", "io", "import", "BytesIO", "i", "=", "iter", "(", "model_instances", ")", "try", ":", "first_instance", "=", "next", "(", "i", ")", "except", "StopIteration", ":", "return", "# model_instances is empty", "model_class", "=", "first_instance", ".", "__class__", "if", "first_instance", ".", "is_read_only", "(", ")", "or", "first_instance", ".", "is_system_model", "(", ")", ":", "raise", "DatabaseException", "(", "\"You can't insert into read only and system tables\"", ")", "fields_list", "=", "','", ".", "join", "(", "[", "'`%s`'", "%", "name", "for", "name", "in", "first_instance", ".", "fields", "(", "writable", "=", "True", ")", "]", ")", "def", "gen", "(", ")", ":", "buf", "=", "BytesIO", "(", ")", "query", "=", "'INSERT INTO $table (%s) FORMAT TabSeparated\\n'", "%", "fields_list", "buf", ".", "write", "(", "self", ".", "_substitute", "(", "query", ",", "model_class", ")", ".", "encode", "(", "'utf-8'", ")", ")", "first_instance", ".", "set_database", "(", "self", ")", "buf", ".", "write", "(", "first_instance", ".", "to_tsv", "(", "include_readonly", "=", "False", ")", ".", "encode", "(", "'utf-8'", ")", ")", "buf", ".", "write", "(", "'\\n'", ".", "encode", "(", "'utf-8'", ")", ")", "# Collect lines in batches of batch_size", "lines", "=", "2", "for", "instance", "in", "i", ":", "instance", ".", "set_database", "(", "self", ")", "buf", ".", "write", "(", "instance", ".", "to_tsv", "(", "include_readonly", "=", "False", ")", ".", "encode", "(", "'utf-8'", ")", ")", "buf", ".", "write", "(", "'\\n'", ".", "encode", "(", "'utf-8'", ")", ")", "lines", "+=", "1", "if", "lines", ">=", "batch_size", ":", "# Return the current batch of lines", "yield", "buf", ".", "getvalue", "(", ")", "# Start a new batch", "buf", "=", "BytesIO", "(", ")", "lines", "=", "0", "# Return any remaining lines in partial batch", "if", "lines", ":", 
"yield", "buf", ".", "getvalue", "(", ")", "self", ".", "_send", "(", "gen", "(", ")", ")" ]
Insert records into the database. - `model_instances`: any iterable containing instances of a single model class. - `batch_size`: number of records to send per chunk (use a lower number if your records are very large).
[ "Insert", "records", "into", "the", "database", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L177-L222
227,456
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.count
def count(self, model_class, conditions=None): ''' Counts the number of records in the model's table. - `model_class`: the model to count. - `conditions`: optional SQL conditions (contents of the WHERE clause). ''' query = 'SELECT count() FROM $table' if conditions: query += ' WHERE ' + conditions query = self._substitute(query, model_class) r = self._send(query) return int(r.text) if r.text else 0
python
def count(self, model_class, conditions=None):
    '''
    Counts the number of records in the model's table.

    - `model_class`: the model to count.
    - `conditions`: optional SQL conditions (contents of the WHERE clause).
    '''
    sql = 'SELECT count() FROM $table'
    if conditions:
        sql = sql + ' WHERE ' + conditions
    response = self._send(self._substitute(sql, model_class))
    text = response.text
    # An empty response body counts as zero records.
    return int(text) if text else 0
[ "def", "count", "(", "self", ",", "model_class", ",", "conditions", "=", "None", ")", ":", "query", "=", "'SELECT count() FROM $table'", "if", "conditions", ":", "query", "+=", "' WHERE '", "+", "conditions", "query", "=", "self", ".", "_substitute", "(", "query", ",", "model_class", ")", "r", "=", "self", ".", "_send", "(", "query", ")", "return", "int", "(", "r", ".", "text", ")", "if", "r", ".", "text", "else", "0" ]
Counts the number of records in the model's table. - `model_class`: the model to count. - `conditions`: optional SQL conditions (contents of the WHERE clause).
[ "Counts", "the", "number", "of", "records", "in", "the", "model", "s", "table", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L224-L236
227,457
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.select
def select(self, query, model_class=None, settings=None): ''' Performs a query and returns a generator of model instances. - `query`: the SQL query to execute. - `model_class`: the model class matching the query's table, or `None` for getting back instances of an ad-hoc model. - `settings`: query settings to send as HTTP GET parameters ''' query += ' FORMAT TabSeparatedWithNamesAndTypes' query = self._substitute(query, model_class) r = self._send(query, settings, True) lines = r.iter_lines() field_names = parse_tsv(next(lines)) field_types = parse_tsv(next(lines)) model_class = model_class or ModelBase.create_ad_hoc_model(zip(field_names, field_types)) for line in lines: # skip blank line left by WITH TOTALS modifier if line: yield model_class.from_tsv(line, field_names, self.server_timezone, self)
python
def select(self, query, model_class=None, settings=None):
    '''
    Performs a query and returns a generator of model instances.

    - `query`: the SQL query to execute.
    - `model_class`: the model class matching the query's table,
      or `None` for getting back instances of an ad-hoc model.
    - `settings`: query settings to send as HTTP GET parameters
    '''
    query += ' FORMAT TabSeparatedWithNamesAndTypes'
    query = self._substitute(query, model_class)
    r = self._send(query, settings, True)
    lines = r.iter_lines()
    # In this format the first two response lines carry the column names
    # and the column types, before any data rows.
    field_names = parse_tsv(next(lines))
    field_types = parse_tsv(next(lines))
    # No explicit model given: build an ad-hoc model from the response header.
    model_class = model_class or ModelBase.create_ad_hoc_model(zip(field_names, field_types))
    for line in lines:
        # skip blank line left by WITH TOTALS modifier
        if line:
            yield model_class.from_tsv(line, field_names, self.server_timezone, self)
[ "def", "select", "(", "self", ",", "query", ",", "model_class", "=", "None", ",", "settings", "=", "None", ")", ":", "query", "+=", "' FORMAT TabSeparatedWithNamesAndTypes'", "query", "=", "self", ".", "_substitute", "(", "query", ",", "model_class", ")", "r", "=", "self", ".", "_send", "(", "query", ",", "settings", ",", "True", ")", "lines", "=", "r", ".", "iter_lines", "(", ")", "field_names", "=", "parse_tsv", "(", "next", "(", "lines", ")", ")", "field_types", "=", "parse_tsv", "(", "next", "(", "lines", ")", ")", "model_class", "=", "model_class", "or", "ModelBase", ".", "create_ad_hoc_model", "(", "zip", "(", "field_names", ",", "field_types", ")", ")", "for", "line", "in", "lines", ":", "# skip blank line left by WITH TOTALS modifier", "if", "line", ":", "yield", "model_class", ".", "from_tsv", "(", "line", ",", "field_names", ",", "self", ".", "server_timezone", ",", "self", ")" ]
Performs a query and returns a generator of model instances. - `query`: the SQL query to execute. - `model_class`: the model class matching the query's table, or `None` for getting back instances of an ad-hoc model. - `settings`: query settings to send as HTTP GET parameters
[ "Performs", "a", "query", "and", "returns", "a", "generator", "of", "model", "instances", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L238-L257
227,458
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.raw
def raw(self, query, settings=None, stream=False): ''' Performs a query and returns its output as text. - `query`: the SQL query to execute. - `settings`: query settings to send as HTTP GET parameters - `stream`: if true, the HTTP response from ClickHouse will be streamed. ''' query = self._substitute(query, None) return self._send(query, settings=settings, stream=stream).text
python
def raw(self, query, settings=None, stream=False):
    '''
    Performs a query and returns its output as text.

    - `query`: the SQL query to execute.
    - `settings`: query settings to send as HTTP GET parameters
    - `stream`: if true, the HTTP response from ClickHouse will be streamed.
    '''
    substituted = self._substitute(query, None)
    response = self._send(substituted, settings=settings, stream=stream)
    return response.text
[ "def", "raw", "(", "self", ",", "query", ",", "settings", "=", "None", ",", "stream", "=", "False", ")", ":", "query", "=", "self", ".", "_substitute", "(", "query", ",", "None", ")", "return", "self", ".", "_send", "(", "query", ",", "settings", "=", "settings", ",", "stream", "=", "stream", ")", ".", "text" ]
Performs a query and returns its output as text. - `query`: the SQL query to execute. - `settings`: query settings to send as HTTP GET parameters - `stream`: if true, the HTTP response from ClickHouse will be streamed.
[ "Performs", "a", "query", "and", "returns", "its", "output", "as", "text", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L259-L268
227,459
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.paginate
def paginate(self, model_class, order_by, page_num=1, page_size=100, conditions=None, settings=None): ''' Selects records and returns a single page of model instances. - `model_class`: the model class matching the query's table, or `None` for getting back instances of an ad-hoc model. - `order_by`: columns to use for sorting the query (contents of the ORDER BY clause). - `page_num`: the page number (1-based), or -1 to get the last page. - `page_size`: number of records to return per page. - `conditions`: optional SQL conditions (contents of the WHERE clause). - `settings`: query settings to send as HTTP GET parameters The result is a namedtuple containing `objects` (list), `number_of_objects`, `pages_total`, `number` (of the current page), and `page_size`. ''' count = self.count(model_class, conditions) pages_total = int(ceil(count / float(page_size))) if page_num == -1: page_num = max(pages_total, 1) elif page_num < 1: raise ValueError('Invalid page number: %d' % page_num) offset = (page_num - 1) * page_size query = 'SELECT * FROM $table' if conditions: query += ' WHERE ' + conditions query += ' ORDER BY %s' % order_by query += ' LIMIT %d, %d' % (offset, page_size) query = self._substitute(query, model_class) return Page( objects=list(self.select(query, model_class, settings)) if count else [], number_of_objects=count, pages_total=pages_total, number=page_num, page_size=page_size )
python
def paginate(self, model_class, order_by, page_num=1, page_size=100, conditions=None, settings=None):
    '''
    Selects records and returns a single page of model instances.

    - `model_class`: the model class matching the query's table,
      or `None` for getting back instances of an ad-hoc model.
    - `order_by`: columns to use for sorting the query (contents of the ORDER BY clause).
    - `page_num`: the page number (1-based), or -1 to get the last page.
    - `page_size`: number of records to return per page.
    - `conditions`: optional SQL conditions (contents of the WHERE clause).
    - `settings`: query settings to send as HTTP GET parameters

    The result is a namedtuple containing `objects` (list), `number_of_objects`,
    `pages_total`, `number` (of the current page), and `page_size`.
    '''
    total = self.count(model_class, conditions)
    pages_total = int(ceil(total / float(page_size)))
    if page_num == -1:
        # -1 selects the last page; with no records at all, fall back to page 1.
        page_num = max(pages_total, 1)
    elif page_num < 1:
        raise ValueError('Invalid page number: %d' % page_num)
    offset = (page_num - 1) * page_size
    query = 'SELECT * FROM $table'
    if conditions:
        query += ' WHERE ' + conditions
    query += ' ORDER BY %s' % order_by
    query += ' LIMIT %d, %d' % (offset, page_size)
    query = self._substitute(query, model_class)
    # Skip the SELECT entirely when the table is known to be empty.
    if total:
        objects = list(self.select(query, model_class, settings))
    else:
        objects = []
    return Page(
        objects=objects,
        number_of_objects=total,
        pages_total=pages_total,
        number=page_num,
        page_size=page_size
    )
[ "def", "paginate", "(", "self", ",", "model_class", ",", "order_by", ",", "page_num", "=", "1", ",", "page_size", "=", "100", ",", "conditions", "=", "None", ",", "settings", "=", "None", ")", ":", "count", "=", "self", ".", "count", "(", "model_class", ",", "conditions", ")", "pages_total", "=", "int", "(", "ceil", "(", "count", "/", "float", "(", "page_size", ")", ")", ")", "if", "page_num", "==", "-", "1", ":", "page_num", "=", "max", "(", "pages_total", ",", "1", ")", "elif", "page_num", "<", "1", ":", "raise", "ValueError", "(", "'Invalid page number: %d'", "%", "page_num", ")", "offset", "=", "(", "page_num", "-", "1", ")", "*", "page_size", "query", "=", "'SELECT * FROM $table'", "if", "conditions", ":", "query", "+=", "' WHERE '", "+", "conditions", "query", "+=", "' ORDER BY %s'", "%", "order_by", "query", "+=", "' LIMIT %d, %d'", "%", "(", "offset", ",", "page_size", ")", "query", "=", "self", ".", "_substitute", "(", "query", ",", "model_class", ")", "return", "Page", "(", "objects", "=", "list", "(", "self", ".", "select", "(", "query", ",", "model_class", ",", "settings", ")", ")", "if", "count", "else", "[", "]", ",", "number_of_objects", "=", "count", ",", "pages_total", "=", "pages_total", ",", "number", "=", "page_num", ",", "page_size", "=", "page_size", ")" ]
Selects records and returns a single page of model instances. - `model_class`: the model class matching the query's table, or `None` for getting back instances of an ad-hoc model. - `order_by`: columns to use for sorting the query (contents of the ORDER BY clause). - `page_num`: the page number (1-based), or -1 to get the last page. - `page_size`: number of records to return per page. - `conditions`: optional SQL conditions (contents of the WHERE clause). - `settings`: query settings to send as HTTP GET parameters The result is a namedtuple containing `objects` (list), `number_of_objects`, `pages_total`, `number` (of the current page), and `page_size`.
[ "Selects", "records", "and", "returns", "a", "single", "page", "of", "model", "instances", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L270-L304
227,460
Infinidat/infi.clickhouse_orm
src/infi/clickhouse_orm/database.py
Database.migrate
def migrate(self, migrations_package_name, up_to=9999): ''' Executes schema migrations. - `migrations_package_name` - fully qualified name of the Python package containing the migrations. - `up_to` - number of the last migration to apply. ''' from .migrations import MigrationHistory logger = logging.getLogger('migrations') applied_migrations = self._get_applied_migrations(migrations_package_name) modules = import_submodules(migrations_package_name) unapplied_migrations = set(modules.keys()) - applied_migrations for name in sorted(unapplied_migrations): logger.info('Applying migration %s...', name) for operation in modules[name].operations: operation.apply(self) self.insert([MigrationHistory(package_name=migrations_package_name, module_name=name, applied=datetime.date.today())]) if int(name[:4]) >= up_to: break
python
def migrate(self, migrations_package_name, up_to=9999):
    '''
    Executes schema migrations.

    - `migrations_package_name` - fully qualified name of the Python package
      containing the migrations.
    - `up_to` - number of the last migration to apply.
    '''
    from .migrations import MigrationHistory
    logger = logging.getLogger('migrations')
    applied_migrations = self._get_applied_migrations(migrations_package_name)
    modules = import_submodules(migrations_package_name)
    # Migration modules not yet recorded in the history table.
    unapplied_migrations = set(modules.keys()) - applied_migrations
    for name in sorted(unapplied_migrations):
        logger.info('Applying migration %s...', name)
        for operation in modules[name].operations:
            operation.apply(self)
        # Record the migration so it is skipped on subsequent runs.
        self.insert([MigrationHistory(package_name=migrations_package_name, module_name=name, applied=datetime.date.today())])
        # Module names start with a 4-digit number; stop once `up_to` is reached.
        if int(name[:4]) >= up_to:
            break
[ "def", "migrate", "(", "self", ",", "migrations_package_name", ",", "up_to", "=", "9999", ")", ":", "from", ".", "migrations", "import", "MigrationHistory", "logger", "=", "logging", ".", "getLogger", "(", "'migrations'", ")", "applied_migrations", "=", "self", ".", "_get_applied_migrations", "(", "migrations_package_name", ")", "modules", "=", "import_submodules", "(", "migrations_package_name", ")", "unapplied_migrations", "=", "set", "(", "modules", ".", "keys", "(", ")", ")", "-", "applied_migrations", "for", "name", "in", "sorted", "(", "unapplied_migrations", ")", ":", "logger", ".", "info", "(", "'Applying migration %s...'", ",", "name", ")", "for", "operation", "in", "modules", "[", "name", "]", ".", "operations", ":", "operation", ".", "apply", "(", "self", ")", "self", ".", "insert", "(", "[", "MigrationHistory", "(", "package_name", "=", "migrations_package_name", ",", "module_name", "=", "name", ",", "applied", "=", "datetime", ".", "date", ".", "today", "(", ")", ")", "]", ")", "if", "int", "(", "name", "[", ":", "4", "]", ")", ">=", "up_to", ":", "break" ]
Executes schema migrations. - `migrations_package_name` - fully qualified name of the Python package containing the migrations. - `up_to` - number of the last migration to apply.
[ "Executes", "schema", "migrations", "." ]
595f2023e334e3925a5c3fbfdd6083a5992a7169
https://github.com/Infinidat/infi.clickhouse_orm/blob/595f2023e334e3925a5c3fbfdd6083a5992a7169/src/infi/clickhouse_orm/database.py#L306-L325
227,461
pyusb/pyusb
usb/core.py
_try_get_string
def _try_get_string(dev, index, langid = None, default_str_i0 = "", default_access_error = "Error Accessing String"): """ try to get a string, but return a string no matter what """ if index == 0 : string = default_str_i0 else: try: if langid is None: string = util.get_string(dev, index) else: string = util.get_string(dev, index, langid) except : string = default_access_error return string
python
def _try_get_string(dev, index, langid = None, default_str_i0 = "", default_access_error = "Error Accessing String"): """ try to get a string, but return a string no matter what """ if index == 0 : string = default_str_i0 else: try: if langid is None: string = util.get_string(dev, index) else: string = util.get_string(dev, index, langid) except : string = default_access_error return string
[ "def", "_try_get_string", "(", "dev", ",", "index", ",", "langid", "=", "None", ",", "default_str_i0", "=", "\"\"", ",", "default_access_error", "=", "\"Error Accessing String\"", ")", ":", "if", "index", "==", "0", ":", "string", "=", "default_str_i0", "else", ":", "try", ":", "if", "langid", "is", "None", ":", "string", "=", "util", ".", "get_string", "(", "dev", ",", "index", ")", "else", ":", "string", "=", "util", ".", "get_string", "(", "dev", ",", "index", ",", "langid", ")", "except", ":", "string", "=", "default_access_error", "return", "string" ]
try to get a string, but return a string no matter what
[ "try", "to", "get", "a", "string", "but", "return", "a", "string", "no", "matter", "what" ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L52-L66
227,462
pyusb/pyusb
usb/core.py
_try_lookup
def _try_lookup(table, value, default = ""): """ try to get a string from the lookup table, return "" instead of key error """ try: string = table[ value ] except KeyError: string = default return string
python
def _try_lookup(table, value, default = ""): """ try to get a string from the lookup table, return "" instead of key error """ try: string = table[ value ] except KeyError: string = default return string
[ "def", "_try_lookup", "(", "table", ",", "value", ",", "default", "=", "\"\"", ")", ":", "try", ":", "string", "=", "table", "[", "value", "]", "except", "KeyError", ":", "string", "=", "default", "return", "string" ]
try to get a string from the lookup table, return "" instead of key error
[ "try", "to", "get", "a", "string", "from", "the", "lookup", "table", "return", "instead", "of", "key", "error" ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L68-L76
227,463
pyusb/pyusb
usb/core.py
find
def find(find_all=False, backend = None, custom_match = None, **args): r"""Find an USB device and return it. find() is the function used to discover USB devices. You can pass as arguments any combination of the USB Device Descriptor fields to match a device. For example: find(idVendor=0x3f4, idProduct=0x2009) will return the Device object for the device with idVendor field equals to 0x3f4 and idProduct equals to 0x2009. If there is more than one device which matchs the criteria, the first one found will be returned. If a matching device cannot be found the function returns None. If you want to get all devices, you can set the parameter find_all to True, then find will return an iterator with all matched devices. If no matching device is found, it will return an empty iterator. Example: for printer in find(find_all=True, bDeviceClass=7): print (printer) This call will get all the USB printers connected to the system. (actually may be not, because some devices put their class information in the Interface Descriptor). You can also use a customized match criteria: dev = find(custom_match = lambda d: d.idProduct=0x3f4 and d.idvendor=0x2009) A more accurate printer finder using a customized match would be like so: def is_printer(dev): import usb.util if dev.bDeviceClass == 7: return True for cfg in dev: if usb.util.find_descriptor(cfg, bInterfaceClass=7) is not None: return True for printer in find(find_all=True, custom_match = is_printer): print (printer) Now even if the device class code is in the interface descriptor the printer will be found. You can combine a customized match with device descriptor fields. In this case, the fields must match and the custom_match must return True. 
In the our previous example, if we would like to get all printers belonging to the manufacturer 0x3f4, the code would be like so: printers = list(find(find_all=True, idVendor=0x3f4, custom_match=is_printer)) If you want to use find as a 'list all devices' function, just call it with find_all = True: devices = list(find(find_all=True)) Finally, you can pass a custom backend to the find function: find(backend = MyBackend()) PyUSB has builtin backends for libusb 0.1, libusb 1.0 and OpenUSB. If you do not supply a backend explicitly, find() function will select one of the predefineds backends according to system availability. Backends are explained in the usb.backend module. """ def device_iter(**kwargs): for dev in backend.enumerate_devices(): d = Device(dev, backend) tests = (val == getattr(d, key) for key, val in kwargs.items()) if _interop._all(tests) and (custom_match is None or custom_match(d)): yield d if backend is None: import usb.backend.libusb1 as libusb1 import usb.backend.libusb0 as libusb0 import usb.backend.openusb as openusb for m in (libusb1, openusb, libusb0): backend = m.get_backend() if backend is not None: _logger.info('find(): using backend "%s"', m.__name__) break else: raise NoBackendError('No backend available') if find_all: return device_iter(**args) else: try: return _interop._next(device_iter(**args)) except StopIteration: return None
python
def find(find_all=False, backend = None, custom_match = None, **args): r"""Find an USB device and return it. find() is the function used to discover USB devices. You can pass as arguments any combination of the USB Device Descriptor fields to match a device. For example: find(idVendor=0x3f4, idProduct=0x2009) will return the Device object for the device with idVendor field equals to 0x3f4 and idProduct equals to 0x2009. If there is more than one device which matchs the criteria, the first one found will be returned. If a matching device cannot be found the function returns None. If you want to get all devices, you can set the parameter find_all to True, then find will return an iterator with all matched devices. If no matching device is found, it will return an empty iterator. Example: for printer in find(find_all=True, bDeviceClass=7): print (printer) This call will get all the USB printers connected to the system. (actually may be not, because some devices put their class information in the Interface Descriptor). You can also use a customized match criteria: dev = find(custom_match = lambda d: d.idProduct=0x3f4 and d.idvendor=0x2009) A more accurate printer finder using a customized match would be like so: def is_printer(dev): import usb.util if dev.bDeviceClass == 7: return True for cfg in dev: if usb.util.find_descriptor(cfg, bInterfaceClass=7) is not None: return True for printer in find(find_all=True, custom_match = is_printer): print (printer) Now even if the device class code is in the interface descriptor the printer will be found. You can combine a customized match with device descriptor fields. In this case, the fields must match and the custom_match must return True. 
In the our previous example, if we would like to get all printers belonging to the manufacturer 0x3f4, the code would be like so: printers = list(find(find_all=True, idVendor=0x3f4, custom_match=is_printer)) If you want to use find as a 'list all devices' function, just call it with find_all = True: devices = list(find(find_all=True)) Finally, you can pass a custom backend to the find function: find(backend = MyBackend()) PyUSB has builtin backends for libusb 0.1, libusb 1.0 and OpenUSB. If you do not supply a backend explicitly, find() function will select one of the predefineds backends according to system availability. Backends are explained in the usb.backend module. """ def device_iter(**kwargs): for dev in backend.enumerate_devices(): d = Device(dev, backend) tests = (val == getattr(d, key) for key, val in kwargs.items()) if _interop._all(tests) and (custom_match is None or custom_match(d)): yield d if backend is None: import usb.backend.libusb1 as libusb1 import usb.backend.libusb0 as libusb0 import usb.backend.openusb as openusb for m in (libusb1, openusb, libusb0): backend = m.get_backend() if backend is not None: _logger.info('find(): using backend "%s"', m.__name__) break else: raise NoBackendError('No backend available') if find_all: return device_iter(**args) else: try: return _interop._next(device_iter(**args)) except StopIteration: return None
[ "def", "find", "(", "find_all", "=", "False", ",", "backend", "=", "None", ",", "custom_match", "=", "None", ",", "*", "*", "args", ")", ":", "def", "device_iter", "(", "*", "*", "kwargs", ")", ":", "for", "dev", "in", "backend", ".", "enumerate_devices", "(", ")", ":", "d", "=", "Device", "(", "dev", ",", "backend", ")", "tests", "=", "(", "val", "==", "getattr", "(", "d", ",", "key", ")", "for", "key", ",", "val", "in", "kwargs", ".", "items", "(", ")", ")", "if", "_interop", ".", "_all", "(", "tests", ")", "and", "(", "custom_match", "is", "None", "or", "custom_match", "(", "d", ")", ")", ":", "yield", "d", "if", "backend", "is", "None", ":", "import", "usb", ".", "backend", ".", "libusb1", "as", "libusb1", "import", "usb", ".", "backend", ".", "libusb0", "as", "libusb0", "import", "usb", ".", "backend", ".", "openusb", "as", "openusb", "for", "m", "in", "(", "libusb1", ",", "openusb", ",", "libusb0", ")", ":", "backend", "=", "m", ".", "get_backend", "(", ")", "if", "backend", "is", "not", "None", ":", "_logger", ".", "info", "(", "'find(): using backend \"%s\"'", ",", "m", ".", "__name__", ")", "break", "else", ":", "raise", "NoBackendError", "(", "'No backend available'", ")", "if", "find_all", ":", "return", "device_iter", "(", "*", "*", "args", ")", "else", ":", "try", ":", "return", "_interop", ".", "_next", "(", "device_iter", "(", "*", "*", "args", ")", ")", "except", "StopIteration", ":", "return", "None" ]
r"""Find an USB device and return it. find() is the function used to discover USB devices. You can pass as arguments any combination of the USB Device Descriptor fields to match a device. For example: find(idVendor=0x3f4, idProduct=0x2009) will return the Device object for the device with idVendor field equals to 0x3f4 and idProduct equals to 0x2009. If there is more than one device which matchs the criteria, the first one found will be returned. If a matching device cannot be found the function returns None. If you want to get all devices, you can set the parameter find_all to True, then find will return an iterator with all matched devices. If no matching device is found, it will return an empty iterator. Example: for printer in find(find_all=True, bDeviceClass=7): print (printer) This call will get all the USB printers connected to the system. (actually may be not, because some devices put their class information in the Interface Descriptor). You can also use a customized match criteria: dev = find(custom_match = lambda d: d.idProduct=0x3f4 and d.idvendor=0x2009) A more accurate printer finder using a customized match would be like so: def is_printer(dev): import usb.util if dev.bDeviceClass == 7: return True for cfg in dev: if usb.util.find_descriptor(cfg, bInterfaceClass=7) is not None: return True for printer in find(find_all=True, custom_match = is_printer): print (printer) Now even if the device class code is in the interface descriptor the printer will be found. You can combine a customized match with device descriptor fields. In this case, the fields must match and the custom_match must return True. 
In the our previous example, if we would like to get all printers belonging to the manufacturer 0x3f4, the code would be like so: printers = list(find(find_all=True, idVendor=0x3f4, custom_match=is_printer)) If you want to use find as a 'list all devices' function, just call it with find_all = True: devices = list(find(find_all=True)) Finally, you can pass a custom backend to the find function: find(backend = MyBackend()) PyUSB has builtin backends for libusb 0.1, libusb 1.0 and OpenUSB. If you do not supply a backend explicitly, find() function will select one of the predefineds backends according to system availability. Backends are explained in the usb.backend module.
[ "r", "Find", "an", "USB", "device", "and", "return", "it", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L1179-L1273
227,464
pyusb/pyusb
usb/core.py
show_devices
def show_devices(verbose=False, **kwargs): """Show information about connected devices. The verbose flag sets to verbose or not. **kwargs are passed directly to the find() function. """ kwargs["find_all"] = True devices = find(**kwargs) strings = "" for device in devices: if not verbose: strings += "%s, %s\n" % (device._str(), _try_lookup( _lu.device_classes, device.bDeviceClass)) else: strings += "%s\n\n" % str(device) return _DescriptorInfo(strings)
python
def show_devices(verbose=False, **kwargs): """Show information about connected devices. The verbose flag sets to verbose or not. **kwargs are passed directly to the find() function. """ kwargs["find_all"] = True devices = find(**kwargs) strings = "" for device in devices: if not verbose: strings += "%s, %s\n" % (device._str(), _try_lookup( _lu.device_classes, device.bDeviceClass)) else: strings += "%s\n\n" % str(device) return _DescriptorInfo(strings)
[ "def", "show_devices", "(", "verbose", "=", "False", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "\"find_all\"", "]", "=", "True", "devices", "=", "find", "(", "*", "*", "kwargs", ")", "strings", "=", "\"\"", "for", "device", "in", "devices", ":", "if", "not", "verbose", ":", "strings", "+=", "\"%s, %s\\n\"", "%", "(", "device", ".", "_str", "(", ")", ",", "_try_lookup", "(", "_lu", ".", "device_classes", ",", "device", ".", "bDeviceClass", ")", ")", "else", ":", "strings", "+=", "\"%s\\n\\n\"", "%", "str", "(", "device", ")", "return", "_DescriptorInfo", "(", "strings", ")" ]
Show information about connected devices. The verbose flag sets to verbose or not. **kwargs are passed directly to the find() function.
[ "Show", "information", "about", "connected", "devices", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L1275-L1291
227,465
pyusb/pyusb
usb/core.py
Device.langids
def langids(self): """ Return the USB device's supported language ID codes. These are 16-bit codes familiar to Windows developers, where for example instead of en-US you say 0x0409. USB_LANGIDS.pdf on the usb.org developer site for more info. String requests using a LANGID not in this array should not be sent to the device. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use. """ if self._langids is None: try: self._langids = util.get_langids(self) except USBError: self._langids = () return self._langids
python
def langids(self): """ Return the USB device's supported language ID codes. These are 16-bit codes familiar to Windows developers, where for example instead of en-US you say 0x0409. USB_LANGIDS.pdf on the usb.org developer site for more info. String requests using a LANGID not in this array should not be sent to the device. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use. """ if self._langids is None: try: self._langids = util.get_langids(self) except USBError: self._langids = () return self._langids
[ "def", "langids", "(", "self", ")", ":", "if", "self", ".", "_langids", "is", "None", ":", "try", ":", "self", ".", "_langids", "=", "util", ".", "get_langids", "(", "self", ")", "except", "USBError", ":", "self", ".", "_langids", "=", "(", ")", "return", "self", ".", "_langids" ]
Return the USB device's supported language ID codes. These are 16-bit codes familiar to Windows developers, where for example instead of en-US you say 0x0409. USB_LANGIDS.pdf on the usb.org developer site for more info. String requests using a LANGID not in this array should not be sent to the device. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use.
[ "Return", "the", "USB", "device", "s", "supported", "language", "ID", "codes", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L794-L810
227,466
pyusb/pyusb
usb/core.py
Device.serial_number
def serial_number(self): """ Return the USB device's serial number string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use. """ if self._serial_number is None: self._serial_number = util.get_string(self, self.iSerialNumber) return self._serial_number
python
def serial_number(self): """ Return the USB device's serial number string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use. """ if self._serial_number is None: self._serial_number = util.get_string(self, self.iSerialNumber) return self._serial_number
[ "def", "serial_number", "(", "self", ")", ":", "if", "self", ".", "_serial_number", "is", "None", ":", "self", ".", "_serial_number", "=", "util", ".", "get_string", "(", "self", ",", "self", ".", "iSerialNumber", ")", "return", "self", ".", "_serial_number" ]
Return the USB device's serial number string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use.
[ "Return", "the", "USB", "device", "s", "serial", "number", "string", "descriptor", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L813-L821
227,467
pyusb/pyusb
usb/core.py
Device.product
def product(self): """ Return the USB device's product string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use. """ if self._product is None: self._product = util.get_string(self, self.iProduct) return self._product
python
def product(self): """ Return the USB device's product string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use. """ if self._product is None: self._product = util.get_string(self, self.iProduct) return self._product
[ "def", "product", "(", "self", ")", ":", "if", "self", ".", "_product", "is", "None", ":", "self", ".", "_product", "=", "util", ".", "get_string", "(", "self", ",", "self", ".", "iProduct", ")", "return", "self", ".", "_product" ]
Return the USB device's product string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use.
[ "Return", "the", "USB", "device", "s", "product", "string", "descriptor", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L824-L832
227,468
pyusb/pyusb
usb/core.py
Device.parent
def parent(self): """ Return the parent device. """ if self._has_parent is None: _parent = self._ctx.backend.get_parent(self._ctx.dev) self._has_parent = _parent is not None if self._has_parent: self._parent = Device(_parent, self._ctx.backend) else: self._parent = None return self._parent
python
def parent(self): """ Return the parent device. """ if self._has_parent is None: _parent = self._ctx.backend.get_parent(self._ctx.dev) self._has_parent = _parent is not None if self._has_parent: self._parent = Device(_parent, self._ctx.backend) else: self._parent = None return self._parent
[ "def", "parent", "(", "self", ")", ":", "if", "self", ".", "_has_parent", "is", "None", ":", "_parent", "=", "self", ".", "_ctx", ".", "backend", ".", "get_parent", "(", "self", ".", "_ctx", ".", "dev", ")", "self", ".", "_has_parent", "=", "_parent", "is", "not", "None", "if", "self", ".", "_has_parent", ":", "self", ".", "_parent", "=", "Device", "(", "_parent", ",", "self", ".", "_ctx", ".", "backend", ")", "else", ":", "self", ".", "_parent", "=", "None", "return", "self", ".", "_parent" ]
Return the parent device.
[ "Return", "the", "parent", "device", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L835-L844
227,469
pyusb/pyusb
usb/core.py
Device.manufacturer
def manufacturer(self): """ Return the USB device's manufacturer string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use. """ if self._manufacturer is None: self._manufacturer = util.get_string(self, self.iManufacturer) return self._manufacturer
python
def manufacturer(self): """ Return the USB device's manufacturer string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use. """ if self._manufacturer is None: self._manufacturer = util.get_string(self, self.iManufacturer) return self._manufacturer
[ "def", "manufacturer", "(", "self", ")", ":", "if", "self", ".", "_manufacturer", "is", "None", ":", "self", ".", "_manufacturer", "=", "util", ".", "get_string", "(", "self", ",", "self", ".", "iManufacturer", ")", "return", "self", ".", "_manufacturer" ]
Return the USB device's manufacturer string descriptor. This property will cause some USB traffic the first time it is accessed and cache the resulting value for future use.
[ "Return", "the", "USB", "device", "s", "manufacturer", "string", "descriptor", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L847-L855
227,470
pyusb/pyusb
usb/core.py
Device.set_interface_altsetting
def set_interface_altsetting(self, interface = None, alternate_setting = None): r"""Set the alternate setting for an interface. When you want to use an interface and it has more than one alternate setting, you should call this method to select the appropriate alternate setting. If you call the method without one or the two parameters, it will be selected the first one found in the Device in the same way of the set_configuration method. Commonly, an interface has only one alternate setting and this call is not necessary. For most devices, either it has more than one alternate setting or not, it is not harmful to make a call to this method with no arguments, as devices will silently ignore the request when there is only one alternate setting, though the USB Spec allows devices with no additional alternate setting return an error to the Host in response to a SET_INTERFACE request. If you are in doubt, you may want to call it with no arguments wrapped by a try/except clause: >>> try: >>> dev.set_interface_altsetting() >>> except usb.core.USBError: >>> pass """ self._ctx.managed_set_interface(self, interface, alternate_setting)
python
def set_interface_altsetting(self, interface = None, alternate_setting = None): r"""Set the alternate setting for an interface. When you want to use an interface and it has more than one alternate setting, you should call this method to select the appropriate alternate setting. If you call the method without one or the two parameters, it will be selected the first one found in the Device in the same way of the set_configuration method. Commonly, an interface has only one alternate setting and this call is not necessary. For most devices, either it has more than one alternate setting or not, it is not harmful to make a call to this method with no arguments, as devices will silently ignore the request when there is only one alternate setting, though the USB Spec allows devices with no additional alternate setting return an error to the Host in response to a SET_INTERFACE request. If you are in doubt, you may want to call it with no arguments wrapped by a try/except clause: >>> try: >>> dev.set_interface_altsetting() >>> except usb.core.USBError: >>> pass """ self._ctx.managed_set_interface(self, interface, alternate_setting)
[ "def", "set_interface_altsetting", "(", "self", ",", "interface", "=", "None", ",", "alternate_setting", "=", "None", ")", ":", "self", ".", "_ctx", ".", "managed_set_interface", "(", "self", ",", "interface", ",", "alternate_setting", ")" ]
r"""Set the alternate setting for an interface. When you want to use an interface and it has more than one alternate setting, you should call this method to select the appropriate alternate setting. If you call the method without one or the two parameters, it will be selected the first one found in the Device in the same way of the set_configuration method. Commonly, an interface has only one alternate setting and this call is not necessary. For most devices, either it has more than one alternate setting or not, it is not harmful to make a call to this method with no arguments, as devices will silently ignore the request when there is only one alternate setting, though the USB Spec allows devices with no additional alternate setting return an error to the Host in response to a SET_INTERFACE request. If you are in doubt, you may want to call it with no arguments wrapped by a try/except clause: >>> try: >>> dev.set_interface_altsetting() >>> except usb.core.USBError: >>> pass
[ "r", "Set", "the", "alternate", "setting", "for", "an", "interface", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L879-L904
227,471
pyusb/pyusb
usb/core.py
Device.reset
def reset(self): r"""Reset the device.""" self._ctx.managed_open() self._ctx.dispose(self, False) self._ctx.backend.reset_device(self._ctx.handle) self._ctx.dispose(self, True)
python
def reset(self): r"""Reset the device.""" self._ctx.managed_open() self._ctx.dispose(self, False) self._ctx.backend.reset_device(self._ctx.handle) self._ctx.dispose(self, True)
[ "def", "reset", "(", "self", ")", ":", "self", ".", "_ctx", ".", "managed_open", "(", ")", "self", ".", "_ctx", ".", "dispose", "(", "self", ",", "False", ")", "self", ".", "_ctx", ".", "backend", ".", "reset_device", "(", "self", ".", "_ctx", ".", "handle", ")", "self", ".", "_ctx", ".", "dispose", "(", "self", ",", "True", ")" ]
r"""Reset the device.
[ "r", "Reset", "the", "device", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L913-L918
227,472
pyusb/pyusb
usb/core.py
Device.ctrl_transfer
def ctrl_transfer(self, bmRequestType, bRequest, wValue=0, wIndex=0, data_or_wLength = None, timeout = None): r"""Do a control transfer on the endpoint 0. This method is used to issue a control transfer over the endpoint 0 (endpoint 0 is required to always be a control endpoint). The parameters bmRequestType, bRequest, wValue and wIndex are the same of the USB Standard Control Request format. Control requests may or may not have a data payload to write/read. In cases which it has, the direction bit of the bmRequestType field is used to infer the desired request direction. For host to device requests (OUT), data_or_wLength parameter is the data payload to send, and it must be a sequence type convertible to an array object. In this case, the return value is the number of bytes written in the data payload. For device to host requests (IN), data_or_wLength is either the wLength parameter of the control request specifying the number of bytes to read in data payload, and the return value is an array object with data read, or an array object which the data will be read to, and the return value is the number of bytes read. """ try: buff = util.create_buffer(data_or_wLength) except TypeError: buff = _interop.as_array(data_or_wLength) self._ctx.managed_open() # Thanks to Johannes Stezenbach to point me out that we need to # claim the recipient interface recipient = bmRequestType & 3 rqtype = bmRequestType & (3 << 5) if recipient == util.CTRL_RECIPIENT_INTERFACE \ and rqtype != util.CTRL_TYPE_VENDOR: interface_number = wIndex & 0xff self._ctx.managed_claim_interface(self, interface_number) ret = self._ctx.backend.ctrl_transfer( self._ctx.handle, bmRequestType, bRequest, wValue, wIndex, buff, self.__get_timeout(timeout)) if isinstance(data_or_wLength, array.array) \ or util.ctrl_direction(bmRequestType) == util.CTRL_OUT: return ret elif ret != len(buff) * buff.itemsize: return buff[:ret] else: return buff
python
def ctrl_transfer(self, bmRequestType, bRequest, wValue=0, wIndex=0, data_or_wLength = None, timeout = None): r"""Do a control transfer on the endpoint 0. This method is used to issue a control transfer over the endpoint 0 (endpoint 0 is required to always be a control endpoint). The parameters bmRequestType, bRequest, wValue and wIndex are the same of the USB Standard Control Request format. Control requests may or may not have a data payload to write/read. In cases which it has, the direction bit of the bmRequestType field is used to infer the desired request direction. For host to device requests (OUT), data_or_wLength parameter is the data payload to send, and it must be a sequence type convertible to an array object. In this case, the return value is the number of bytes written in the data payload. For device to host requests (IN), data_or_wLength is either the wLength parameter of the control request specifying the number of bytes to read in data payload, and the return value is an array object with data read, or an array object which the data will be read to, and the return value is the number of bytes read. """ try: buff = util.create_buffer(data_or_wLength) except TypeError: buff = _interop.as_array(data_or_wLength) self._ctx.managed_open() # Thanks to Johannes Stezenbach to point me out that we need to # claim the recipient interface recipient = bmRequestType & 3 rqtype = bmRequestType & (3 << 5) if recipient == util.CTRL_RECIPIENT_INTERFACE \ and rqtype != util.CTRL_TYPE_VENDOR: interface_number = wIndex & 0xff self._ctx.managed_claim_interface(self, interface_number) ret = self._ctx.backend.ctrl_transfer( self._ctx.handle, bmRequestType, bRequest, wValue, wIndex, buff, self.__get_timeout(timeout)) if isinstance(data_or_wLength, array.array) \ or util.ctrl_direction(bmRequestType) == util.CTRL_OUT: return ret elif ret != len(buff) * buff.itemsize: return buff[:ret] else: return buff
[ "def", "ctrl_transfer", "(", "self", ",", "bmRequestType", ",", "bRequest", ",", "wValue", "=", "0", ",", "wIndex", "=", "0", ",", "data_or_wLength", "=", "None", ",", "timeout", "=", "None", ")", ":", "try", ":", "buff", "=", "util", ".", "create_buffer", "(", "data_or_wLength", ")", "except", "TypeError", ":", "buff", "=", "_interop", ".", "as_array", "(", "data_or_wLength", ")", "self", ".", "_ctx", ".", "managed_open", "(", ")", "# Thanks to Johannes Stezenbach to point me out that we need to", "# claim the recipient interface", "recipient", "=", "bmRequestType", "&", "3", "rqtype", "=", "bmRequestType", "&", "(", "3", "<<", "5", ")", "if", "recipient", "==", "util", ".", "CTRL_RECIPIENT_INTERFACE", "and", "rqtype", "!=", "util", ".", "CTRL_TYPE_VENDOR", ":", "interface_number", "=", "wIndex", "&", "0xff", "self", ".", "_ctx", ".", "managed_claim_interface", "(", "self", ",", "interface_number", ")", "ret", "=", "self", ".", "_ctx", ".", "backend", ".", "ctrl_transfer", "(", "self", ".", "_ctx", ".", "handle", ",", "bmRequestType", ",", "bRequest", ",", "wValue", ",", "wIndex", ",", "buff", ",", "self", ".", "__get_timeout", "(", "timeout", ")", ")", "if", "isinstance", "(", "data_or_wLength", ",", "array", ".", "array", ")", "or", "util", ".", "ctrl_direction", "(", "bmRequestType", ")", "==", "util", ".", "CTRL_OUT", ":", "return", "ret", "elif", "ret", "!=", "len", "(", "buff", ")", "*", "buff", ".", "itemsize", ":", "return", "buff", "[", ":", "ret", "]", "else", ":", "return", "buff" ]
r"""Do a control transfer on the endpoint 0. This method is used to issue a control transfer over the endpoint 0 (endpoint 0 is required to always be a control endpoint). The parameters bmRequestType, bRequest, wValue and wIndex are the same of the USB Standard Control Request format. Control requests may or may not have a data payload to write/read. In cases which it has, the direction bit of the bmRequestType field is used to infer the desired request direction. For host to device requests (OUT), data_or_wLength parameter is the data payload to send, and it must be a sequence type convertible to an array object. In this case, the return value is the number of bytes written in the data payload. For device to host requests (IN), data_or_wLength is either the wLength parameter of the control request specifying the number of bytes to read in data payload, and the return value is an array object with data read, or an array object which the data will be read to, and the return value is the number of bytes read.
[ "r", "Do", "a", "control", "transfer", "on", "the", "endpoint", "0", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L999-L1053
227,473
pyusb/pyusb
usb/core.py
Device.is_kernel_driver_active
def is_kernel_driver_active(self, interface): r"""Determine if there is kernel driver associated with the interface. If a kernel driver is active, the object will be unable to perform I/O. The interface parameter is the device interface number to check. """ self._ctx.managed_open() return self._ctx.backend.is_kernel_driver_active( self._ctx.handle, interface)
python
def is_kernel_driver_active(self, interface): r"""Determine if there is kernel driver associated with the interface. If a kernel driver is active, the object will be unable to perform I/O. The interface parameter is the device interface number to check. """ self._ctx.managed_open() return self._ctx.backend.is_kernel_driver_active( self._ctx.handle, interface)
[ "def", "is_kernel_driver_active", "(", "self", ",", "interface", ")", ":", "self", ".", "_ctx", ".", "managed_open", "(", ")", "return", "self", ".", "_ctx", ".", "backend", ".", "is_kernel_driver_active", "(", "self", ".", "_ctx", ".", "handle", ",", "interface", ")" ]
r"""Determine if there is kernel driver associated with the interface. If a kernel driver is active, the object will be unable to perform I/O. The interface parameter is the device interface number to check.
[ "r", "Determine", "if", "there", "is", "kernel", "driver", "associated", "with", "the", "interface", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L1055-L1066
227,474
pyusb/pyusb
usb/core.py
Device.detach_kernel_driver
def detach_kernel_driver(self, interface): r"""Detach a kernel driver. If successful, you will then be able to perform I/O. The interface parameter is the device interface number to detach the driver from. """ self._ctx.managed_open() self._ctx.backend.detach_kernel_driver( self._ctx.handle, interface)
python
def detach_kernel_driver(self, interface): r"""Detach a kernel driver. If successful, you will then be able to perform I/O. The interface parameter is the device interface number to detach the driver from. """ self._ctx.managed_open() self._ctx.backend.detach_kernel_driver( self._ctx.handle, interface)
[ "def", "detach_kernel_driver", "(", "self", ",", "interface", ")", ":", "self", ".", "_ctx", ".", "managed_open", "(", ")", "self", ".", "_ctx", ".", "backend", ".", "detach_kernel_driver", "(", "self", ".", "_ctx", ".", "handle", ",", "interface", ")" ]
r"""Detach a kernel driver. If successful, you will then be able to perform I/O. The interface parameter is the device interface number to detach the driver from.
[ "r", "Detach", "a", "kernel", "driver", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/core.py#L1068-L1079
227,475
pyusb/pyusb
usb/libloader.py
load_library
def load_library(lib, name=None, lib_cls=None): """Loads a library. Catches and logs exceptions. Returns: the loaded library or None arguments: * lib -- path to/name of the library to be loaded * name -- the library's identifier (for logging) Defaults to None. * lib_cls -- library class. Defaults to None (-> ctypes.CDLL). """ try: if lib_cls: return lib_cls(lib) else: return ctypes.CDLL(lib) except Exception: if name: lib_msg = '%s (%s)' % (name, lib) else: lib_msg = lib lib_msg += ' could not be loaded' if sys.platform == 'cygwin': lib_msg += ' in cygwin' _LOGGER.error(lib_msg, exc_info=True) return None
python
def load_library(lib, name=None, lib_cls=None): """Loads a library. Catches and logs exceptions. Returns: the loaded library or None arguments: * lib -- path to/name of the library to be loaded * name -- the library's identifier (for logging) Defaults to None. * lib_cls -- library class. Defaults to None (-> ctypes.CDLL). """ try: if lib_cls: return lib_cls(lib) else: return ctypes.CDLL(lib) except Exception: if name: lib_msg = '%s (%s)' % (name, lib) else: lib_msg = lib lib_msg += ' could not be loaded' if sys.platform == 'cygwin': lib_msg += ' in cygwin' _LOGGER.error(lib_msg, exc_info=True) return None
[ "def", "load_library", "(", "lib", ",", "name", "=", "None", ",", "lib_cls", "=", "None", ")", ":", "try", ":", "if", "lib_cls", ":", "return", "lib_cls", "(", "lib", ")", "else", ":", "return", "ctypes", ".", "CDLL", "(", "lib", ")", "except", "Exception", ":", "if", "name", ":", "lib_msg", "=", "'%s (%s)'", "%", "(", "name", ",", "lib", ")", "else", ":", "lib_msg", "=", "lib", "lib_msg", "+=", "' could not be loaded'", "if", "sys", ".", "platform", "==", "'cygwin'", ":", "lib_msg", "+=", "' in cygwin'", "_LOGGER", ".", "error", "(", "lib_msg", ",", "exc_info", "=", "True", ")", "return", "None" ]
Loads a library. Catches and logs exceptions. Returns: the loaded library or None arguments: * lib -- path to/name of the library to be loaded * name -- the library's identifier (for logging) Defaults to None. * lib_cls -- library class. Defaults to None (-> ctypes.CDLL).
[ "Loads", "a", "library", ".", "Catches", "and", "logs", "exceptions", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/libloader.py#L88-L115
227,476
pyusb/pyusb
usb/libloader.py
load_locate_library
def load_locate_library(candidates, cygwin_lib, name, win_cls=None, cygwin_cls=None, others_cls=None, find_library=None, check_symbols=None): """Locates and loads a library. Returns: the loaded library arguments: * candidates -- candidates list for locate_library() * cygwin_lib -- name of the cygwin library * name -- lib identifier (for logging). Defaults to None. * win_cls -- class that is used to instantiate the library on win32 platforms. Defaults to None (-> ctypes.CDLL). * cygwin_cls -- library class for cygwin platforms. Defaults to None (-> ctypes.CDLL). * others_cls -- library class for all other platforms. Defaults to None (-> ctypes.CDLL). * find_library -- see locate_library(). Defaults to None. * check_symbols -- either None or a list of symbols that the loaded lib must provide (hasattr(<>)) in order to be considered valid. LibraryMissingSymbolsException is raised if any symbol is missing. raises: * NoLibraryCandidatesException * LibraryNotFoundException * LibraryNotLoadedException * LibraryMissingSymbolsException """ if sys.platform == 'cygwin': if cygwin_lib: loaded_lib = load_library(cygwin_lib, name, cygwin_cls) else: raise NoLibraryCandidatesException(name) elif candidates: lib = locate_library(candidates, find_library) if lib: if sys.platform == 'win32': loaded_lib = load_library(lib, name, win_cls) else: loaded_lib = load_library(lib, name, others_cls) else: _LOGGER.error('%r could not be found', (name or candidates)) raise LibraryNotFoundException(name) else: raise NoLibraryCandidatesException(name) if loaded_lib is None: raise LibraryNotLoadedException(name) elif check_symbols: symbols_missing = [ s for s in check_symbols if not hasattr(loaded_lib, s) ] if symbols_missing: msg = ('%r, missing symbols: %r', lib, symbols_missing ) _LOGGER.error(msg) raise LibraryMissingSymbolsException(lib) else: return loaded_lib else: return loaded_lib
python
def load_locate_library(candidates, cygwin_lib, name, win_cls=None, cygwin_cls=None, others_cls=None, find_library=None, check_symbols=None): """Locates and loads a library. Returns: the loaded library arguments: * candidates -- candidates list for locate_library() * cygwin_lib -- name of the cygwin library * name -- lib identifier (for logging). Defaults to None. * win_cls -- class that is used to instantiate the library on win32 platforms. Defaults to None (-> ctypes.CDLL). * cygwin_cls -- library class for cygwin platforms. Defaults to None (-> ctypes.CDLL). * others_cls -- library class for all other platforms. Defaults to None (-> ctypes.CDLL). * find_library -- see locate_library(). Defaults to None. * check_symbols -- either None or a list of symbols that the loaded lib must provide (hasattr(<>)) in order to be considered valid. LibraryMissingSymbolsException is raised if any symbol is missing. raises: * NoLibraryCandidatesException * LibraryNotFoundException * LibraryNotLoadedException * LibraryMissingSymbolsException """ if sys.platform == 'cygwin': if cygwin_lib: loaded_lib = load_library(cygwin_lib, name, cygwin_cls) else: raise NoLibraryCandidatesException(name) elif candidates: lib = locate_library(candidates, find_library) if lib: if sys.platform == 'win32': loaded_lib = load_library(lib, name, win_cls) else: loaded_lib = load_library(lib, name, others_cls) else: _LOGGER.error('%r could not be found', (name or candidates)) raise LibraryNotFoundException(name) else: raise NoLibraryCandidatesException(name) if loaded_lib is None: raise LibraryNotLoadedException(name) elif check_symbols: symbols_missing = [ s for s in check_symbols if not hasattr(loaded_lib, s) ] if symbols_missing: msg = ('%r, missing symbols: %r', lib, symbols_missing ) _LOGGER.error(msg) raise LibraryMissingSymbolsException(lib) else: return loaded_lib else: return loaded_lib
[ "def", "load_locate_library", "(", "candidates", ",", "cygwin_lib", ",", "name", ",", "win_cls", "=", "None", ",", "cygwin_cls", "=", "None", ",", "others_cls", "=", "None", ",", "find_library", "=", "None", ",", "check_symbols", "=", "None", ")", ":", "if", "sys", ".", "platform", "==", "'cygwin'", ":", "if", "cygwin_lib", ":", "loaded_lib", "=", "load_library", "(", "cygwin_lib", ",", "name", ",", "cygwin_cls", ")", "else", ":", "raise", "NoLibraryCandidatesException", "(", "name", ")", "elif", "candidates", ":", "lib", "=", "locate_library", "(", "candidates", ",", "find_library", ")", "if", "lib", ":", "if", "sys", ".", "platform", "==", "'win32'", ":", "loaded_lib", "=", "load_library", "(", "lib", ",", "name", ",", "win_cls", ")", "else", ":", "loaded_lib", "=", "load_library", "(", "lib", ",", "name", ",", "others_cls", ")", "else", ":", "_LOGGER", ".", "error", "(", "'%r could not be found'", ",", "(", "name", "or", "candidates", ")", ")", "raise", "LibraryNotFoundException", "(", "name", ")", "else", ":", "raise", "NoLibraryCandidatesException", "(", "name", ")", "if", "loaded_lib", "is", "None", ":", "raise", "LibraryNotLoadedException", "(", "name", ")", "elif", "check_symbols", ":", "symbols_missing", "=", "[", "s", "for", "s", "in", "check_symbols", "if", "not", "hasattr", "(", "loaded_lib", ",", "s", ")", "]", "if", "symbols_missing", ":", "msg", "=", "(", "'%r, missing symbols: %r'", ",", "lib", ",", "symbols_missing", ")", "_LOGGER", ".", "error", "(", "msg", ")", "raise", "LibraryMissingSymbolsException", "(", "lib", ")", "else", ":", "return", "loaded_lib", "else", ":", "return", "loaded_lib" ]
Locates and loads a library. Returns: the loaded library arguments: * candidates -- candidates list for locate_library() * cygwin_lib -- name of the cygwin library * name -- lib identifier (for logging). Defaults to None. * win_cls -- class that is used to instantiate the library on win32 platforms. Defaults to None (-> ctypes.CDLL). * cygwin_cls -- library class for cygwin platforms. Defaults to None (-> ctypes.CDLL). * others_cls -- library class for all other platforms. Defaults to None (-> ctypes.CDLL). * find_library -- see locate_library(). Defaults to None. * check_symbols -- either None or a list of symbols that the loaded lib must provide (hasattr(<>)) in order to be considered valid. LibraryMissingSymbolsException is raised if any symbol is missing. raises: * NoLibraryCandidatesException * LibraryNotFoundException * LibraryNotLoadedException * LibraryMissingSymbolsException
[ "Locates", "and", "loads", "a", "library", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/libloader.py#L117-L177
227,477
pyusb/pyusb
usb/control.py
get_status
def get_status(dev, recipient = None): r"""Return the status for the specified recipient. dev is the Device object to which the request will be sent to. The recipient can be None (on which the status will be queried from the device), an Interface or Endpoint descriptors. The status value is returned as an integer with the lower word being the two bytes status value. """ bmRequestType, wIndex = _parse_recipient(recipient, util.CTRL_IN) ret = dev.ctrl_transfer(bmRequestType = bmRequestType, bRequest = 0x00, wIndex = wIndex, data_or_wLength = 2) return ret[0] | (ret[1] << 8)
python
def get_status(dev, recipient = None): r"""Return the status for the specified recipient. dev is the Device object to which the request will be sent to. The recipient can be None (on which the status will be queried from the device), an Interface or Endpoint descriptors. The status value is returned as an integer with the lower word being the two bytes status value. """ bmRequestType, wIndex = _parse_recipient(recipient, util.CTRL_IN) ret = dev.ctrl_transfer(bmRequestType = bmRequestType, bRequest = 0x00, wIndex = wIndex, data_or_wLength = 2) return ret[0] | (ret[1] << 8)
[ "def", "get_status", "(", "dev", ",", "recipient", "=", "None", ")", ":", "bmRequestType", ",", "wIndex", "=", "_parse_recipient", "(", "recipient", ",", "util", ".", "CTRL_IN", ")", "ret", "=", "dev", ".", "ctrl_transfer", "(", "bmRequestType", "=", "bmRequestType", ",", "bRequest", "=", "0x00", ",", "wIndex", "=", "wIndex", ",", "data_or_wLength", "=", "2", ")", "return", "ret", "[", "0", "]", "|", "(", "ret", "[", "1", "]", "<<", "8", ")" ]
r"""Return the status for the specified recipient. dev is the Device object to which the request will be sent to. The recipient can be None (on which the status will be queried from the device), an Interface or Endpoint descriptors. The status value is returned as an integer with the lower word being the two bytes status value.
[ "r", "Return", "the", "status", "for", "the", "specified", "recipient", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/control.py#L79-L96
227,478
pyusb/pyusb
usb/control.py
get_descriptor
def get_descriptor(dev, desc_size, desc_type, desc_index, wIndex = 0): r"""Return the specified descriptor. dev is the Device object to which the request will be sent to. desc_size is the descriptor size. desc_type and desc_index are the descriptor type and index, respectively. wIndex index is used for string descriptors and represents the Language ID. For other types of descriptors, it is zero. """ wValue = desc_index | (desc_type << 8) bmRequestType = util.build_request_type( util.CTRL_IN, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_DEVICE) return dev.ctrl_transfer( bmRequestType = bmRequestType, bRequest = 0x06, wValue = wValue, wIndex = wIndex, data_or_wLength = desc_size)
python
def get_descriptor(dev, desc_size, desc_type, desc_index, wIndex = 0): r"""Return the specified descriptor. dev is the Device object to which the request will be sent to. desc_size is the descriptor size. desc_type and desc_index are the descriptor type and index, respectively. wIndex index is used for string descriptors and represents the Language ID. For other types of descriptors, it is zero. """ wValue = desc_index | (desc_type << 8) bmRequestType = util.build_request_type( util.CTRL_IN, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_DEVICE) return dev.ctrl_transfer( bmRequestType = bmRequestType, bRequest = 0x06, wValue = wValue, wIndex = wIndex, data_or_wLength = desc_size)
[ "def", "get_descriptor", "(", "dev", ",", "desc_size", ",", "desc_type", ",", "desc_index", ",", "wIndex", "=", "0", ")", ":", "wValue", "=", "desc_index", "|", "(", "desc_type", "<<", "8", ")", "bmRequestType", "=", "util", ".", "build_request_type", "(", "util", ".", "CTRL_IN", ",", "util", ".", "CTRL_TYPE_STANDARD", ",", "util", ".", "CTRL_RECIPIENT_DEVICE", ")", "return", "dev", ".", "ctrl_transfer", "(", "bmRequestType", "=", "bmRequestType", ",", "bRequest", "=", "0x06", ",", "wValue", "=", "wValue", ",", "wIndex", "=", "wIndex", ",", "data_or_wLength", "=", "desc_size", ")" ]
r"""Return the specified descriptor. dev is the Device object to which the request will be sent to. desc_size is the descriptor size. desc_type and desc_index are the descriptor type and index, respectively. wIndex index is used for string descriptors and represents the Language ID. For other types of descriptors, it is zero.
[ "r", "Return", "the", "specified", "descriptor", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/control.py#L135-L160
227,479
pyusb/pyusb
usb/control.py
set_descriptor
def set_descriptor(dev, desc, desc_type, desc_index, wIndex = None): r"""Update an existing descriptor or add a new one. dev is the Device object to which the request will be sent to. The desc parameter is the descriptor to be sent to the device. desc_type and desc_index are the descriptor type and index, respectively. wIndex index is used for string descriptors and represents the Language ID. For other types of descriptors, it is zero. """ wValue = desc_index | (desc_type << 8) bmRequestType = util.build_request_type( util.CTRL_OUT, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_DEVICE) dev.ctrl_transfer( bmRequestType = bmRequestType, bRequest = 0x07, wValue = wValue, wIndex = wIndex, data_or_wLength = desc)
python
def set_descriptor(dev, desc, desc_type, desc_index, wIndex = None): r"""Update an existing descriptor or add a new one. dev is the Device object to which the request will be sent to. The desc parameter is the descriptor to be sent to the device. desc_type and desc_index are the descriptor type and index, respectively. wIndex index is used for string descriptors and represents the Language ID. For other types of descriptors, it is zero. """ wValue = desc_index | (desc_type << 8) bmRequestType = util.build_request_type( util.CTRL_OUT, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_DEVICE) dev.ctrl_transfer( bmRequestType = bmRequestType, bRequest = 0x07, wValue = wValue, wIndex = wIndex, data_or_wLength = desc)
[ "def", "set_descriptor", "(", "dev", ",", "desc", ",", "desc_type", ",", "desc_index", ",", "wIndex", "=", "None", ")", ":", "wValue", "=", "desc_index", "|", "(", "desc_type", "<<", "8", ")", "bmRequestType", "=", "util", ".", "build_request_type", "(", "util", ".", "CTRL_OUT", ",", "util", ".", "CTRL_TYPE_STANDARD", ",", "util", ".", "CTRL_RECIPIENT_DEVICE", ")", "dev", ".", "ctrl_transfer", "(", "bmRequestType", "=", "bmRequestType", ",", "bRequest", "=", "0x07", ",", "wValue", "=", "wValue", ",", "wIndex", "=", "wIndex", ",", "data_or_wLength", "=", "desc", ")" ]
r"""Update an existing descriptor or add a new one. dev is the Device object to which the request will be sent to. The desc parameter is the descriptor to be sent to the device. desc_type and desc_index are the descriptor type and index, respectively. wIndex index is used for string descriptors and represents the Language ID. For other types of descriptors, it is zero.
[ "r", "Update", "an", "existing", "descriptor", "or", "add", "a", "new", "one", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/control.py#L162-L186
227,480
pyusb/pyusb
usb/control.py
get_configuration
def get_configuration(dev): r"""Get the current active configuration of the device. dev is the Device object to which the request will be sent to. This function differs from the Device.get_active_configuration method because the later may use cached data, while this function always does a device request. """ bmRequestType = util.build_request_type( util.CTRL_IN, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_DEVICE) return dev.ctrl_transfer( bmRequestType, bRequest = 0x08, data_or_wLength = 1)[0]
python
def get_configuration(dev): r"""Get the current active configuration of the device. dev is the Device object to which the request will be sent to. This function differs from the Device.get_active_configuration method because the later may use cached data, while this function always does a device request. """ bmRequestType = util.build_request_type( util.CTRL_IN, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_DEVICE) return dev.ctrl_transfer( bmRequestType, bRequest = 0x08, data_or_wLength = 1)[0]
[ "def", "get_configuration", "(", "dev", ")", ":", "bmRequestType", "=", "util", ".", "build_request_type", "(", "util", ".", "CTRL_IN", ",", "util", ".", "CTRL_TYPE_STANDARD", ",", "util", ".", "CTRL_RECIPIENT_DEVICE", ")", "return", "dev", ".", "ctrl_transfer", "(", "bmRequestType", ",", "bRequest", "=", "0x08", ",", "data_or_wLength", "=", "1", ")", "[", "0", "]" ]
r"""Get the current active configuration of the device. dev is the Device object to which the request will be sent to. This function differs from the Device.get_active_configuration method because the later may use cached data, while this function always does a device request.
[ "r", "Get", "the", "current", "active", "configuration", "of", "the", "device", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/control.py#L188-L206
227,481
pyusb/pyusb
usb/control.py
get_interface
def get_interface(dev, bInterfaceNumber): r"""Get the current alternate setting of the interface. dev is the Device object to which the request will be sent to. """ bmRequestType = util.build_request_type( util.CTRL_IN, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_INTERFACE) return dev.ctrl_transfer( bmRequestType = bmRequestType, bRequest = 0x0a, wIndex = bInterfaceNumber, data_or_wLength = 1)[0]
python
def get_interface(dev, bInterfaceNumber): r"""Get the current alternate setting of the interface. dev is the Device object to which the request will be sent to. """ bmRequestType = util.build_request_type( util.CTRL_IN, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_INTERFACE) return dev.ctrl_transfer( bmRequestType = bmRequestType, bRequest = 0x0a, wIndex = bInterfaceNumber, data_or_wLength = 1)[0]
[ "def", "get_interface", "(", "dev", ",", "bInterfaceNumber", ")", ":", "bmRequestType", "=", "util", ".", "build_request_type", "(", "util", ".", "CTRL_IN", ",", "util", ".", "CTRL_TYPE_STANDARD", ",", "util", ".", "CTRL_RECIPIENT_INTERFACE", ")", "return", "dev", ".", "ctrl_transfer", "(", "bmRequestType", "=", "bmRequestType", ",", "bRequest", "=", "0x0a", ",", "wIndex", "=", "bInterfaceNumber", ",", "data_or_wLength", "=", "1", ")", "[", "0", "]" ]
r"""Get the current alternate setting of the interface. dev is the Device object to which the request will be sent to.
[ "r", "Get", "the", "current", "alternate", "setting", "of", "the", "interface", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/control.py#L216-L231
227,482
pyusb/pyusb
tools/vcp_terminal.py
configInputQueue
def configInputQueue(): """ configure a queue for accepting characters and return the queue """ def captureInput(iqueue): while True: c = getch() if c == '\x03' or c == '\x04': # end on ctrl+c / ctrl+d log.debug("Break received (\\x{0:02X})".format(ord(c))) iqueue.put(c) break log.debug( "Input Char '{}' received".format( c if c != '\r' else '\\r')) iqueue.put(c) input_queue = queue.Queue() input_thread = threading.Thread(target=lambda: captureInput(input_queue)) input_thread.daemon = True input_thread.start() return input_queue, input_thread
python
def configInputQueue(): """ configure a queue for accepting characters and return the queue """ def captureInput(iqueue): while True: c = getch() if c == '\x03' or c == '\x04': # end on ctrl+c / ctrl+d log.debug("Break received (\\x{0:02X})".format(ord(c))) iqueue.put(c) break log.debug( "Input Char '{}' received".format( c if c != '\r' else '\\r')) iqueue.put(c) input_queue = queue.Queue() input_thread = threading.Thread(target=lambda: captureInput(input_queue)) input_thread.daemon = True input_thread.start() return input_queue, input_thread
[ "def", "configInputQueue", "(", ")", ":", "def", "captureInput", "(", "iqueue", ")", ":", "while", "True", ":", "c", "=", "getch", "(", ")", "if", "c", "==", "'\\x03'", "or", "c", "==", "'\\x04'", ":", "# end on ctrl+c / ctrl+d", "log", ".", "debug", "(", "\"Break received (\\\\x{0:02X})\"", ".", "format", "(", "ord", "(", "c", ")", ")", ")", "iqueue", ".", "put", "(", "c", ")", "break", "log", ".", "debug", "(", "\"Input Char '{}' received\"", ".", "format", "(", "c", "if", "c", "!=", "'\\r'", "else", "'\\\\r'", ")", ")", "iqueue", ".", "put", "(", "c", ")", "input_queue", "=", "queue", ".", "Queue", "(", ")", "input_thread", "=", "threading", ".", "Thread", "(", "target", "=", "lambda", ":", "captureInput", "(", "input_queue", ")", ")", "input_thread", ".", "daemon", "=", "True", "input_thread", ".", "start", "(", ")", "return", "input_queue", ",", "input_thread" ]
configure a queue for accepting characters and return the queue
[ "configure", "a", "queue", "for", "accepting", "characters", "and", "return", "the", "queue" ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/tools/vcp_terminal.py#L518-L537
227,483
pyusb/pyusb
tools/vcp_terminal.py
fmt_text
def fmt_text(text): """ convert characters that aren't printable to hex format """ PRINTABLE_CHAR = set( list(range(ord(' '), ord('~') + 1)) + [ord('\r'), ord('\n')]) newtext = ("\\x{:02X}".format( c) if c not in PRINTABLE_CHAR else chr(c) for c in text) textlines = "\r\n".join(l.strip('\r') for l in "".join(newtext).split('\n')) return textlines
python
def fmt_text(text): """ convert characters that aren't printable to hex format """ PRINTABLE_CHAR = set( list(range(ord(' '), ord('~') + 1)) + [ord('\r'), ord('\n')]) newtext = ("\\x{:02X}".format( c) if c not in PRINTABLE_CHAR else chr(c) for c in text) textlines = "\r\n".join(l.strip('\r') for l in "".join(newtext).split('\n')) return textlines
[ "def", "fmt_text", "(", "text", ")", ":", "PRINTABLE_CHAR", "=", "set", "(", "list", "(", "range", "(", "ord", "(", "' '", ")", ",", "ord", "(", "'~'", ")", "+", "1", ")", ")", "+", "[", "ord", "(", "'\\r'", ")", ",", "ord", "(", "'\\n'", ")", "]", ")", "newtext", "=", "(", "\"\\\\x{:02X}\"", ".", "format", "(", "c", ")", "if", "c", "not", "in", "PRINTABLE_CHAR", "else", "chr", "(", "c", ")", "for", "c", "in", "text", ")", "textlines", "=", "\"\\r\\n\"", ".", "join", "(", "l", ".", "strip", "(", "'\\r'", ")", "for", "l", "in", "\"\"", ".", "join", "(", "newtext", ")", ".", "split", "(", "'\\n'", ")", ")", "return", "textlines" ]
convert characters that aren't printable to hex format
[ "convert", "characters", "that", "aren", "t", "printable", "to", "hex", "format" ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/tools/vcp_terminal.py#L540-L549
227,484
pyusb/pyusb
tools/vcp_terminal.py
ftdi_to_clkbits
def ftdi_to_clkbits(baudrate): # from libftdi """ 10,27 => divisor = 10000, rate = 300 88,13 => divisor = 5000, rate = 600 C4,09 => divisor = 2500, rate = 1200 E2,04 => divisor = 1250, rate = 2,400 71,02 => divisor = 625, rate = 4,800 38,41 => divisor = 312.5, rate = 9,600 D0,80 => divisor = 208.25, rate = 14406 9C,80 => divisor = 156, rate = 19,230 4E,C0 => divisor = 78, rate = 38,461 34,00 => divisor = 52, rate = 57,692 1A,00 => divisor = 26, rate = 115,384 0D,00 => divisor = 13, rate = 230,769 """ clk = 48000000 clk_div = 16 frac_code = [0, 3, 2, 4, 1, 5, 6, 7] actual_baud = 0 if baudrate >= clk / clk_div: encoded_divisor = 0 actual_baud = (clk // clk_div) elif baudrate >= clk / (clk_div + clk_div / 2): encoded_divisor = 1 actual_baud = clk // (clk_div + clk_div // 2) elif baudrate >= clk / (2 * clk_div): encoded_divisor = 2 actual_baud = clk // (2 * clk_div) else: # We divide by 16 to have 3 fractional bits and one bit for rounding divisor = clk * 16 // clk_div // baudrate best_divisor = (divisor + 1) // 2 if best_divisor > 0x20000: best_divisor = 0x1ffff actual_baud = clk * 16 // clk_div // best_divisor actual_baud = (actual_baud + 1) // 2 encoded_divisor = ((best_divisor >> 3) + (frac_code[best_divisor & 0x7] << 14)) value = encoded_divisor & 0xFFFF index = encoded_divisor >> 16 return actual_baud, value, index
python
def ftdi_to_clkbits(baudrate): # from libftdi """ 10,27 => divisor = 10000, rate = 300 88,13 => divisor = 5000, rate = 600 C4,09 => divisor = 2500, rate = 1200 E2,04 => divisor = 1250, rate = 2,400 71,02 => divisor = 625, rate = 4,800 38,41 => divisor = 312.5, rate = 9,600 D0,80 => divisor = 208.25, rate = 14406 9C,80 => divisor = 156, rate = 19,230 4E,C0 => divisor = 78, rate = 38,461 34,00 => divisor = 52, rate = 57,692 1A,00 => divisor = 26, rate = 115,384 0D,00 => divisor = 13, rate = 230,769 """ clk = 48000000 clk_div = 16 frac_code = [0, 3, 2, 4, 1, 5, 6, 7] actual_baud = 0 if baudrate >= clk / clk_div: encoded_divisor = 0 actual_baud = (clk // clk_div) elif baudrate >= clk / (clk_div + clk_div / 2): encoded_divisor = 1 actual_baud = clk // (clk_div + clk_div // 2) elif baudrate >= clk / (2 * clk_div): encoded_divisor = 2 actual_baud = clk // (2 * clk_div) else: # We divide by 16 to have 3 fractional bits and one bit for rounding divisor = clk * 16 // clk_div // baudrate best_divisor = (divisor + 1) // 2 if best_divisor > 0x20000: best_divisor = 0x1ffff actual_baud = clk * 16 // clk_div // best_divisor actual_baud = (actual_baud + 1) // 2 encoded_divisor = ((best_divisor >> 3) + (frac_code[best_divisor & 0x7] << 14)) value = encoded_divisor & 0xFFFF index = encoded_divisor >> 16 return actual_baud, value, index
[ "def", "ftdi_to_clkbits", "(", "baudrate", ")", ":", "# from libftdi", "clk", "=", "48000000", "clk_div", "=", "16", "frac_code", "=", "[", "0", ",", "3", ",", "2", ",", "4", ",", "1", ",", "5", ",", "6", ",", "7", "]", "actual_baud", "=", "0", "if", "baudrate", ">=", "clk", "/", "clk_div", ":", "encoded_divisor", "=", "0", "actual_baud", "=", "(", "clk", "//", "clk_div", ")", "elif", "baudrate", ">=", "clk", "/", "(", "clk_div", "+", "clk_div", "/", "2", ")", ":", "encoded_divisor", "=", "1", "actual_baud", "=", "clk", "//", "(", "clk_div", "+", "clk_div", "//", "2", ")", "elif", "baudrate", ">=", "clk", "/", "(", "2", "*", "clk_div", ")", ":", "encoded_divisor", "=", "2", "actual_baud", "=", "clk", "//", "(", "2", "*", "clk_div", ")", "else", ":", "# We divide by 16 to have 3 fractional bits and one bit for rounding", "divisor", "=", "clk", "*", "16", "//", "clk_div", "//", "baudrate", "best_divisor", "=", "(", "divisor", "+", "1", ")", "//", "2", "if", "best_divisor", ">", "0x20000", ":", "best_divisor", "=", "0x1ffff", "actual_baud", "=", "clk", "*", "16", "//", "clk_div", "//", "best_divisor", "actual_baud", "=", "(", "actual_baud", "+", "1", ")", "//", "2", "encoded_divisor", "=", "(", "(", "best_divisor", ">>", "3", ")", "+", "(", "frac_code", "[", "best_divisor", "&", "0x7", "]", "<<", "14", ")", ")", "value", "=", "encoded_divisor", "&", "0xFFFF", "index", "=", "encoded_divisor", ">>", "16", "return", "actual_baud", ",", "value", ",", "index" ]
10,27 => divisor = 10000, rate = 300 88,13 => divisor = 5000, rate = 600 C4,09 => divisor = 2500, rate = 1200 E2,04 => divisor = 1250, rate = 2,400 71,02 => divisor = 625, rate = 4,800 38,41 => divisor = 312.5, rate = 9,600 D0,80 => divisor = 208.25, rate = 14406 9C,80 => divisor = 156, rate = 19,230 4E,C0 => divisor = 78, rate = 38,461 34,00 => divisor = 52, rate = 57,692 1A,00 => divisor = 26, rate = 115,384 0D,00 => divisor = 13, rate = 230,769
[ "10", "27", "=", ">", "divisor", "=", "10000", "rate", "=", "300", "88", "13", "=", ">", "divisor", "=", "5000", "rate", "=", "600", "C4", "09", "=", ">", "divisor", "=", "2500", "rate", "=", "1200", "E2", "04", "=", ">", "divisor", "=", "1250", "rate", "=", "2", "400", "71", "02", "=", ">", "divisor", "=", "625", "rate", "=", "4", "800", "38", "41", "=", ">", "divisor", "=", "312", ".", "5", "rate", "=", "9", "600", "D0", "80", "=", ">", "divisor", "=", "208", ".", "25", "rate", "=", "14406", "9C", "80", "=", ">", "divisor", "=", "156", "rate", "=", "19", "230", "4E", "C0", "=", ">", "divisor", "=", "78", "rate", "=", "38", "461", "34", "00", "=", ">", "divisor", "=", "52", "rate", "=", "57", "692", "1A", "00", "=", ">", "divisor", "=", "26", "rate", "=", "115", "384", "0D", "00", "=", ">", "divisor", "=", "13", "rate", "=", "230", "769" ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/tools/vcp_terminal.py#L576-L617
227,485
pyusb/pyusb
tools/vcp_terminal.py
ComPort._read
def _read(self): """ check ep for data, add it to queue and sleep for interval """ while self._rxactive: try: rv = self._ep_in.read(self._ep_in.wMaxPacketSize) if self._isFTDI: status = rv[:2] # FTDI prepends 2 flow control characters, # modem status and line status of the UART if status[0] != 1 or status[1] != 0x60: log.info( "USB Status: 0x{0:02X} 0x{1:02X}".format( *status)) rv = rv[2:] for rvi in rv: self._rxqueue.put(rvi) except usb.USBError as e: log.warn("USB Error on _read {}".format(e)) return time.sleep(self._rxinterval)
python
def _read(self): """ check ep for data, add it to queue and sleep for interval """ while self._rxactive: try: rv = self._ep_in.read(self._ep_in.wMaxPacketSize) if self._isFTDI: status = rv[:2] # FTDI prepends 2 flow control characters, # modem status and line status of the UART if status[0] != 1 or status[1] != 0x60: log.info( "USB Status: 0x{0:02X} 0x{1:02X}".format( *status)) rv = rv[2:] for rvi in rv: self._rxqueue.put(rvi) except usb.USBError as e: log.warn("USB Error on _read {}".format(e)) return time.sleep(self._rxinterval)
[ "def", "_read", "(", "self", ")", ":", "while", "self", ".", "_rxactive", ":", "try", ":", "rv", "=", "self", ".", "_ep_in", ".", "read", "(", "self", ".", "_ep_in", ".", "wMaxPacketSize", ")", "if", "self", ".", "_isFTDI", ":", "status", "=", "rv", "[", ":", "2", "]", "# FTDI prepends 2 flow control characters,", "# modem status and line status of the UART", "if", "status", "[", "0", "]", "!=", "1", "or", "status", "[", "1", "]", "!=", "0x60", ":", "log", ".", "info", "(", "\"USB Status: 0x{0:02X} 0x{1:02X}\"", ".", "format", "(", "*", "status", ")", ")", "rv", "=", "rv", "[", "2", ":", "]", "for", "rvi", "in", "rv", ":", "self", ".", "_rxqueue", ".", "put", "(", "rvi", ")", "except", "usb", ".", "USBError", "as", "e", ":", "log", ".", "warn", "(", "\"USB Error on _read {}\"", ".", "format", "(", "e", ")", ")", "return", "time", ".", "sleep", "(", "self", ".", "_rxinterval", ")" ]
check ep for data, add it to queue and sleep for interval
[ "check", "ep", "for", "data", "add", "it", "to", "queue", "and", "sleep", "for", "interval" ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/tools/vcp_terminal.py#L187-L206
227,486
pyusb/pyusb
tools/vcp_terminal.py
ComPort._resetFTDI
def _resetFTDI(self): """ reset the FTDI device """ if not self._isFTDI: return txdir = 0 # 0:OUT, 1:IN req_type = 2 # 0:std, 1:class, 2:vendor recipient = 0 # 0:device, 1:interface, 2:endpoint, 3:other req_type = (txdir << 7) + (req_type << 5) + recipient self.device.ctrl_transfer( bmRequestType=req_type, bRequest=0, # RESET wValue=0, # RESET wIndex=1, data_or_wLength=0)
python
def _resetFTDI(self): """ reset the FTDI device """ if not self._isFTDI: return txdir = 0 # 0:OUT, 1:IN req_type = 2 # 0:std, 1:class, 2:vendor recipient = 0 # 0:device, 1:interface, 2:endpoint, 3:other req_type = (txdir << 7) + (req_type << 5) + recipient self.device.ctrl_transfer( bmRequestType=req_type, bRequest=0, # RESET wValue=0, # RESET wIndex=1, data_or_wLength=0)
[ "def", "_resetFTDI", "(", "self", ")", ":", "if", "not", "self", ".", "_isFTDI", ":", "return", "txdir", "=", "0", "# 0:OUT, 1:IN", "req_type", "=", "2", "# 0:std, 1:class, 2:vendor", "recipient", "=", "0", "# 0:device, 1:interface, 2:endpoint, 3:other", "req_type", "=", "(", "txdir", "<<", "7", ")", "+", "(", "req_type", "<<", "5", ")", "+", "recipient", "self", ".", "device", ".", "ctrl_transfer", "(", "bmRequestType", "=", "req_type", ",", "bRequest", "=", "0", ",", "# RESET", "wValue", "=", "0", ",", "# RESET", "wIndex", "=", "1", ",", "data_or_wLength", "=", "0", ")" ]
reset the FTDI device
[ "reset", "the", "FTDI", "device" ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/tools/vcp_terminal.py#L359-L373
227,487
pyusb/pyusb
usb/legacy.py
busses
def busses(): r"""Returns a tuple with the usb busses.""" return (Bus(g) for k, g in groupby( sorted(core.find(find_all=True), key=lambda d: d.bus), lambda d: d.bus))
python
def busses(): r"""Returns a tuple with the usb busses.""" return (Bus(g) for k, g in groupby( sorted(core.find(find_all=True), key=lambda d: d.bus), lambda d: d.bus))
[ "def", "busses", "(", ")", ":", "return", "(", "Bus", "(", "g", ")", "for", "k", ",", "g", "in", "groupby", "(", "sorted", "(", "core", ".", "find", "(", "find_all", "=", "True", ")", ",", "key", "=", "lambda", "d", ":", "d", ".", "bus", ")", ",", "lambda", "d", ":", "d", ".", "bus", ")", ")" ]
r"""Returns a tuple with the usb busses.
[ "r", "Returns", "a", "tuple", "with", "the", "usb", "busses", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L337-L341
227,488
pyusb/pyusb
usb/legacy.py
DeviceHandle.bulkWrite
def bulkWrite(self, endpoint, buffer, timeout = 100): r"""Perform a bulk write request to the endpoint specified. Arguments: endpoint: endpoint number. buffer: sequence data buffer to write. This parameter can be any sequence type. timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written. """ return self.dev.write(endpoint, buffer, timeout)
python
def bulkWrite(self, endpoint, buffer, timeout = 100): r"""Perform a bulk write request to the endpoint specified. Arguments: endpoint: endpoint number. buffer: sequence data buffer to write. This parameter can be any sequence type. timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written. """ return self.dev.write(endpoint, buffer, timeout)
[ "def", "bulkWrite", "(", "self", ",", "endpoint", ",", "buffer", ",", "timeout", "=", "100", ")", ":", "return", "self", ".", "dev", ".", "write", "(", "endpoint", ",", "buffer", ",", "timeout", ")" ]
r"""Perform a bulk write request to the endpoint specified. Arguments: endpoint: endpoint number. buffer: sequence data buffer to write. This parameter can be any sequence type. timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written.
[ "r", "Perform", "a", "bulk", "write", "request", "to", "the", "endpoint", "specified", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L131-L141
227,489
pyusb/pyusb
usb/legacy.py
DeviceHandle.bulkRead
def bulkRead(self, endpoint, size, timeout = 100): r"""Performs a bulk read request to the endpoint specified. Arguments: endpoint: endpoint number. size: number of bytes to read. timeout: operation timeout in milliseconds. (default: 100) Returns a tuple with the data read. """ return self.dev.read(endpoint, size, timeout)
python
def bulkRead(self, endpoint, size, timeout = 100): r"""Performs a bulk read request to the endpoint specified. Arguments: endpoint: endpoint number. size: number of bytes to read. timeout: operation timeout in milliseconds. (default: 100) Returns a tuple with the data read. """ return self.dev.read(endpoint, size, timeout)
[ "def", "bulkRead", "(", "self", ",", "endpoint", ",", "size", ",", "timeout", "=", "100", ")", ":", "return", "self", ".", "dev", ".", "read", "(", "endpoint", ",", "size", ",", "timeout", ")" ]
r"""Performs a bulk read request to the endpoint specified. Arguments: endpoint: endpoint number. size: number of bytes to read. timeout: operation timeout in milliseconds. (default: 100) Returns a tuple with the data read.
[ "r", "Performs", "a", "bulk", "read", "request", "to", "the", "endpoint", "specified", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L143-L152
227,490
pyusb/pyusb
usb/legacy.py
DeviceHandle.interruptWrite
def interruptWrite(self, endpoint, buffer, timeout = 100): r"""Perform a interrupt write request to the endpoint specified. Arguments: endpoint: endpoint number. buffer: sequence data buffer to write. This parameter can be any sequence type. timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written. """ return self.dev.write(endpoint, buffer, timeout)
python
def interruptWrite(self, endpoint, buffer, timeout = 100): r"""Perform a interrupt write request to the endpoint specified. Arguments: endpoint: endpoint number. buffer: sequence data buffer to write. This parameter can be any sequence type. timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written. """ return self.dev.write(endpoint, buffer, timeout)
[ "def", "interruptWrite", "(", "self", ",", "endpoint", ",", "buffer", ",", "timeout", "=", "100", ")", ":", "return", "self", ".", "dev", ".", "write", "(", "endpoint", ",", "buffer", ",", "timeout", ")" ]
r"""Perform a interrupt write request to the endpoint specified. Arguments: endpoint: endpoint number. buffer: sequence data buffer to write. This parameter can be any sequence type. timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written.
[ "r", "Perform", "a", "interrupt", "write", "request", "to", "the", "endpoint", "specified", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L154-L164
227,491
pyusb/pyusb
usb/legacy.py
DeviceHandle.interruptRead
def interruptRead(self, endpoint, size, timeout = 100): r"""Performs a interrupt read request to the endpoint specified. Arguments: endpoint: endpoint number. size: number of bytes to read. timeout: operation timeout in milliseconds. (default: 100) Returns a tuple with the data read. """ return self.dev.read(endpoint, size, timeout)
python
def interruptRead(self, endpoint, size, timeout = 100): r"""Performs a interrupt read request to the endpoint specified. Arguments: endpoint: endpoint number. size: number of bytes to read. timeout: operation timeout in milliseconds. (default: 100) Returns a tuple with the data read. """ return self.dev.read(endpoint, size, timeout)
[ "def", "interruptRead", "(", "self", ",", "endpoint", ",", "size", ",", "timeout", "=", "100", ")", ":", "return", "self", ".", "dev", ".", "read", "(", "endpoint", ",", "size", ",", "timeout", ")" ]
r"""Performs a interrupt read request to the endpoint specified. Arguments: endpoint: endpoint number. size: number of bytes to read. timeout: operation timeout in milliseconds. (default: 100) Returns a tuple with the data read.
[ "r", "Performs", "a", "interrupt", "read", "request", "to", "the", "endpoint", "specified", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L166-L175
227,492
pyusb/pyusb
usb/legacy.py
DeviceHandle.controlMsg
def controlMsg(self, requestType, request, buffer, value = 0, index = 0, timeout = 100): r"""Perform a control request to the default control pipe on a device. Arguments: requestType: specifies the direction of data flow, the type of request, and the recipient. request: specifies the request. buffer: if the transfer is a write transfer, buffer is a sequence with the transfer data, otherwise, buffer is the number of bytes to read. value: specific information to pass to the device. (default: 0) index: specific information to pass to the device. (default: 0) timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written. """ return self.dev.ctrl_transfer( requestType, request, wValue = value, wIndex = index, data_or_wLength = buffer, timeout = timeout)
python
def controlMsg(self, requestType, request, buffer, value = 0, index = 0, timeout = 100): r"""Perform a control request to the default control pipe on a device. Arguments: requestType: specifies the direction of data flow, the type of request, and the recipient. request: specifies the request. buffer: if the transfer is a write transfer, buffer is a sequence with the transfer data, otherwise, buffer is the number of bytes to read. value: specific information to pass to the device. (default: 0) index: specific information to pass to the device. (default: 0) timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written. """ return self.dev.ctrl_transfer( requestType, request, wValue = value, wIndex = index, data_or_wLength = buffer, timeout = timeout)
[ "def", "controlMsg", "(", "self", ",", "requestType", ",", "request", ",", "buffer", ",", "value", "=", "0", ",", "index", "=", "0", ",", "timeout", "=", "100", ")", ":", "return", "self", ".", "dev", ".", "ctrl_transfer", "(", "requestType", ",", "request", ",", "wValue", "=", "value", ",", "wIndex", "=", "index", ",", "data_or_wLength", "=", "buffer", ",", "timeout", "=", "timeout", ")" ]
r"""Perform a control request to the default control pipe on a device. Arguments: requestType: specifies the direction of data flow, the type of request, and the recipient. request: specifies the request. buffer: if the transfer is a write transfer, buffer is a sequence with the transfer data, otherwise, buffer is the number of bytes to read. value: specific information to pass to the device. (default: 0) index: specific information to pass to the device. (default: 0) timeout: operation timeout in milliseconds. (default: 100) Returns the number of bytes written.
[ "r", "Perform", "a", "control", "request", "to", "the", "default", "control", "pipe", "on", "a", "device", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L177-L198
227,493
pyusb/pyusb
usb/legacy.py
DeviceHandle.claimInterface
def claimInterface(self, interface): r"""Claims the interface with the Operating System. Arguments: interface: interface number or an Interface object. """ if isinstance(interface, Interface): interface = interface.interfaceNumber util.claim_interface(self.dev, interface) self.__claimed_interface = interface
python
def claimInterface(self, interface): r"""Claims the interface with the Operating System. Arguments: interface: interface number or an Interface object. """ if isinstance(interface, Interface): interface = interface.interfaceNumber util.claim_interface(self.dev, interface) self.__claimed_interface = interface
[ "def", "claimInterface", "(", "self", ",", "interface", ")", ":", "if", "isinstance", "(", "interface", ",", "Interface", ")", ":", "interface", "=", "interface", ".", "interfaceNumber", "util", ".", "claim_interface", "(", "self", ".", "dev", ",", "interface", ")", "self", ".", "__claimed_interface", "=", "interface" ]
r"""Claims the interface with the Operating System. Arguments: interface: interface number or an Interface object.
[ "r", "Claims", "the", "interface", "with", "the", "Operating", "System", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L208-L218
227,494
pyusb/pyusb
usb/legacy.py
DeviceHandle.releaseInterface
def releaseInterface(self): r"""Release an interface previously claimed with claimInterface.""" util.release_interface(self.dev, self.__claimed_interface) self.__claimed_interface = -1
python
def releaseInterface(self): r"""Release an interface previously claimed with claimInterface.""" util.release_interface(self.dev, self.__claimed_interface) self.__claimed_interface = -1
[ "def", "releaseInterface", "(", "self", ")", ":", "util", ".", "release_interface", "(", "self", ".", "dev", ",", "self", ".", "__claimed_interface", ")", "self", ".", "__claimed_interface", "=", "-", "1" ]
r"""Release an interface previously claimed with claimInterface.
[ "r", "Release", "an", "interface", "previously", "claimed", "with", "claimInterface", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L220-L223
227,495
pyusb/pyusb
usb/legacy.py
DeviceHandle.setConfiguration
def setConfiguration(self, configuration): r"""Set the active configuration of a device. Arguments: configuration: a configuration value or a Configuration object. """ if isinstance(configuration, Configuration): configuration = configuration.value self.dev.set_configuration(configuration)
python
def setConfiguration(self, configuration): r"""Set the active configuration of a device. Arguments: configuration: a configuration value or a Configuration object. """ if isinstance(configuration, Configuration): configuration = configuration.value self.dev.set_configuration(configuration)
[ "def", "setConfiguration", "(", "self", ",", "configuration", ")", ":", "if", "isinstance", "(", "configuration", ",", "Configuration", ")", ":", "configuration", "=", "configuration", ".", "value", "self", ".", "dev", ".", "set_configuration", "(", "configuration", ")" ]
r"""Set the active configuration of a device. Arguments: configuration: a configuration value or a Configuration object.
[ "r", "Set", "the", "active", "configuration", "of", "a", "device", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L238-L247
227,496
pyusb/pyusb
usb/legacy.py
DeviceHandle.setAltInterface
def setAltInterface(self, alternate): r"""Sets the active alternate setting of the current interface. Arguments: alternate: an alternate setting number or an Interface object. """ if isinstance(alternate, Interface): alternate = alternate.alternateSetting self.dev.set_interface_altsetting(self.__claimed_interface, alternate)
python
def setAltInterface(self, alternate): r"""Sets the active alternate setting of the current interface. Arguments: alternate: an alternate setting number or an Interface object. """ if isinstance(alternate, Interface): alternate = alternate.alternateSetting self.dev.set_interface_altsetting(self.__claimed_interface, alternate)
[ "def", "setAltInterface", "(", "self", ",", "alternate", ")", ":", "if", "isinstance", "(", "alternate", ",", "Interface", ")", ":", "alternate", "=", "alternate", ".", "alternateSetting", "self", ".", "dev", ".", "set_interface_altsetting", "(", "self", ".", "__claimed_interface", ",", "alternate", ")" ]
r"""Sets the active alternate setting of the current interface. Arguments: alternate: an alternate setting number or an Interface object.
[ "r", "Sets", "the", "active", "alternate", "setting", "of", "the", "current", "interface", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L249-L258
227,497
pyusb/pyusb
usb/legacy.py
DeviceHandle.getString
def getString(self, index, length, langid = None): r"""Retrieve the string descriptor specified by index and langid from a device. Arguments: index: index of descriptor in the device. length: number of bytes of the string (ignored) langid: Language ID. If it is omitted, the first language will be used. """ return util.get_string(self.dev, index, langid).encode('ascii')
python
def getString(self, index, length, langid = None): r"""Retrieve the string descriptor specified by index and langid from a device. Arguments: index: index of descriptor in the device. length: number of bytes of the string (ignored) langid: Language ID. If it is omitted, the first language will be used. """ return util.get_string(self.dev, index, langid).encode('ascii')
[ "def", "getString", "(", "self", ",", "index", ",", "length", ",", "langid", "=", "None", ")", ":", "return", "util", ".", "get_string", "(", "self", ".", "dev", ",", "index", ",", "langid", ")", ".", "encode", "(", "'ascii'", ")" ]
r"""Retrieve the string descriptor specified by index and langid from a device. Arguments: index: index of descriptor in the device. length: number of bytes of the string (ignored) langid: Language ID. If it is omitted, the first language will be used.
[ "r", "Retrieve", "the", "string", "descriptor", "specified", "by", "index", "and", "langid", "from", "a", "device", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L260-L270
227,498
pyusb/pyusb
usb/legacy.py
DeviceHandle.getDescriptor
def getDescriptor(self, desc_type, desc_index, length, endpoint = -1): r"""Retrieves a descriptor from the device identified by the type and index of the descriptor. Arguments: desc_type: descriptor type. desc_index: index of the descriptor. len: descriptor length. endpoint: ignored. """ return control.get_descriptor(self.dev, length, desc_type, desc_index)
python
def getDescriptor(self, desc_type, desc_index, length, endpoint = -1): r"""Retrieves a descriptor from the device identified by the type and index of the descriptor. Arguments: desc_type: descriptor type. desc_index: index of the descriptor. len: descriptor length. endpoint: ignored. """ return control.get_descriptor(self.dev, length, desc_type, desc_index)
[ "def", "getDescriptor", "(", "self", ",", "desc_type", ",", "desc_index", ",", "length", ",", "endpoint", "=", "-", "1", ")", ":", "return", "control", ".", "get_descriptor", "(", "self", ".", "dev", ",", "length", ",", "desc_type", ",", "desc_index", ")" ]
r"""Retrieves a descriptor from the device identified by the type and index of the descriptor. Arguments: desc_type: descriptor type. desc_index: index of the descriptor. len: descriptor length. endpoint: ignored.
[ "r", "Retrieves", "a", "descriptor", "from", "the", "device", "identified", "by", "the", "type", "and", "index", "of", "the", "descriptor", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/legacy.py#L272-L282
227,499
pyusb/pyusb
usb/backend/__init__.py
IBackend.get_endpoint_descriptor
def get_endpoint_descriptor(self, dev, ep, intf, alt, config): r"""Return an endpoint descriptor of the given device. The object returned is required to have all the Endpoint Descriptor fields acessible as member variables. They must be convertible (but not required to be equal) to the int type. The ep parameter is the endpoint logical index (not the bEndpointAddress field) of the endpoint descriptor desired. dev, intf, alt and config are the same values already described in the get_interface_descriptor() method. """ _not_implemented(self.get_endpoint_descriptor)
python
def get_endpoint_descriptor(self, dev, ep, intf, alt, config): r"""Return an endpoint descriptor of the given device. The object returned is required to have all the Endpoint Descriptor fields acessible as member variables. They must be convertible (but not required to be equal) to the int type. The ep parameter is the endpoint logical index (not the bEndpointAddress field) of the endpoint descriptor desired. dev, intf, alt and config are the same values already described in the get_interface_descriptor() method. """ _not_implemented(self.get_endpoint_descriptor)
[ "def", "get_endpoint_descriptor", "(", "self", ",", "dev", ",", "ep", ",", "intf", ",", "alt", ",", "config", ")", ":", "_not_implemented", "(", "self", ".", "get_endpoint_descriptor", ")" ]
r"""Return an endpoint descriptor of the given device. The object returned is required to have all the Endpoint Descriptor fields acessible as member variables. They must be convertible (but not required to be equal) to the int type. The ep parameter is the endpoint logical index (not the bEndpointAddress field) of the endpoint descriptor desired. dev, intf, alt and config are the same values already described in the get_interface_descriptor() method.
[ "r", "Return", "an", "endpoint", "descriptor", "of", "the", "given", "device", "." ]
ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9
https://github.com/pyusb/pyusb/blob/ffe6faf42c6ad273880b0b464b9bbf44c1d4b2e9/usb/backend/__init__.py#L140-L151